pax_global_header00006660000000000000000000000064142040115300014500gustar00rootroot0000000000000052 comment=a3fa4c479e6c34e1250340e416398b69d3b84f21 satpy-0.34.0/000077500000000000000000000000001420401153000127245ustar00rootroot00000000000000satpy-0.34.0/.bandit000066400000000000000000000000521420401153000141630ustar00rootroot00000000000000[bandit] skips: B506 exclude: satpy/tests satpy-0.34.0/.codebeatignore000066400000000000000000000000211420401153000156700ustar00rootroot00000000000000satpy/version.py satpy-0.34.0/.git_archival.txt000066400000000000000000000000461420401153000161770ustar00rootroot00000000000000ref-names: HEAD -> main, tag: v0.34.0 satpy-0.34.0/.gitattributes000066400000000000000000000000401420401153000156110ustar00rootroot00000000000000.git_archival.txt export-subst satpy-0.34.0/.github/000077500000000000000000000000001420401153000142645ustar00rootroot00000000000000satpy-0.34.0/.github/CODEOWNERS000066400000000000000000000112661420401153000156650ustar00rootroot00000000000000* @djhoese @mraspaud satpy/readers/seviri_l1b_hrit.py @sfinkens @mraspaud doc/source/api/satpy.composites.rst @pnuu @djhoese @mraspaud doc/source/composites.rst @pnuu @djhoese @mraspaud satpy/composites/abi.py @djhoese satpy/composites/cloud_products.py @adybbroe @mraspaud satpy/composites/crefl_utils.py @djhoese @mraspaud @adybbroe satpy/composites/sar.py @mraspaud satpy/demo/__init__.py @djhoese satpy/demo/google_cloud_platform.py @djhoese satpy/etc/composites/abi.yaml @djhoese satpy/etc/composites/avhrr-3.yaml @adybbroe @pnuu satpy/etc/composites/msi.yaml @mraspaud satpy/etc/composites/msu-gs.yaml @mraspaud satpy/etc/composites/olci.yaml @mraspaud satpy/etc/composites/sar-c.yaml @mraspaud satpy/etc/composites/sar.yaml @mraspaud satpy/etc/composites/seviri.yaml @mraspaud @pnuu @adybbroe satpy/etc/composites/slstr.yaml @mraspaud satpy/etc/composites/viirs.yaml @djhoese @mraspaud @adybbroe satpy/etc/composites/visir.yaml @djhoese @mraspaud @adybbroe @pnuu satpy/etc/readers/abi_l1b.yaml @djhoese satpy/etc/readers/abi_l1b_scmi.yaml @djhoese satpy/etc/readers/acspo.yaml @djhoese satpy/etc/readers/ahi_hrit.yaml @sfinkens @mraspaud satpy/etc/readers/ahi_hsd.yaml @sfinkens @djhoese @mraspaud satpy/etc/readers/avhrr_l1b_aapp.yaml @pnuu @mraspaud @adybbroe satpy/etc/readers/avhrr_l1b_eps.yaml @pnuu @mraspaud @adybbroe satpy/etc/readers/avhrr_l1b_gaclac.yaml @mraspaud @sfinkens satpy/etc/readers/avhrr_l1b_hrpt.yaml @mraspaud satpy/etc/readers/clavrx.yaml @djhoese satpy/etc/readers/electrol_hrit.yaml @sfinkens @mraspaud satpy/etc/readers/fci_l1c_nc.yaml @ameraner @gerritholl satpy/etc/readers/geocat.yaml @djhoese satpy/etc/readers/goes-imager_hrit.yaml @sfinkens @mraspaud satpy/etc/readers/goes-imager_nc.yaml @sfinkens @mraspaud satpy/etc/readers/iasi_l2.yaml @pnuu satpy/etc/readers/jami_hrit.yaml @sfinkens @mraspaud satpy/etc/readers/li_l2.yaml @sjoro satpy/etc/readers/maia.yaml @adybbroe satpy/etc/readers/msi_safe.yaml @mraspaud satpy/etc/readers/mtsat2-imager_hrit.yaml @sfinkens @mraspaud satpy/etc/readers/nucaps.yaml @djhoese satpy/etc/readers/nwcsaf-geo.yaml @adybbroe @pnuu satpy/etc/readers/nwcsaf-pps_nc.yaml @adybbroe @mraspaud satpy/etc/readers/olci_l1b.yaml @mraspaud satpy/etc/readers/olci_l2.yaml @mraspaud satpy/etc/readers/omps_edr.yaml @djhoese satpy/etc/readers/sar-c_safe.yaml @mraspaud satpy/etc/readers/seviri_l1b_hrit.yaml @sfinkens @sjoro @mraspaud satpy/etc/readers/seviri_l1b_native.yaml @sfinkens @sjoro @mraspaud satpy/etc/readers/seviri_l1b_nc.yaml @sjoro @sfinkens 
satpy/etc/readers/slstr_l1b.yaml @mraspaud satpy/etc/readers/viirs_compact.yaml @mraspaud satpy/etc/readers/viirs_edr_active_fires.yaml @adybbroe @djhoese satpy/etc/readers/viirs_edr_flood.yaml @djhoese satpy/etc/readers/viirs_l1b.yaml @djhoese satpy/etc/readers/virr_l1b.yaml @djhoese @adybbroe satpy/etc/writers/cf.yaml @mraspaud satpy/etc/writers/ninjotiff.yaml @mraspaud satpy/etc/writers/awips_tiled.yaml @djhoese satpy/readers/aapp_l1b.py @pnuu @mraspaud @adybbroe satpy/readers/abi_l1b.py @djhoese satpy/readers/acspo.py @djhoese satpy/readers/ahi_hsd.py @sfinkens @djhoese @mraspaud satpy/readers/avhrr_l1b_gaclac.py @mraspaud @sfinkens satpy/readers/clavrx.py @djhoese satpy/readers/electrol_hrit.py @sfinkens @mraspaud satpy/readers/eps_l1b.py @mraspaud @pnuu @adybbroe satpy/readers/eum_base.py @sjoro @sfinkens @adybbroe satpy/readers/fci_l1c_nc.py @ameraner @gerritholl satpy/readers/geocat.py @djhoese satpy/readers/goes_imager_hrit.py @sfinkens @mraspaud satpy/readers/goes_imager_nc.py @sfinkens @mraspaud satpy/readers/hrit_base.py @sfinkens @sjoro @mraspaud satpy/readers/hrit_jma.py @sfinkens @mraspaud satpy/readers/hrpt.py @mraspaud satpy/readers/iasi_l2.py @pnuu satpy/readers/li_l2.py @sjoro satpy/readers/maia.py @adybbroe satpy/readers/msi_safe.py @mraspaud satpy/readers/nucaps.py @djhoese satpy/readers/nwcsaf_nc.py @adybbroe @mraspaud satpy/readers/olci_nc.py @mraspaud satpy/readers/omps_edr.py @djhoese satpy/readers/sar_c_safe.py @mraspaud satpy/readers/scmi.py @djhoese satpy/readers/seviri_base.py @sfinkens @sjoro @mraspaud @adybbroe satpy/readers/seviri_l1b_hrit.py @sfinkens @sjoro @mraspaud satpy/readers/seviri_l1b_native.py @sjoro @sfinkens @mraspaud satpy/readers/seviri_l1b_native_hdr.py @sjoro @sfinkens @adybbroe satpy/readers/seviri_l1b_nc.py @sjoro @sfinkens @mraspaud satpy/readers/slstr_l1b.py @mraspaud satpy/readers/viirs_compact.py @mraspaud satpy/readers/viirs_edr_active_fires.py @adybbroe @djhoese satpy/readers/viirs_edr_flood.py @djhoese satpy/readers/viirs_l1b.py @djhoese satpy/readers/xmlformat.py @mraspaud satpy/resample.py @pnuu @djhoese @mraspaud satpy/writers/cf_writer.py @mraspaud satpy/writers/awips_tiled.py @djhoese utils/coord2area_def.py @mraspaud @adybbroe utils/fetch_avhrr_calcoeffs.py @pnuu satpy-0.34.0/.github/ISSUE_TEMPLATE/000077500000000000000000000000001420401153000164475ustar00rootroot00000000000000satpy-0.34.0/.github/ISSUE_TEMPLATE/bug_report.md000066400000000000000000000015261420401153000211450ustar00rootroot00000000000000--- name: Bug report about: Create a report to help us improve --- **Describe the bug** A clear and concise description of what the bug is. **To Reproduce** ```python # Your code here ``` **Expected behavior** A clear and concise description of what you expected to happen. **Actual results** Text output of actual results or error messages including full tracebacks if applicable. **Screenshots** If applicable, add screenshots to help explain your problem. **Environment Info:** - OS: [e.g. OSX, Windows, Linux] - Satpy Version: [e.g. 0.9.0] - PyResample Version: - Readers and writers dependencies (when relevant): [run `from satpy.utils import check_satpy; check_satpy()`] **Additional context** Add any other context about the problem here. satpy-0.34.0/.github/ISSUE_TEMPLATE/feature_request.md000066400000000000000000000012001420401153000221650ustar00rootroot00000000000000--- name: Feature request about: Suggest an idea for this project --- ## Feature Request **Is your feature request related to a problem? 
Please describe.** A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] **Describe the solution you'd like** A clear and concise description of what you want to happen. **Describe any changes to existing user workflow** Are there any backwards compatibility concerns? Changes to the build process? Additional dependencies? **Additional context** Have you considered any alternative solutions or is there anything else that would help describe your request. satpy-0.34.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000011021420401153000200570ustar00rootroot00000000000000 - [ ] Closes #xxxx - [ ] Tests added - [ ] Fully documented - [ ] Add your name to `AUTHORS.md` if not there already satpy-0.34.0/.github/workflows/000077500000000000000000000000001420401153000163215ustar00rootroot00000000000000satpy-0.34.0/.github/workflows/ci.yaml000066400000000000000000000115031420401153000176000ustar00rootroot00000000000000name: CI # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency # https://docs.github.com/en/developers/webhooks-and-events/events/github-event-types#pullrequestevent concurrency: group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.type }} cancel-in-progress: true on: [push, pull_request] jobs: lint: name: lint and style checks runs-on: ubuntu-latest steps: - name: Checkout source uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: 3.9 - name: Install dependencies run: | python -m pip install --upgrade pip pip install flake8 flake8-docstrings flake8-debugger flake8-bugbear pytest - name: Install Satpy run: | pip install -e . - name: Run linting run: | flake8 satpy/ website: name: build website runs-on: ubuntu-latest steps: - name: Checkout source uses: actions/checkout@v2 with: fetch-depth: 0 - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v2 with: miniforge-variant: Mambaforge miniforge-version: latest use-mamba: true python-version: "3.8" environment-file: continuous_integration/environment.yaml activate-environment: test-environment - name: Install Satpy shell: bash -l {0} run: | pip install sphinx sphinx_rtd_theme sphinxcontrib-apidoc; \ pip install --no-deps -e . 
- name: Run Sphinx Build shell: bash -l {0} run: | cd doc; \ make html SPHINXOPTS="-W" test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} needs: [lint] strategy: fail-fast: true matrix: os: ["windows-latest", "ubuntu-latest", "macos-latest"] python-version: ["3.7", "3.8", "3.9"] experimental: [false] include: - python-version: "3.8" os: "ubuntu-latest" experimental: true env: PYTHON_VERSION: ${{ matrix.python-version }} OS: ${{ matrix.os }} UNSTABLE: ${{ matrix.experimental }} ACTIONS_ALLOW_UNSECURE_COMMANDS: true steps: - name: Checkout source uses: actions/checkout@v2 - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v2 with: miniforge-variant: Mambaforge miniforge-version: latest use-mamba: true python-version: ${{ matrix.python-version }} environment-file: continuous_integration/environment.yaml activate-environment: test-environment - name: Install unstable dependencies if: matrix.experimental == true shell: bash -l {0} run: | python -m pip install \ --index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple/ \ --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ numpy \ pandas \ scipy; \ python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ git+https://github.com/Unidata/cftime \ git+https://github.com/mapbox/rasterio \ git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray; - name: Install satpy shell: bash -l {0} run: | python -m pip install --no-deps -e . - name: Run unit tests shell: bash -l {0} run: | pytest --cov=satpy satpy/tests --cov-report=xml --cov-report= - name: Upload unittest coverage to Codecov uses: codecov/codecov-action@v1 with: flags: unittests file: ./coverage.xml env_vars: OS,PYTHON_VERSION,UNSTABLE - name: Coveralls Parallel uses: AndreMiras/coveralls-python-action@develop with: flag-name: run-${{ matrix.test_number }} parallel: true if: runner.os == 'Linux' - name: Run behaviour tests shell: bash -l {0} run: | coverage run --source=satpy -m behave satpy/tests/features --tags=-download coverage xml - name: Upload behaviour test coverage to Codecov uses: codecov/codecov-action@v2 with: flags: behaviourtests file: ./coverage.xml env_vars: OS,PYTHON_VERSION,UNSTABLE coveralls: needs: [test] runs-on: ubuntu-latest steps: - name: Coveralls Finished uses: AndreMiras/coveralls-python-action@develop with: parallel-finished: true satpy-0.34.0/.github/workflows/deploy-sdist.yaml000066400000000000000000000007601420401153000216300ustar00rootroot00000000000000name: Deploy sdist on: release: types: - published jobs: test: runs-on: ubuntu-latest steps: - name: Checkout source uses: actions/checkout@v2 - name: Create sdist shell: bash -l {0} run: python setup.py sdist - name: Publish package to PyPI if: github.event.action == 'published' uses: pypa/gh-action-pypi-publish@v1.4.1 with: user: __token__ password: ${{ secrets.pypi_password }} satpy-0.34.0/.gitignore000066400000000000000000000013361420401153000147170ustar00rootroot00000000000000### PYTHON IGNORES ### *.py[cod] # C extensions *.so # Packages *.egg *.egg-info dist build doc/build eggs *.eggs parts bin var sdist develop-eggs .installed.cfg lib lib64 # Installer logs pip-log.txt # Unit test / coverage reports .coverage .tox nosetests.xml htmlcov #Translations *.mo #Sphinx doc/source/_build/* #Mr Developer .mr.developer.cfg ### C IGNORES ### # Object files *.o # Libraries *.lib *.a # Shared objects (inc. 
Windows DLLs) *.dll *.so *.so.* *.dylib # Executables *.exe *.out *.app # Others *~ # PyCharm Settings .idea # VSCode Settings .vscode # vi / vim swp files *.swp .DS_STORE # setuptools_scm files # this should be generated automatically when installed satpy/version.py doc/source/api/*.rst satpy-0.34.0/.pre-commit-config.yaml000066400000000000000000000021301420401153000172010ustar00rootroot00000000000000exclude: '^$' fail_fast: false repos: - repo: https://github.com/PyCQA/flake8 rev: 4.0.1 hooks: - id: flake8 additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe] args: [--max-complexity, "10"] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.1.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit rev: '1.7.2' # Update me! hooks: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy rev: 'v0.931' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: - types-docutils - types-pkg-resources - types-PyYAML - types-requests - repo: https://github.com/pycqa/isort rev: 5.10.1 hooks: - id: isort language_version: python3 ci: # To trigger manually, comment on a pull request with "pre-commit.ci autofix" autofix_prs: false skip: [bandit] satpy-0.34.0/.readthedocs.yml000066400000000000000000000005441420401153000160150ustar00rootroot00000000000000# Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details version: 2 # Build documentation in the docs/ directory with Sphinx sphinx: configuration: doc/source/conf.py # Optionally build your docs in additional formats such as PDF and ePub formats: all conda: environment: doc/rtd_environment.yml satpy-0.34.0/.stickler.yml000066400000000000000000000000671420401153000153500ustar00rootroot00000000000000linters: flake8: python: 3 config: setup.cfg satpy-0.34.0/.travis.yml000066400000000000000000000067721420401153000150510ustar00rootroot00000000000000language: python env: global: # Set defaults to avoid repeating in most cases - PYTHON_VERSION=$TRAVIS_PYTHON_VERSION - NUMPY_VERSION=stable - MAIN_CMD='python setup.py' - CONDA_DEPENDENCIES='xarray dask distributed toolz Cython sphinx cartopy pillow matplotlib scipy pyyaml pyproj pyresample coveralls coverage codecov behave netcdf4 h5py h5netcdf gdal rasterio imageio pyhdf mock libtiff geoviews zarr python-eccodes geoviews pytest pytest-cov fsspec pylibtiff' - PIP_DEPENDENCIES='trollsift trollimage pyspectral pyorbital' - SETUP_XVFB=False - EVENT_TYPE='push pull_request' - SETUP_CMD='test' - CONDA_CHANNELS='conda-forge' - CONDA_CHANNEL_PRIORITY='strict' - MAMBA=True - UNSTABLE_DEPS=False matrix: include: - env: PYTHON_VERSION=3.8 os: linux - env: PYTHON_VERSION=3.8 os: osx language: generic - env: PYTHON_VERSION=3.8 os: windows language: bash - env: PYTHON_VERSION=3.7 os: linux - env: PYTHON_VERSION=3.7 os: osx language: generic # allowed to fail: - os: linux env: - PYTHON_VERSION=3.8 - UNSTABLE_DEPS=True allow_failures: - os: linux env: - PYTHON_VERSION=3.8 - UNSTABLE_DEPS=True install: - git clone --depth 1 git://github.com/astropy/ci-helpers.git - source ci-helpers/travis/setup_conda.sh # See https://github.com/travis-ci/travis-ci/issues/8920 - if [ $TRAVIS_OS_NAME != "windows" ]; then python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)"; fi - if [ "$UNSTABLE_DEPS" == "True" ]; then python -m pip install -f 
https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com --no-deps --pre --upgrade matplotlib numpy pandas scipy; python -m pip install --no-deps --upgrade git+https://github.com/dask/dask git+https://github.com/dask/distributed git+https://github.com/zarr-developers/zarr git+https://github.com/Unidata/cftime git+https://github.com/mapbox/rasterio git+https://github.com/pydata/bottleneck git+https://github.com/pydata/xarray; fi - pip install --no-deps -e . script: - pytest --cov=satpy satpy/tests - coverage run -a --source=satpy -m behave satpy/tests/features --tags=-download - if [ "$TRAVIS_EVENT_TYPE" == "cron" ]; then coverage run -a --source=satpy -m behave satpy/tests/features; fi #after_success: #- if [[ $PYTHON_VERSION == 3.8 ]]; then coveralls; codecov; fi #deploy: # - provider: pypi # user: dhoese # password: # secure: frK+0k1STeTM7SizRseP0qdTfOVz9ZMIra+3qEytPdxCLceXAH8LxPU16zj5rdNQxasF1hZ6rAd952fly+ypw2TEf5r2WnStrt7G5QlyE7VB6XGSDpIUxKF1FYccLvYs0/R6Y35MTEPqdM51PM5yEBjoY5b4tA3RF3fDq11cqc/SiWr6DgSLB1WJZULOdtCzBbfGbm5LyJ7yeNbISASSAwVvZTGWw7kJDgi0W5zxwEX82N5tBGbfKIu59qmxyj8FxmcrUwKZ4P3rQNg1kN1utzAB+PSf3GAVvbZfWJQuAKwMqpZgaV9lX0V7eUd/AxPobzEk9WyoNBMIdrSPej5BKWTDiYvaeRTOsggoUCSQJJA/SITEvkJgLWXoKKX2OWrM8RBUO4MoZJpPGXN42PRtMJkV2sx6ZigkpJlHdn39SsIRZX31zsfv8bBhclb70bt1Ts0fDd0rVdZAI6gMI+sgUePwEUn+XbWrvI0sMfDX3QsXDMV393RHgaIPxd+lRqUlYsNOxjsWpsbsvX55ePLxYHsNrv11KKyL/iGjGotVeVUO5D78qvfd4JrsUnMalQyZfW8NTEKa5Ebcs7gYJTwYEOTCQU12BkHOv1zFkjZG5RdGwkEvG3pykLhx+qDyYEd7pKB3TvhzLPqZPSrPxirwcoc0UzCc6ocYdzpqVuViFuk= # distributions: sdist # skip_existing: true # on: # tags: true # repo: pytroll/satpy #notifications: # slack: # rooms: # - pytroll:96mNSYSI1dBjGyzVXkBT6qFt#github satpy-0.34.0/AUTHORS.md000066400000000000000000000107111420401153000143730ustar00rootroot00000000000000# Project Contributors The following people have made contributions to this project: - [Trygve Aspenes (TAlonglong)](https://github.com/TAlonglong) - [Talfan Barnie (TalfanBarnie)](https://github.com/TalfanBarnie) - [Jonathan Beavers (jon4than)](https://github.com/jon4than) - [Suyash Behera (Suyash458)](https://github.com/Suyash458) - [Ray Bell (raybellwaves)](https://github.com/raybellwaves) - [Jorge Bravo (jhbravo)](https://github.com/jhbravo) - [Sebastian Brodehl (sbrodehl)](https://github.com/sbrodehl) - [Andrew Brooks (howff)](https://github.com/howff) - Guido della Bruna - meteoswiss - [Pierre de Buyl (pdebuyl)](https://github.com/pdebuyl) - [Eric Bruning (deeplycloudy)](https://github.com/deeplycloudy) - [Lorenzo Clementi (loreclem)](https://github.com/loreclem) - [Colin Duff (ColinDuff)](https://github.com/ColinDuff) - [Radar, Satellite and Nowcasting Division (meteoswiss-mdr)](https://github.com/meteoswiss-mdr) - [Rohan Daruwala (rdaruwala)](https://github.com/rdaruwala) - [Adam Dybbroe (adybbroe)](https://github.com/adybbroe) - [Ulrik Egede (egede)](https://github.com/egede) - [Joleen Feltz (joleenf)](https://github.com/joleenf) - [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens) - [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY) - [Blanka Gvozdikova (gvozdikb)](https://github.com/gvozdikb) - [Nina Håkansson (ninahakansson)](https://github.com/ninahakansson) - [Ulrich Hamann](https://github.com/) - [Mitch Herbertson (mherbertson)](https://github.com/mherbertson) - [Gerrit Holl (gerritholl)](https://github.com/gerritholl) - [David Hoese (djhoese)](https://github.com/djhoese) - [Marc Honnorat (honnorat)](https://github.com/honnorat) - [Mikhail Itkin 
(mitkin)](https://github.com/mitkin) - [Tommy Jasmin (tommyjasmin)](https://github.com/tommyjasmin) - [Jactry Zeng](https://github.com/jactry) - [Johannes Johansson (JohannesSMHI)](https://github.com/JohannesSMHI) - [Sauli Joro (sjoro)](https://github.com/sjoro) - [Janne Kotro (jkotro)](https://github.com/jkotro) - [Ralph Kuehn (ralphk11)](https://github.com/ralphk11) - [Panu Lahtinen (pnuu)](https://github.com/pnuu) - [Thomas Leppelt (m4sth0)](https://github.com/m4sth0) - [Lu Liu (yukaribbba)](https://github.com/yukaribbba) - [Andrea Meraner (ameraner)](https://github.com/ameraner) - [Aronne Merrelli (aronnem)](https://github.com/aronnem) - [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) - [Tom Parker (tparker-usgs)](https://github.com/tparker-usgs) - [Christian Peters (peters77)](https://github.com/peters77) - [Ghislain Picard (ghislainp)](https://github.com/ghislainp) - [Simon R. Proud (simonrp84)](https://github.com/simonrp84) - [Lars Ørum Rasmussen (loerum)](https://github.com/loerum) - [Martin Raspaud (mraspaud)](https://github.com/mraspaud) - [William Roberts (wroberts4)](https://github.com/wroberts4) - [Pascale Roquet (roquetp)](https://github.com/roquetp) - [Kristian Rune Larsen](https://github.com/) - [RutgerK (RutgerK)](https://github.com/RutgerK) - Marco Sassi - meteoswiss - [Stefan Scheiblauer (StefanSnippetCoder)](https://github.com/StefanSnippetCoder) - [Ronald Scheirer](https://github.com/) - [Hauke Schulz (observingClouds)](https://github.com/observingClouds) - [Jakub Seidl (seidlj)](https://github.com/seidlj) - [Eysteinn Sigurðsson (eysteinn)](https://github.com/eysteinn) - [Dario Stelitano (bornagain1981)](https://github.com/bornagain1981) - [Johan Strandgren (strandgren)](https://github.com/strandgren) - [Matias Takala (elfsprite)](https://github.com/elfsprite) - [Taiga Tsukada (tsukada-cs)](https://github.com/tsukada-cs) - [Christian Versloot (christianversloot)](https://github.com/christianversloot) - [Helga Weber (helgaweb)](https://github.com/helgaweb) - [hazbottles (hazbottles)](https://github.com/hazbottles) - [oananicola (oananicola)](https://github.com/oananicola) - [praerien (praerien)](https://github.com/praerien) - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) satpy-0.34.0/CHANGELOG.md000066400000000000000000005324041420401153000145450ustar00rootroot00000000000000## Version 0.34.0 (2022/02/18) ### Issues Closed * [Issue 2026](https://github.com/pytroll/satpy/issues/2026) - Missing units in avhrr_l1b_eps reader ([PR 2027](https://github.com/pytroll/satpy/pull/2027) by [@gerritholl](https://github.com/gerritholl)) * [Issue 2024](https://github.com/pytroll/satpy/issues/2024) - Allow to skip unit conversion in ninjotiff writer ([PR 2025](https://github.com/pytroll/satpy/pull/2025) by [@gerritholl](https://github.com/gerritholl)) * [Issue 2023](https://github.com/pytroll/satpy/issues/2023) - Allow to keep units in composite * [Issue 2022](https://github.com/pytroll/satpy/issues/2022) - save_dataset changes dataset in-place * [Issue 2018](https://github.com/pytroll/satpy/issues/2018) - Wrong AxisIntercept (add_offset) when writing °C temperature units with ninjogeotiff writer * [Issue 2014](https://github.com/pytroll/satpy/issues/2014) - Problem in converting VIIRS hdf to geotif * [Issue 2010](https://github.com/pytroll/satpy/issues/2010) 
- AHI HSD true_color incorrect with cache_sensor_angles ([PR 2013](https://github.com/pytroll/satpy/pull/2013) by [@djhoese](https://github.com/djhoese)) * [Issue 2008](https://github.com/pytroll/satpy/issues/2008) - abi_l1b reader leaks memory in Python-3.7 ([PR 2011](https://github.com/pytroll/satpy/pull/2011) by [@sfinkens](https://github.com/sfinkens)) * [Issue 2004](https://github.com/pytroll/satpy/issues/2004) - Configure image type returned by MaskingCompositor ([PR 2005](https://github.com/pytroll/satpy/pull/2005) by [@gerritholl](https://github.com/gerritholl)) * [Issue 2001](https://github.com/pytroll/satpy/issues/2001) - Failed to load AVHRR LAC data * [Issue 1999](https://github.com/pytroll/satpy/issues/1999) - Reader for Арктика-М (Arktika-M) МСУ-ГС (MSU-GS) data ([PR 2000](https://github.com/pytroll/satpy/pull/2000) by [@simonrp84](https://github.com/simonrp84)) * [Issue 1998](https://github.com/pytroll/satpy/issues/1998) - Add reader for Arctica M N-1 hdf5 data * [Issue 1995](https://github.com/pytroll/satpy/issues/1995) - AttributeError when cropping data for VIIRS * [Issue 1959](https://github.com/pytroll/satpy/issues/1959) - Unittest failure in test_modifiers.py * [Issue 1948](https://github.com/pytroll/satpy/issues/1948) - Contribute to Satpy * [Issue 1945](https://github.com/pytroll/satpy/issues/1945) - Wrong dtype of `uint32` array saved by the cf_writer * [Issue 1943](https://github.com/pytroll/satpy/issues/1943) - sza_check from trollflow2 fails with KeyError: 'start_time' * [Issue 1883](https://github.com/pytroll/satpy/issues/1883) - Test failure on i386 and armhf ([PR 1966](https://github.com/pytroll/satpy/pull/1966) by [@djhoese](https://github.com/djhoese)) * [Issue 1384](https://github.com/pytroll/satpy/issues/1384) - AHI HRIT reader has gotten slower ([PR 1986](https://github.com/pytroll/satpy/pull/1986) by [@pnuu](https://github.com/pnuu)) * [Issue 1099](https://github.com/pytroll/satpy/issues/1099) - `find_files_and_readers` read unneeded files In this release 20 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 2027](https://github.com/pytroll/satpy/pull/2027) - Include units with AVHRR EPS metadata ([2026](https://github.com/pytroll/satpy/issues/2026)) * [PR 2017](https://github.com/pytroll/satpy/pull/2017) - Fix ABI rayleigh_corrected_crefl modifier using deprecated DEM specifier * [PR 2015](https://github.com/pytroll/satpy/pull/2015) - Fix various dask array bugs in CREFL modifier * [PR 2013](https://github.com/pytroll/satpy/pull/2013) - Fix angle generation caching occassionally swapping results ([2010](https://github.com/pytroll/satpy/issues/2010)) * [PR 2011](https://github.com/pytroll/satpy/pull/2011) - Fix memory leak in cached_property backport ([2008](https://github.com/pytroll/satpy/issues/2008), [2008](https://github.com/pytroll/satpy/issues/2008)) * [PR 2006](https://github.com/pytroll/satpy/pull/2006) - Fix Scene not being serializable * [PR 2002](https://github.com/pytroll/satpy/pull/2002) - Update tests to be more flexible to CRS and enhancement changes * [PR 1991](https://github.com/pytroll/satpy/pull/1991) - Update reference to dask distributed setup page * [PR 1988](https://github.com/pytroll/satpy/pull/1988) - Update geometry.py docstring from compositor to modifier * [PR 1987](https://github.com/pytroll/satpy/pull/1987) - Check that time is not already a coordinate in CF writer * [PR 1983](https://github.com/pytroll/satpy/pull/1983) - More general filename filter for ascat soil moisture, allowing for Metop-B and Metop-C * [PR 1982](https://github.com/pytroll/satpy/pull/1982) - Fix ninjotiff writer from erraneous K to C conversion #### Features added * [PR 2025](https://github.com/pytroll/satpy/pull/2025) - Allow skipping unit conversion in NinJoTIFF ([2024](https://github.com/pytroll/satpy/issues/2024)) * [PR 2007](https://github.com/pytroll/satpy/pull/2007) - Update abi_l2_nc to include filename metadata similar to abi_l1b * [PR 2005](https://github.com/pytroll/satpy/pull/2005) - Add flag to MaskingCompositor to return RGBA for single-band input ([2004](https://github.com/pytroll/satpy/issues/2004)) * [PR 2000](https://github.com/pytroll/satpy/pull/2000) - Add a reader for the MSU-GS/A + Arctica-M1 data ([1999](https://github.com/pytroll/satpy/issues/1999)) * [PR 1992](https://github.com/pytroll/satpy/pull/1992) - Add support for CMIC product from PPSv2021 * [PR 1989](https://github.com/pytroll/satpy/pull/1989) - read the "elevation" variable in slstr_l1b * [PR 1986](https://github.com/pytroll/satpy/pull/1986) - Add reader kwarg to 'ahi_hrit' to disable exact start_time ([1384](https://github.com/pytroll/satpy/issues/1384)) * [PR 1967](https://github.com/pytroll/satpy/pull/1967) - Add ability to read comma-separated colormaps during enhancement * [PR 1966](https://github.com/pytroll/satpy/pull/1966) - Reduce MODIS L1b/L2 test case size for better test performance ([1883](https://github.com/pytroll/satpy/issues/1883)) * [PR 1962](https://github.com/pytroll/satpy/pull/1962) - Use a dependency matrix for benchmarking #### Documentation changes * [PR 2020](https://github.com/pytroll/satpy/pull/2020) - Clarify documentation regarding attributes used in get_angles * [PR 1991](https://github.com/pytroll/satpy/pull/1991) - Update reference to dask distributed setup page * [PR 1988](https://github.com/pytroll/satpy/pull/1988) - Update geometry.py docstring from compositor to modifier * [PR 1969](https://github.com/pytroll/satpy/pull/1969) - Improve modifier documentation * [PR 1968](https://github.com/pytroll/satpy/pull/1968) - Improve 
API documentation in CompositeBase * [PR 1961](https://github.com/pytroll/satpy/pull/1961) - Update documentation to refer to all EO satellite data * [PR 1960](https://github.com/pytroll/satpy/pull/1960) - Add release notes and security policy to documentation * [PR 1950](https://github.com/pytroll/satpy/pull/1950) - Fix formatting in configuration documentation In this release 30 pull requests were closed. ## Version 0.33.1 (2021/12/17) ### Issues Closed * [Issue 1937](https://github.com/pytroll/satpy/issues/1937) - Add SECURITY.md * [Issue 1932](https://github.com/pytroll/satpy/issues/1932) - warnings of `invalid value encountered in true_divide` and `invalid value encountered in double_scalars` in * [Issue 1903](https://github.com/pytroll/satpy/issues/1903) - MPEF Product Header record definition , in seviri_base.py, needs to be updated * [Issue 1799](https://github.com/pytroll/satpy/issues/1799) - Deprecate Scene.attrs property * [Issue 1192](https://github.com/pytroll/satpy/issues/1192) - Harmonize SEVIRI area definitions In this release 5 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1946](https://github.com/pytroll/satpy/pull/1946) - Fix angle generation not working for StackedAreaDefinitions * [PR 1942](https://github.com/pytroll/satpy/pull/1942) - Fix dynamic_dnb composite converting NaNs to 0s * [PR 1941](https://github.com/pytroll/satpy/pull/1941) - Fix SAFE SAR azimuth noise array construction * [PR 1918](https://github.com/pytroll/satpy/pull/1918) - Fix geo interpolation for aapp data #### Features added * [PR 1674](https://github.com/pytroll/satpy/pull/1674) - Feature add support for AHI True Color Reproduction In this release 5 pull requests were closed. ## Version 0.33.0 (2021/12/10) ### Issues Closed * [Issue 1930](https://github.com/pytroll/satpy/issues/1930) - ninjogeotiff writer produces file with ninjo_TransparentPixel=None ([PR 1931](https://github.com/pytroll/satpy/pull/1931) by [@gerritholl](https://github.com/gerritholl)) * [Issue 1902](https://github.com/pytroll/satpy/issues/1902) - High memory usage generating composites from ABI/AHI In this release 2 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1931](https://github.com/pytroll/satpy/pull/1931) - When no fill value is used, write TransparentPixel=-1 in ninjogeotiff headers ([1930](https://github.com/pytroll/satpy/issues/1930)) * [PR 1926](https://github.com/pytroll/satpy/pull/1926) - Update seadas_l2 chlor_a enhancement to use new log10 stretch * [PR 1922](https://github.com/pytroll/satpy/pull/1922) - Fix ABI cloud_phase composite recipe and enhancement #### Features added * [PR 1917](https://github.com/pytroll/satpy/pull/1917) - Add support to read and visualize NOAA GOESR L2+ cloud mask products * [PR 1912](https://github.com/pytroll/satpy/pull/1912) - Add Frequency range * [PR 1908](https://github.com/pytroll/satpy/pull/1908) - Update AHI HSD calibration coefficients * [PR 1905](https://github.com/pytroll/satpy/pull/1905) - Updated mpef product header to include new fields * [PR 1882](https://github.com/pytroll/satpy/pull/1882) - Update GDAL_OPTIONS with driver= and COG-specific options * [PR 1370](https://github.com/pytroll/satpy/pull/1370) - Add support for reading AAPP level-1c MHS/AMSU-B data #### Refactoring * [PR 1910](https://github.com/pytroll/satpy/pull/1910) - Refactor SZA and cos(SZA) generation to reduce duplicate computations In this release 10 pull requests were closed. 
## Version 0.32.0 (2021/12/01)

### Issues Closed

* [Issue 1900](https://github.com/pytroll/satpy/issues/1900) - Load composites mixed from files or provided data ([PR 1901](https://github.com/pytroll/satpy/pull/1901) by [@djhoese](https://github.com/djhoese))
* [Issue 1898](https://github.com/pytroll/satpy/issues/1898) - Loading composites without file handlers fails with KeyError ([PR 1899](https://github.com/pytroll/satpy/pull/1899) by [@gerritholl](https://github.com/gerritholl))
* [Issue 1893](https://github.com/pytroll/satpy/issues/1893) - Download and install Satpy for raspberry pi
* [Issue 1889](https://github.com/pytroll/satpy/issues/1889) - Question: How to release loaded data from memory?
* [Issue 1880](https://github.com/pytroll/satpy/issues/1880) - Add area definitions corresponding to geostationary imager fields of regard ([PR 1881](https://github.com/pytroll/satpy/pull/1881) by [@gerritholl](https://github.com/gerritholl))
* [Issue 1879](https://github.com/pytroll/satpy/issues/1879) - How to use histogram enhancement in yaml files?
* [Issue 1749](https://github.com/pytroll/satpy/issues/1749) - Load from blended scene ([PR 1797](https://github.com/pytroll/satpy/pull/1797) by [@djhoese](https://github.com/djhoese))
* [Issue 1747](https://github.com/pytroll/satpy/issues/1747) - Load composites without file handlers. ([PR 1797](https://github.com/pytroll/satpy/pull/1797) by [@djhoese](https://github.com/djhoese))
* [Issue 1456](https://github.com/pytroll/satpy/issues/1456) - Default cache directory should respect XDG Base Directory Specification.
* [Issue 583](https://github.com/pytroll/satpy/issues/583) - PPP_CONFIG_DIR set locally does not include the global dir for the eps_l1b reader

In this release 10 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1899](https://github.com/pytroll/satpy/pull/1899) - Fix loading multi-sensor composites for manually added data ([1898](https://github.com/pytroll/satpy/issues/1898))
* [PR 1891](https://github.com/pytroll/satpy/pull/1891) - Fix file handlers improperly matching some file types
* [PR 1884](https://github.com/pytroll/satpy/pull/1884) - Fix nucaps reader failing when given multiple input files

#### Features added

* [PR 1901](https://github.com/pytroll/satpy/pull/1901) - Update Scene.sensor_names to include sensors from readers and contained data ([1900](https://github.com/pytroll/satpy/issues/1900))
* [PR 1897](https://github.com/pytroll/satpy/pull/1897) - Update AHI gridded reader to use HTTP instead of FTP
* [PR 1894](https://github.com/pytroll/satpy/pull/1894) - Add 'seadas_l2' reader for 'chlor_a' product
* [PR 1892](https://github.com/pytroll/satpy/pull/1892) - Add new pre-commit checks
* [PR 1888](https://github.com/pytroll/satpy/pull/1888) - Optimize composite YAML loading
* [PR 1885](https://github.com/pytroll/satpy/pull/1885) - Add optional on-disk zarr caching to sensor angle generation
* [PR 1881](https://github.com/pytroll/satpy/pull/1881) - Add area definitions for GOES ABI FOR ([1880](https://github.com/pytroll/satpy/issues/1880))
* [PR 1797](https://github.com/pytroll/satpy/pull/1797) - Allow loading of composites after Scene resampling ([1752](https://github.com/pytroll/satpy/issues/1752), [1749](https://github.com/pytroll/satpy/issues/1749), [1747](https://github.com/pytroll/satpy/issues/1747))

#### Documentation changes

* [PR 1873](https://github.com/pytroll/satpy/pull/1873) - Fix a typo in the ninjogeotiff documentation

In this release 12 pull requests were closed.
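PR 1885 above adds optional on-disk zarr caching when sensor angles are generated. A minimal sketch of enabling it through Satpy's runtime configuration follows; the `cache_sensor_angles` key is the one referenced by issue 2010 earlier in this changelog, while the cache directory and input file are placeholder assumptions.

```python
import satpy
from satpy import Scene

# Temporarily enable on-disk caching of generated sensor angles (PR 1885).
# The cache directory and the AHI HSD filename below are placeholders.
with satpy.config.set(cache_sensor_angles=True, cache_dir="/tmp/satpy_zarr_cache"):
    scn = Scene(filenames=["/data/ahi/HS_H08_20211201_0000_B03_FLDK_R05_S0110.DAT"],
                reader="ahi_hsd")
    # Loading an angle-dependent composite triggers the angle generation that
    # benefits from the cache.
    scn.load(["true_color"])
```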
## Version 0.31.0 (2021/11/03)

### Issues Closed

* [Issue 1866](https://github.com/pytroll/satpy/issues/1866) - Data Type of AHI NetCDF Output
* [Issue 1859](https://github.com/pytroll/satpy/issues/1859) - Yaml UnsafeLoader ImportError on colab.google ([PR 1860](https://github.com/pytroll/satpy/pull/1860) by [@abrammer](https://github.com/abrammer))
* [Issue 1853](https://m/pytroll/satpy/pull/1864) by [@djhoese](https://github.com/djhoese))

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1868](https://github.com/pytroll/satpy/pull/1868) - Fix MiRS reader not working with new versions of dask
* [PR 1860](https://github.com/pytroll/satpy/pull/1860) - Catch ImportError on UnsafeLoader in composites/config_loader ([1859](https://github.com/pytroll/satpy/issues/1859))
* [PR 1855](https://github.com/pytroll/satpy/pull/1855) - Fix 'acspo' reader producing non-y/x dimension names
* [PR 1854](https://github.com/pytroll/satpy/pull/1854) - Fix 'awips_tiled' writer doing unnecessary attribute formatting
* [PR 1849](https://github.com/pytroll/satpy/pull/1849) - Update AGRI reader to ensure that angles can be correctly loaded.

#### Features added

* [PR 1850](https://github.com/pytroll/satpy/pull/1850) - Adapt msi-safe to the new product format ([1847](https://github.com/pytroll/satpy/issues/1847))
* [PR 1839](https://github.com/pytroll/satpy/pull/1839) - Add ninjogeotiff writer to write GeoTIFFs including necessary NinJo tags in GDALMetadata ([1838](https://github.com/pytroll/satpy/issues/1838))
* [PR 1743](https://github.com/pytroll/satpy/pull/1743) - Add option to configure group_files behaviour on empty groups in case of multiple readers ([1742](https://github.com/pytroll/satpy/issues/1742))

#### Documentation changes

* [PR 1867](https://github.com/pytroll/satpy/pull/1867) - Update PDF metadata for sphinx documentation
* [PR 1864](https://github.com/pytroll/satpy/pull/1864) - Update Scene.save_datasets to clarify what will be saved ([1138](https://github.com/pytroll/satpy/issues/1138))
* [PR 1862](https://github.com/pytroll/satpy/pull/1862) - Correct phrasing of upside-down
* [PR 1852](https://github.com/pytroll/satpy/pull/1852) - Fix reference to dask distributed setup page

In this release 12 pull requests were closed.

## Version 0.30.1 (2021/09/28)

### Issues Closed

* [Issue 1835](https://github.com/pytroll/satpy/issues/1835) - scipy module error?
* [Issue 1832](https://github.com/pytroll/satpy/issues/1832) - variable from python to composite
* [Issue 1831](https://github.com/pytroll/satpy/issues/1831) - example yml files for other readers
* [Issue 1829](https://github.com/pytroll/satpy/issues/1829) - pytest satpy/tests does not work ([PR 1830](https://github.com/pytroll/satpy/pull/1830) by [@djhoese](https://github.com/djhoese))
* [Issue 1828](https://github.com/pytroll/satpy/issues/1828) - Error occurred plotting Himawari-8
* [Issue 1484](https://github.com/pytroll/satpy/issues/1484) - Broken links to new EUMETSAT website ([PR 1827](https://github.com/pytroll/satpy/pull/1827) by [@pdebuyl](https://github.com/pdebuyl))

In this release 6 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 1837](https://github.com/pytroll/satpy/pull/1837) - Fix config path handling on Windows
* [PR 1827](https://github.com/pytroll/satpy/pull/1827) - Fix eumetsat urls in satpy/readers ([1484](https://github.com/pytroll/satpy/issues/1484))

#### Documentation changes

* [PR 1837](https://github.com/pytroll/satpy/pull/1837) - Fix config path handling on Windows
* [PR 1830](https://github.com/pytroll/satpy/pull/1830) - Move tests_require to special "tests" extra for easier installation ([1829](https://github.com/pytroll/satpy/issues/1829))
* [PR 1827](https://github.com/pytroll/satpy/pull/1827) - Fix eumetsat urls in satpy/readers ([1484](https://github.com/pytroll/satpy/issues/1484))

In this release 5 pull requests were closed.

## Version 0.30.0 (2021/09/17)

### Issues Closed

* [Issue 1821](https://github.com/pytroll/satpy/issues/1821) - Resampling to `true_color_with_night_ir_hires` no longer works. ([PR 1823](https://github.com/pytroll/satpy/pull/1823))
* [Issue 1803](https://github.com/pytroll/satpy/issues/1803) - how to xRITDecompress files for using satpy
* [Issue 1796](https://github.com/pytroll/satpy/issues/1796) - Extend use of bz2 compression for input files for seviri_l1b_hrit ([PR 1798](https://github.com/pytroll/satpy/pull/1798))
* [Issue 1794](https://github.com/pytroll/satpy/issues/1794) - ir_overview vs cloudtop
* [Issue 1793](https://github.com/pytroll/satpy/issues/1793) - Different `y_bounds` and `x_bounds` shapes of TROPOMI MultiScene
* [Issue 1791](https://github.com/pytroll/satpy/issues/1791) - Memory usage has increased drastically
* [Issue 1786](https://github.com/pytroll/satpy/issues/1786) - The `viirs_sdr` reader does not function correctly with `GMODO` geolocation. ([PR 1787](https://github.com/pytroll/satpy/pull/1787))
* [Issue 1783](https://github.com/pytroll/satpy/issues/1783) - Metadata name problem in HY-2B L2B reader ([PR 1785](https://github.com/pytroll/satpy/pull/1785))
* [Issue 1780](https://github.com/pytroll/satpy/issues/1780) - What shoud I do if I only want to keep the day part of DayNightCompositor? ([PR 1816](https://github.com/pytroll/satpy/pull/1816))
* [Issue 1779](https://github.com/pytroll/satpy/issues/1779) - piecewise_linear_stretch didn't work properly on GK-2A AMI data
* [Issue 1773](https://github.com/pytroll/satpy/issues/1773) - [Question] Geolocation information of FengYun4A (FY-4A) AGRI L1B data ([PR 1782](https://github.com/pytroll/satpy/pull/1782))
* [Issue 1759](https://github.com/pytroll/satpy/issues/1759) - Ask For Help: How to operate SunZenithCorrector manually?
* [Issue 1750](https://github.com/pytroll/satpy/issues/1750) - MultiScene.blend does not document the interface for the blend function ([PR 1751](https://github.com/pytroll/satpy/pull/1751)) * [Issue 1745](https://github.com/pytroll/satpy/issues/1745) - Resampling MODIS Level 1B data * [Issue 1738](https://github.com/pytroll/satpy/issues/1738) - available_dataset_names omits composites depending on more than one reader * [Issue 1730](https://github.com/pytroll/satpy/issues/1730) - geotiff writer ignores dtype argument, always writes float if enhance=False ([PR 1733](https://github.com/pytroll/satpy/pull/1733)) * [Issue 1728](https://github.com/pytroll/satpy/issues/1728) - Unable to read HY-2B SCA L2B file * [Issue 1727](https://github.com/pytroll/satpy/issues/1727) - 'NoData' area is not black(clean) in the Sentinel-2 MSI output ([PR 1628](https://github.com/pytroll/satpy/pull/1628)) * [Issue 1722](https://github.com/pytroll/satpy/issues/1722) - 'ModuleNotFoundError' when processing Sentinel-2 MSI data ([PR 1723](https://github.com/pytroll/satpy/pull/1723)) * [Issue 1718](https://github.com/pytroll/satpy/issues/1718) - Raw metadata handling impacts performance ([PR 1795](https://github.com/pytroll/satpy/pull/1795)) * [Issue 1661](https://github.com/pytroll/satpy/issues/1661) - Support for clavrx netcdf files ([PR 1716](https://github.com/pytroll/satpy/pull/1716)) * [Issue 1625](https://github.com/pytroll/satpy/issues/1625) - Part of Sentinel-2 images missing when atmospheric corrected ([PR 1628](https://github.com/pytroll/satpy/pull/1628)) * [Issue 1584](https://github.com/pytroll/satpy/issues/1584) - to_xarray_dataset on empty scene fails with TypeError ([PR 1698](https://github.com/pytroll/satpy/pull/1698)) In this release 23 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1823](https://github.com/pytroll/satpy/pull/1823) - Fix unify_chunks usage in compositors and fix image mode in BackgroundCompositor ([1821](https://github.com/pytroll/satpy/issues/1821)) * [PR 1814](https://github.com/pytroll/satpy/pull/1814) - Add missing metadata to MODIS L1b and L2 readers * [PR 1813](https://github.com/pytroll/satpy/pull/1813) - Fix composites failing when inputs are different chunk sizes * [PR 1808](https://github.com/pytroll/satpy/pull/1808) - Fix ReflectanceCorrector (crefl) for MODIS data * [PR 1804](https://github.com/pytroll/satpy/pull/1804) - Fix consistency with nucaps sensor metadata (set/lowercase) * [PR 1802](https://github.com/pytroll/satpy/pull/1802) - Add warning in 'awips_tiled' writer when 'units' are too long * [PR 1800](https://github.com/pytroll/satpy/pull/1800) - Fix for missing attributes when requesting 'counts' calibration from ABI L1B reader. 
* [PR 1792](https://github.com/pytroll/satpy/pull/1792) - Maintain categorical clavrx data as integer arrays * [PR 1787](https://github.com/pytroll/satpy/pull/1787) - Fix 'viirs_sdr' repeating data when TC geolocation was not available ([1786](https://github.com/pytroll/satpy/issues/1786)) * [PR 1784](https://github.com/pytroll/satpy/pull/1784) - Fix ABI readers not assigning 'platform_name' for GOES-18/19 * [PR 1782](https://github.com/pytroll/satpy/pull/1782) - Update AGRI/L1 geolocation ([1773](https://github.com/pytroll/satpy/issues/1773)) * [PR 1777](https://github.com/pytroll/satpy/pull/1777) - Fix mviri l1b fiduceo reader compatibility with newer xarray * [PR 1776](https://github.com/pytroll/satpy/pull/1776) - Fix 'awips_tiled' writer producing an invalid y coordinate * [PR 1774](https://github.com/pytroll/satpy/pull/1774) - Fix the seviri benchmarks * [PR 1771](https://github.com/pytroll/satpy/pull/1771) - Fix VIIRS SDR reader not handling multi-granule files with fewer scans * [PR 1770](https://github.com/pytroll/satpy/pull/1770) - Fix CLAVR-x reader and 'awips_tiled' writer to produce AWIPS-compatible output * [PR 1744](https://github.com/pytroll/satpy/pull/1744) - Fix VIRR reader handling valid_range when it is a numpy array * [PR 1734](https://github.com/pytroll/satpy/pull/1734) - Remove valid_range from attributes in VIRR L1b reader * [PR 1733](https://github.com/pytroll/satpy/pull/1733) - Fix geotiff writer ignoring dtype argument ([1730](https://github.com/pytroll/satpy/issues/1730), [1730](https://github.com/pytroll/satpy/issues/1730)) * [PR 1724](https://github.com/pytroll/satpy/pull/1724) - Replace doc references to PPP_CONFIG_DIR ([1724](https://github.com/pytroll/satpy/issues/1724)) * [PR 1723](https://github.com/pytroll/satpy/pull/1723) - Fix package dependencies for the `msi_safe` reader ([1722](https://github.com/pytroll/satpy/issues/1722)) * [PR 1698](https://github.com/pytroll/satpy/pull/1698) - Fix error when calling to_xarray_dataset on an empty scene ([1584](https://github.com/pytroll/satpy/issues/1584)) * [PR 1628](https://github.com/pytroll/satpy/pull/1628) - Fix for transposed angles in safe-msi reader ([1727](https://github.com/pytroll/satpy/issues/1727), [1625](https://github.com/pytroll/satpy/issues/1625)) #### Features added * [PR 1824](https://github.com/pytroll/satpy/pull/1824) - Add additional ACSPO reader file patterns * [PR 1817](https://github.com/pytroll/satpy/pull/1817) - Fix ninjotiff writer for mode P * [PR 1816](https://github.com/pytroll/satpy/pull/1816) - Add 'day_night' flag to DayNightCompositor for day-only or night-only results ([1780](https://github.com/pytroll/satpy/issues/1780)) * [PR 1815](https://github.com/pytroll/satpy/pull/1815) - Add MODIS L2 products produced by IMAPP * [PR 1805](https://github.com/pytroll/satpy/pull/1805) - Add 'reader' name to all produced DataArrays * [PR 1801](https://github.com/pytroll/satpy/pull/1801) - added link to the GOES-2-go package in the docs as a download source. 
* [PR 1798](https://github.com/pytroll/satpy/pull/1798) - Add on-the-fly bz2 decompression for HRIT MSG PRO and EPI files ([1796](https://github.com/pytroll/satpy/issues/1796)) * [PR 1790](https://github.com/pytroll/satpy/pull/1790) - Add ABI L1B benchmarks * [PR 1785](https://github.com/pytroll/satpy/pull/1785) - Feature handle data from HY-2B SCAT files directly from NSOAS ([1783](https://github.com/pytroll/satpy/issues/1783)) * [PR 1772](https://github.com/pytroll/satpy/pull/1772) - Add access point to global_attrs to netCDF4FileHandler * [PR 1760](https://github.com/pytroll/satpy/pull/1760) - Add benchmarks for seviri hrit * [PR 1720](https://github.com/pytroll/satpy/pull/1720) - Add a test to ensure seviri hrv has priority over vis008 when requesting 0.8µm * [PR 1717](https://github.com/pytroll/satpy/pull/1717) - Add low resolution file patterns for AHI HSD reader * [PR 1716](https://github.com/pytroll/satpy/pull/1716) - Update Clavrx reader for netcdf files ([1661](https://github.com/pytroll/satpy/issues/1661)) * [PR 1692](https://github.com/pytroll/satpy/pull/1692) - Add raw 'counts' calibration to 'abi_l1b' reader * [PR 1297](https://github.com/pytroll/satpy/pull/1297) - Add support for MCMIP GOES ABI L2 files ([1162](https://github.com/pytroll/satpy/issues/1162)) #### Documentation changes * [PR 1819](https://github.com/pytroll/satpy/pull/1819) - Fix invalid YAML syntax in enhancement documentation * [PR 1801](https://github.com/pytroll/satpy/pull/1801) - added link to the GOES-2-go package in the docs as a download source. * [PR 1765](https://github.com/pytroll/satpy/pull/1765) - Add missing demo data directory entry to config documentation * [PR 1751](https://github.com/pytroll/satpy/pull/1751) - Improve documentation for MultiScene.blend ([1750](https://github.com/pytroll/satpy/issues/1750)) * [PR 1726](https://github.com/pytroll/satpy/pull/1726) - Point out get_area_def in resample documentation ([1726](https://github.com/pytroll/satpy/issues/1726)) * [PR 1724](https://github.com/pytroll/satpy/pull/1724) - Replace doc references to PPP_CONFIG_DIR ([1724](https://github.com/pytroll/satpy/issues/1724)) In this release 45 pull requests were closed. 
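PR 1816 in the feature list above adds a `day_night` flag to `DayNightCompositor` so a composite can be limited to the sunlit or dark part of a scene. A rough sketch of using it programmatically follows, under the assumption that `"day_only"` is an accepted value (as the PR title suggests) and that `scn` is an already-loaded Scene.

```python
from satpy.composites import DayNightCompositor

# Keep only the sunlit portion of an existing composite (PR 1816). With
# day_night="day_only" a single day-time input is assumed to be sufficient.
day_only = DayNightCompositor("true_color_day_only", day_night="day_only")
masked = day_only([scn["true_color"]])
```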
## Version 0.29.0 (2021/06/04) ### Issues Closed * [Issue 1714](https://github.com/pytroll/satpy/issues/1714) - Plotting day night composite satellite image * [Issue 1689](https://github.com/pytroll/satpy/issues/1689) - BackgroundCompositor using IR Sandwich (masked so only coldest clouds are visible) and True Color as inputs ([PR 1690](https://github.com/pytroll/satpy/pull/1690)) * [Issue 1684](https://github.com/pytroll/satpy/issues/1684) - Rename fci_l1c_fdhsi to fci_l1c_nc ([PR 1712](https://github.com/pytroll/satpy/pull/1712)) * [Issue 1293](https://github.com/pytroll/satpy/issues/1293) - DOC: broken link for geoview ([PR 1697](https://github.com/pytroll/satpy/pull/1697)) * [Issue 1120](https://github.com/pytroll/satpy/issues/1120) - Broken-off sentence in `cf_writer` module documentation: "If a non-dimensional coordinate is identical for" ([PR 1697](https://github.com/pytroll/satpy/pull/1697)) * [Issue 1104](https://github.com/pytroll/satpy/issues/1104) - NUCAPS reader uses incorrect _FillValue ([PR 1710](https://github.com/pytroll/satpy/pull/1710)) * [Issue 1097](https://github.com/pytroll/satpy/issues/1097) - Deprecate satpy.readers.utils.get_area_slices * [Issue 1085](https://github.com/pytroll/satpy/issues/1085) - Add tonemapping modifiers for truecolor images * [Issue 1060](https://github.com/pytroll/satpy/issues/1060) - Reorder installation instructions to put conda before PyPI ([PR 1711](https://github.com/pytroll/satpy/pull/1711)) * [Issue 1028](https://github.com/pytroll/satpy/issues/1028) - Mitiff tests failing on python 3.7 travis environments * [Issue 990](https://github.com/pytroll/satpy/issues/990) - Documentation on storing area definitions has a broken fragment identifier link to pyresample ([PR 1697](https://github.com/pytroll/satpy/pull/1697)) * [Issue 973](https://github.com/pytroll/satpy/issues/973) - For VIIRS composite there are two composites with the same name. * [Issue 936](https://github.com/pytroll/satpy/issues/936) - Swap names for Vis/IR default natural_color and natural_color_sun composites * [Issue 722](https://github.com/pytroll/satpy/issues/722) - Standardise self.mda for SEVIRI attributes * [Issue 608](https://github.com/pytroll/satpy/issues/608) - Update to fix deprecation warning from dask regarding atop * [Issue 566](https://github.com/pytroll/satpy/issues/566) - Add AbstractScene class * [Issue 500](https://github.com/pytroll/satpy/issues/500) - Add ability to add proper references to published algorithms * [Issue 495](https://github.com/pytroll/satpy/issues/495) - Update tests to skip tests if dependencies are missing * [Issue 425](https://github.com/pytroll/satpy/issues/425) - Add DART compatible observation writer * [Issue 346](https://github.com/pytroll/satpy/issues/346) - lat-lon as the default dimensions * [Issue 334](https://github.com/pytroll/satpy/issues/334) - Add 'Performance Tips' section to documentation * [Issue 164](https://github.com/pytroll/satpy/issues/164) - Should enhancers know the data type beforehand * [Issue 102](https://github.com/pytroll/satpy/issues/102) - Fix meteosat 10 area * [Issue 100](https://github.com/pytroll/satpy/issues/100) - Add background color option to simple image writer * [Issue 99](https://github.com/pytroll/satpy/issues/99) - Adding coastlines does not preserve transparency * [Issue 92](https://github.com/pytroll/satpy/issues/92) - Merge area definition files * [Issue 9](https://github.com/pytroll/satpy/issues/9) - Convert mpop readers to satpy yaml readers In this release 27 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 1710](https://github.com/pytroll/satpy/pull/1710) - Fix NUCAPS reader having incorrect _FillValue attribute ([1104](https://github.com/pytroll/satpy/issues/1104)) * [PR 1706](https://github.com/pytroll/satpy/pull/1706) - Update SLSTR reader to choose correct file for interpolated angles * [PR 1691](https://github.com/pytroll/satpy/pull/1691) - Fix reference to sector_id global key in 'awips_tiled' writer YAML * [PR 1690](https://github.com/pytroll/satpy/pull/1690) - Fix SandwichCompositor modifying input data ([1689](https://github.com/pytroll/satpy/issues/1689)) * [PR 1679](https://github.com/pytroll/satpy/pull/1679) - Remove extra attributes tag and fix indentation #### Features added * [PR 1715](https://github.com/pytroll/satpy/pull/1715) - Fix benchmarks to run with older commits * [PR 1701](https://github.com/pytroll/satpy/pull/1701) - Add pending deprecation reader names check * [PR 1680](https://github.com/pytroll/satpy/pull/1680) - Implement reading of index map and auxiliary data in FCI L1c reader #### Documentation changes * [PR 1711](https://github.com/pytroll/satpy/pull/1711) - Rewrite installation instructions to make conda use clearer ([1060](https://github.com/pytroll/satpy/issues/1060)) * [PR 1697](https://github.com/pytroll/satpy/pull/1697) - Solve various documentation issues ([990](https://github.com/pytroll/satpy/issues/990), [1293](https://github.com/pytroll/satpy/issues/1293), [1120](https://github.com/pytroll/satpy/issues/1120)) In this release 10 pull requests were closed. ## Version 0.28.1 (2021/05/18) ### Issues Closed * [Issue 1676](https://github.com/pytroll/satpy/issues/1676) - New config feature does not support a subprocess call to another script which uses satpy too. ([PR 1677](https://github.com/pytroll/satpy/pull/1677)) * [Issue 1647](https://github.com/pytroll/satpy/issues/1647) - Bucket resamplers AttributeError in logging message ([PR 1648](https://github.com/pytroll/satpy/pull/1648)) * [Issue 1145](https://github.com/pytroll/satpy/issues/1145) - satpy to support reading of satpy generated netcdf cf files * [Issue 1016](https://github.com/pytroll/satpy/issues/1016) - Add reader for netcdf datasets written with Satpy * [Issue 604](https://github.com/pytroll/satpy/issues/604) - test_generic_image.py failure: "projection not named" * [Issue 562](https://github.com/pytroll/satpy/issues/562) - Undocumented dependency packages ([PR 1673](https://github.com/pytroll/satpy/pull/1673)) In this release 6 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1677](https://github.com/pytroll/satpy/pull/1677) - Fix SATPY_CONFIG_PATH being unusable when imported in a subprocess ([1676](https://github.com/pytroll/satpy/issues/1676)) * [PR 1671](https://github.com/pytroll/satpy/pull/1671) - Improve MiRS reader handling of missing metadata * [PR 1670](https://github.com/pytroll/satpy/pull/1670) - Fix combination of raw metadata (again) * [PR 1666](https://github.com/pytroll/satpy/pull/1666) - Ensure that orbital parameters are in a dict * [PR 1648](https://github.com/pytroll/satpy/pull/1648) - Fix bucket resamplers trying to print non-existent name ([1647](https://github.com/pytroll/satpy/issues/1647)) * [PR 1639](https://github.com/pytroll/satpy/pull/1639) - Fix MultiScene writer handling of multiple delayed objects * [PR 1499](https://github.com/pytroll/satpy/pull/1499) - Fix default dtype in geotiff writer if enhance=False In this release 7 pull requests were closed. 
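PR 1499 above fixed the default dtype used by the geotiff writer when enhancement is disabled. Below is a short sketch of requesting a specific dtype explicitly; `scn` is an already-loaded Scene and the filename pattern is a placeholder.

```python
import numpy as np

# Write un-enhanced data as 32-bit floats with the geotiff writer (see PR 1499).
scn.save_datasets(writer="geotiff",
                  enhance=False,
                  dtype=np.float32,
                  filename="{name}_{start_time:%Y%m%d_%H%M}.tif")
```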
## Version 0.28.0 (2021/05/14) ### Issues Closed * [Issue 1669](https://github.com/pytroll/satpy/issues/1669) - Cropping a country from an earth image using latitude and longitude coordinate * [Issue 1667](https://github.com/pytroll/satpy/issues/1667) - Extracting data/ reading data from .DAT file * [Issue 1664](https://github.com/pytroll/satpy/issues/1664) - Nan values when resample with Kompsat * [Issue 1656](https://github.com/pytroll/satpy/issues/1656) - Cannot load datasets of multiple SEVIRI native files ([PR 1663](https://github.com/pytroll/satpy/pull/1663)) * [Issue 1650](https://github.com/pytroll/satpy/issues/1650) - wrong gamma for red beam of cira_fire_temperature RGB ([PR 1662](https://github.com/pytroll/satpy/pull/1662)) * [Issue 1641](https://github.com/pytroll/satpy/issues/1641) - UnicodeDecodeError and ValueError when passing local FSFile to abi_l1b * [Issue 1635](https://github.com/pytroll/satpy/issues/1635) - The `crop` function is no longer working. * [Issue 1633](https://github.com/pytroll/satpy/issues/1633) - Auxiliary offline download doesn't work for modifiers ([PR 1634](https://github.com/pytroll/satpy/pull/1634)) * [Issue 1632](https://github.com/pytroll/satpy/issues/1632) - Can't resample GOES Meso data when using night IR composite ([PR 1643](https://github.com/pytroll/satpy/pull/1643)) * [Issue 1626](https://github.com/pytroll/satpy/issues/1626) - problem with read UMETSAT * [Issue 1601](https://github.com/pytroll/satpy/issues/1601) - Allow MiRS reader to apply limb correction optionally ([PR 1621](https://github.com/pytroll/satpy/pull/1621)) * [Issue 1594](https://github.com/pytroll/satpy/issues/1594) - slstr_l2: Failed to filter out correct files using find_files_and_readers() with start_time and end_time * [Issue 1562](https://github.com/pytroll/satpy/issues/1562) - Improve Scene.copy wishlist handling when datasets to copy are specified ([PR 1630](https://github.com/pytroll/satpy/pull/1630)) * [Issue 1495](https://github.com/pytroll/satpy/issues/1495) - Values of reflectance In this release 14 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 1665](https://github.com/pytroll/satpy/pull/1665) - Fix fci l2 tests on windows * [PR 1663](https://github.com/pytroll/satpy/pull/1663) - Ignore raw metadata when combining metadata ([1656](https://github.com/pytroll/satpy/issues/1656)) * [PR 1662](https://github.com/pytroll/satpy/pull/1662) - Fix cira fire temperature and green snow ([1650](https://github.com/pytroll/satpy/issues/1650)) * [PR 1655](https://github.com/pytroll/satpy/pull/1655) - Apply valid_range in MiRS reader when present * [PR 1644](https://github.com/pytroll/satpy/pull/1644) - Add id for GOMS3/Electro-l n3 * [PR 1643](https://github.com/pytroll/satpy/pull/1643) - Fix combine_metadata not handling lists of different sizes ([1632](https://github.com/pytroll/satpy/issues/1632)) * [PR 1640](https://github.com/pytroll/satpy/pull/1640) - Fix AAPP l1b reader for negative slope on channel 2 ([332](https://github.com/ssec/polar2grid/issues/332)) * [PR 1634](https://github.com/pytroll/satpy/pull/1634) - Fix offline aux download not working for modifiers ([1633](https://github.com/pytroll/satpy/issues/1633)) * [PR 1631](https://github.com/pytroll/satpy/pull/1631) - Fix satellite altitude being in kilometers in ABI L2 reader * [PR 1630](https://github.com/pytroll/satpy/pull/1630) - Fix Scene.copy not preserving wishlist properly ([1562](https://github.com/pytroll/satpy/issues/1562)) * [PR 1578](https://github.com/pytroll/satpy/pull/1578) - Fix nightly/unstable CI URL #### Features added * [PR 1659](https://github.com/pytroll/satpy/pull/1659) - Add SEVIRI + NWC SAF GEO VIS/IR cloud overlay composite * [PR 1657](https://github.com/pytroll/satpy/pull/1657) - Add parallax-corrected file patterns to the nwcsaf-geo reader * [PR 1646](https://github.com/pytroll/satpy/pull/1646) - Add new piecewise_linear_stretch enhancement method * [PR 1636](https://github.com/pytroll/satpy/pull/1636) - Add first benchmarks (uses asv) * [PR 1623](https://github.com/pytroll/satpy/pull/1623) - Add the reinhard enhancements * [PR 1621](https://github.com/pytroll/satpy/pull/1621) - Add `limb_correction` keyword argument to MiRS reader ([1601](https://github.com/pytroll/satpy/issues/1601)) * [PR 1620](https://github.com/pytroll/satpy/pull/1620) - Add feature to StaticImageCompositor to allow filenames relative to Satpy 'data_dir' * [PR 1560](https://github.com/pytroll/satpy/pull/1560) - Allow custom dataset names in 'generic_image' reader and fix nodata handling In this release 19 pull requests were closed. 
## Version 0.27.0 (2021/03/26) ### Issues Closed * [Issue 1616](https://github.com/pytroll/satpy/issues/1616) - Thermal channels NinJoTIFF writing fail with AttributeError due to lost attributes ([PR 1617](https://github.com/pytroll/satpy/pull/1617)) * [Issue 1614](https://github.com/pytroll/satpy/issues/1614) - Saving AAPP-processed NOAA HRPT to NinJoTIFF fails with AttributeError ([PR 1615](https://github.com/pytroll/satpy/pull/1615)) * [Issue 1608](https://github.com/pytroll/satpy/issues/1608) - SEVIRI L1.5 native reader does not support files not including 0100 in the file name ([PR 1609](https://github.com/pytroll/satpy/pull/1609)) * [Issue 1605](https://github.com/pytroll/satpy/issues/1605) - Reading FSFile fails with TypeError ([PR 1606](https://github.com/pytroll/satpy/pull/1606)) * [Issue 1604](https://github.com/pytroll/satpy/issues/1604) - group_files does not support FSFile objects (TypeError: unhashable type 'FSFile') ([PR 1606](https://github.com/pytroll/satpy/pull/1606)) * [Issue 1493](https://github.com/pytroll/satpy/issues/1493) - Failed to save tropomi nc file with specific variables loaded ([PR 1588](https://github.com/pytroll/satpy/pull/1588)) In this release 6 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1617](https://github.com/pytroll/satpy/pull/1617) - Fix ninjotiff convert units attributes ([1616](https://github.com/pytroll/satpy/issues/1616)) * [PR 1615](https://github.com/pytroll/satpy/pull/1615) - Fix and improve unit conversion when writing NinJoTIFF ([1614](https://github.com/pytroll/satpy/issues/1614)) * [PR 1613](https://github.com/pytroll/satpy/pull/1613) - Standardize vii dims * [PR 1610](https://github.com/pytroll/satpy/pull/1610) - Fix auxiliary download script not using provided data directory * [PR 1609](https://github.com/pytroll/satpy/pull/1609) - Fix file pattern matching in SEVIRI Native reader ([1608](https://github.com/pytroll/satpy/issues/1608)) * [PR 1606](https://github.com/pytroll/satpy/pull/1606) - Make FSFile hashable again ([1605](https://github.com/pytroll/satpy/issues/1605), [1604](https://github.com/pytroll/satpy/issues/1604)) * [PR 1603](https://github.com/pytroll/satpy/pull/1603) - Update slstr_l2.yaml * [PR 1600](https://github.com/pytroll/satpy/pull/1600) - When setting `upper_right_corner` make sure that all dataset coordinates are flipped * [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of link_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) #### Features added * [PR 1618](https://github.com/pytroll/satpy/pull/1618) - Update VIIRS/MODIS ReflectanceCorrector modifier to download DEM data at runtime * [PR 1612](https://github.com/pytroll/satpy/pull/1612) - Add support for SEVIRI Native files without archive header * [PR 1602](https://github.com/pytroll/satpy/pull/1602) - Fix missing VIIRS SDR DNB solar and lunar azimuth angle datasets * [PR 1468](https://github.com/pytroll/satpy/pull/1468) - Harmonize SEVIRI auxiliary data #### Documentation changes * [PR 1599](https://github.com/pytroll/satpy/pull/1599) - Add MiRS and mimicTPW2_comp readers to sphinx documentation In this release 14 pull requests were closed. 
## Version 0.26.0 (2021/03/15) ### Issues Closed * [Issue 1587](https://github.com/pytroll/satpy/issues/1587) - Don't allow auxiliary downloads during tests ([PR 1591](https://github.com/pytroll/satpy/pull/1591)) * [Issue 1581](https://github.com/pytroll/satpy/issues/1581) - FSFile object compares unequal when all properties equal ([PR 1582](https://github.com/pytroll/satpy/pull/1582)) * [Issue 1573](https://github.com/pytroll/satpy/issues/1573) - Crash when reaching warnings.DeprecationWarning ([PR 1576](https://github.com/pytroll/satpy/pull/1576)) * [Issue 1572](https://github.com/pytroll/satpy/issues/1572) - Satpy Github issue template example code fails with ModuleNotFoundError ([PR 1575](https://github.com/pytroll/satpy/pull/1575)) * [Issue 1550](https://github.com/pytroll/satpy/issues/1550) - Scene metadata overwriting composite metadata and handling sets in filename generation ([PR 1551](https://github.com/pytroll/satpy/pull/1551)) * [Issue 1549](https://github.com/pytroll/satpy/issues/1549) - Satpy problems with MODIS ([PR 1556](https://github.com/pytroll/satpy/pull/1556)) * [Issue 1538](https://github.com/pytroll/satpy/issues/1538) - modifier API documentation not included with sphinx-generated API documentation * [Issue 1536](https://github.com/pytroll/satpy/issues/1536) - Can't resample mscn to GridDefinition * [Issue 1532](https://github.com/pytroll/satpy/issues/1532) - Loading SLSTR composite doesn't respect the `view` ([PR 1533](https://github.com/pytroll/satpy/pull/1533)) * [Issue 1530](https://github.com/pytroll/satpy/issues/1530) - Improve documentation/handling of string input for config_path ([PR 1534](https://github.com/pytroll/satpy/pull/1534)) * [Issue 1520](https://github.com/pytroll/satpy/issues/1520) - Test failure if SATPY_CONFIG_PATH set ([PR 1521](https://github.com/pytroll/satpy/pull/1521)) * [Issue 1518](https://github.com/pytroll/satpy/issues/1518) - satpy_cf_nc reader fails to read satpy cf writer generated netcdf files where variables start with a number. ([PR 1525](https://github.com/pytroll/satpy/pull/1525)) * [Issue 1517](https://github.com/pytroll/satpy/issues/1517) - Scene.load error on conflicting 'y' values with MSG example. 
* [Issue 1516](https://github.com/pytroll/satpy/issues/1516) - FSFile should support any PathLike objects ([PR 1519](https://github.com/pytroll/satpy/pull/1519)) * [Issue 1510](https://github.com/pytroll/satpy/issues/1510) - Seviri L1b native Solar zenith angle * [Issue 1509](https://github.com/pytroll/satpy/issues/1509) - Replace pkg_resources usage with version.py file ([PR 1512](https://github.com/pytroll/satpy/pull/1512)) * [Issue 1508](https://github.com/pytroll/satpy/issues/1508) - Add sphinx building to GitHub Actions * [Issue 1507](https://github.com/pytroll/satpy/issues/1507) - FCI Level2 OCA Data - error parameters have a parameter name change in the latest version of the test data ([PR 1524](https://github.com/pytroll/satpy/pull/1524)) * [Issue 1477](https://github.com/pytroll/satpy/issues/1477) - seviri l2 grib add file names from Eumetsat datastore ([PR 1503](https://github.com/pytroll/satpy/pull/1503)) * [Issue 1362](https://github.com/pytroll/satpy/issues/1362) - Feature request: download tif's if needed in a composite ([PR 1513](https://github.com/pytroll/satpy/pull/1513)) * [Issue 894](https://github.com/pytroll/satpy/issues/894) - SCMI Writer can produce un-ingestable AWIPS files * [Issue 628](https://github.com/pytroll/satpy/issues/628) - Use 'donfig' package for global configuration settings ([PR 1501](https://github.com/pytroll/satpy/pull/1501)) * [Issue 367](https://github.com/pytroll/satpy/issues/367) - Add 'to_xarray_dataset' method to Scene * [Issue 175](https://github.com/pytroll/satpy/issues/175) - Cannot read AVHRR in HRPT format (geoloc dtype error) ([PR 1531](https://github.com/pytroll/satpy/pull/1531)) In this release 24 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1596](https://github.com/pytroll/satpy/pull/1596) - Fix bug in finest_area and coarsest_area logic for originally flipped SEVIRI data * [PR 1592](https://github.com/pytroll/satpy/pull/1592) - Fix tests where xarray was unable to guess backend engine * [PR 1589](https://github.com/pytroll/satpy/pull/1589) - Delete unnecessary coordinates in tropomi reader * [PR 1582](https://github.com/pytroll/satpy/pull/1582) - Ensure FSFile objects compare equal when they should ([1581](https://github.com/pytroll/satpy/issues/1581)) * [PR 1579](https://github.com/pytroll/satpy/pull/1579) - Fix AHI HSD reader not having access to the AreaDefinition on load * [PR 1574](https://github.com/pytroll/satpy/pull/1574) - Fix, correct usage of data returned by pyspectral AtmosphericalCorrection * [PR 1567](https://github.com/pytroll/satpy/pull/1567) - Redesign awips_tiled writer to avoid xarray/dask deadlocks * [PR 1564](https://github.com/pytroll/satpy/pull/1564) - Fix DifferenceCompositor ignoring YAML metadata * [PR 1558](https://github.com/pytroll/satpy/pull/1558) - Fix dependency tree CompositorNode not retaining properties on copy * [PR 1556](https://github.com/pytroll/satpy/pull/1556) - Fix the dataid sorting ([1549](https://github.com/pytroll/satpy/issues/1549)) * [PR 1551](https://github.com/pytroll/satpy/pull/1551) - Fix composite metadata overwriting and 'sensor' filename formatting ([1550](https://github.com/pytroll/satpy/issues/1550)) * [PR 1548](https://github.com/pytroll/satpy/pull/1548) - Add 'environment_prefix' to AWIPS tiled writer for flexible filenames * [PR 1546](https://github.com/pytroll/satpy/pull/1546) - Make viirs-compact datasets compatible with dask distributed * [PR 1545](https://github.com/pytroll/satpy/pull/1545) - Fix deprecated sphinx html_context usage in conf.py * 
[PR 1542](https://github.com/pytroll/satpy/pull/1542) - Fix compression not being applied in awips_tiled writer * [PR 1541](https://github.com/pytroll/satpy/pull/1541) - Fix swath builtin coordinates not being used * [PR 1537](https://github.com/pytroll/satpy/pull/1537) - Add static scale_factor/add_offset/_FillValue to awips_tiled GLM config * [PR 1533](https://github.com/pytroll/satpy/pull/1533) - Fix SLSTR composites for oblique view ([1532](https://github.com/pytroll/satpy/issues/1532)) * [PR 1531](https://github.com/pytroll/satpy/pull/1531) - Update the HRPT reader to latest satpy api ([175](https://github.com/pytroll/satpy/issues/175)) * [PR 1524](https://github.com/pytroll/satpy/pull/1524) - Fixed issue with reading fci oca error data and added fci toz product ([1507](https://github.com/pytroll/satpy/issues/1507)) * [PR 1521](https://github.com/pytroll/satpy/pull/1521) - Fix config test when user environment variables are set ([1520](https://github.com/pytroll/satpy/issues/1520)) * [PR 1519](https://github.com/pytroll/satpy/pull/1519) - Allow to pass pathlike-objects to FSFile ([1516](https://github.com/pytroll/satpy/issues/1516)) * [PR 1514](https://github.com/pytroll/satpy/pull/1514) - Correct the pdict a_name of agri_l1 reader * [PR 1503](https://github.com/pytroll/satpy/pull/1503) - Fix issue with reading MSG GRIB products from the eumetsat datastore ([1477](https://github.com/pytroll/satpy/issues/1477)) #### Features added * [PR 1597](https://github.com/pytroll/satpy/pull/1597) - add file_patterns in file_types with resolution type for satpy_cf_nc reader * [PR 1591](https://github.com/pytroll/satpy/pull/1591) - Disallow tests from downloading files while running tests ([1587](https://github.com/pytroll/satpy/issues/1587)) * [PR 1586](https://github.com/pytroll/satpy/pull/1586) - Update GRIB reader for greater flexibility. 
* [PR 1580](https://github.com/pytroll/satpy/pull/1580) - Sar-c reader optimization * [PR 1577](https://github.com/pytroll/satpy/pull/1577) - New compositors: MultiFiller and LongitudeMaskingCompositor * [PR 1570](https://github.com/pytroll/satpy/pull/1570) - Add the SAR Ice Log composite * [PR 1565](https://github.com/pytroll/satpy/pull/1565) - Rename min_area() and max_area() methods * [PR 1563](https://github.com/pytroll/satpy/pull/1563) - Allow 'glm_l2' reader to accept arbitrary filename prefixes * [PR 1555](https://github.com/pytroll/satpy/pull/1555) - Add altitude in the list of dataset for OLCI.nc * [PR 1554](https://github.com/pytroll/satpy/pull/1554) - Enable showing DeprecationWarning in debug_on and add unit test ([1554](https://github.com/pytroll/satpy/issues/1554)) * [PR 1544](https://github.com/pytroll/satpy/pull/1544) - Read wavelength ranges from netcdf * [PR 1539](https://github.com/pytroll/satpy/pull/1539) - Fix args of bucket_sum and bucket_avg resampler * [PR 1525](https://github.com/pytroll/satpy/pull/1525) - When saving to CF prepend datasets starting with a digit by CHANNEL_ ([1518](https://github.com/pytroll/satpy/issues/1518)) * [PR 1522](https://github.com/pytroll/satpy/pull/1522) - Switch to 'ewa' and 'ewa_legacy' resamplers from pyresample * [PR 1513](https://github.com/pytroll/satpy/pull/1513) - Add auxiliary data download API ([1362](https://github.com/pytroll/satpy/issues/1362)) * [PR 1505](https://github.com/pytroll/satpy/pull/1505) - Ascat soilmoisture reader * [PR 1501](https://github.com/pytroll/satpy/pull/1501) - Add central configuration object ([628](https://github.com/pytroll/satpy/issues/628)) #### Documentation changes * [PR 1559](https://github.com/pytroll/satpy/pull/1559) - Fix geotiff writer FAQ link * [PR 1545](https://github.com/pytroll/satpy/pull/1545) - Fix deprecated sphinx html_context usage in conf.py * [PR 1543](https://github.com/pytroll/satpy/pull/1543) - Switch to sphinxcontrib.apidoc for automatically updating API docs ([1540](https://github.com/pytroll/satpy/issues/1540)) * [PR 1534](https://github.com/pytroll/satpy/pull/1534) - Clarify usage of config 'config_path' option ([1530](https://github.com/pytroll/satpy/issues/1530)) #### Backward incompatible changes * [PR 1565](https://github.com/pytroll/satpy/pull/1565) - Rename min_area() and max_area() methods * [PR 1561](https://github.com/pytroll/satpy/pull/1561) - Remove deprecated VIIRSFog compositor in favor of DifferenceCompositor * [PR 1501](https://github.com/pytroll/satpy/pull/1501) - Add central configuration object ([628](https://github.com/pytroll/satpy/issues/628)) In this release 48 pull requests were closed. ## Version 0.25.1 (2021/01/06) ### Issues Closed * [Issue 1500](https://github.com/pytroll/satpy/issues/1500) - Cannot create a scene for OLCI data In this release 1 issue was closed. ### Pull Requests Merged #### Bugs fixed * [PR 1502](https://github.com/pytroll/satpy/pull/1502) - Fix the linting error of test_agri_l1 * [PR 1459](https://github.com/pytroll/satpy/pull/1459) - Remove unnecessary string decode in agri_l1 reader In this release 2 pull requests were closed. 
## Version 0.25.0 (2021/01/04) ### Issues Closed * [Issue 1494](https://github.com/pytroll/satpy/issues/1494) - geolocation problem with MODIS LAADS data * [Issue 1489](https://github.com/pytroll/satpy/issues/1489) - The reader "viirs_l1b" cannot read the VIIRS L1B data * [Issue 1488](https://github.com/pytroll/satpy/issues/1488) - Resampling with bucket resamplers drops coords from xr.DataArray ([PR 1491](https://github.com/pytroll/satpy/pull/1491)) * [Issue 1460](https://github.com/pytroll/satpy/issues/1460) - VIIl1b reader fails for testdata ([PR 1462](https://github.com/pytroll/satpy/pull/1462)) * [Issue 1453](https://github.com/pytroll/satpy/issues/1453) - Small error in documentation ([PR 1473](https://github.com/pytroll/satpy/pull/1473)) * [Issue 1449](https://github.com/pytroll/satpy/issues/1449) - Encoding of wavelength range ([PR 1466](https://github.com/pytroll/satpy/pull/1466)) * [Issue 1446](https://github.com/pytroll/satpy/issues/1446) - Resample * [Issue 1443](https://github.com/pytroll/satpy/issues/1443) - Loading and resampling composites sometimes discards their dependencies ([PR 1351](https://github.com/pytroll/satpy/pull/1351)) * [Issue 1440](https://github.com/pytroll/satpy/issues/1440) - Error reading SEVIRI native file from EUMETSAT API ([PR 1438](https://github.com/pytroll/satpy/pull/1438)) * [Issue 1437](https://github.com/pytroll/satpy/issues/1437) - HSD / HRIT projection question * [Issue 1436](https://github.com/pytroll/satpy/issues/1436) - 'str' object has no attribute 'decode' during Sentinel-2 MSI processing * [Issue 1187](https://github.com/pytroll/satpy/issues/1187) - Areas claiming to view "full globe" should be labelled "full disk" instead ([PR 1485](https://github.com/pytroll/satpy/pull/1485)) In this release 12 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1491](https://github.com/pytroll/satpy/pull/1491) - Fix missing coordinates for bucket resamplers ([1488](https://github.com/pytroll/satpy/issues/1488)) * [PR 1481](https://github.com/pytroll/satpy/pull/1481) - Remove x/y coordinates in mviri_l1b_fiduceo_nc * [PR 1473](https://github.com/pytroll/satpy/pull/1473) - Fix '::' erroneous for dicts syntax in docstrings ([1453](https://github.com/pytroll/satpy/issues/1453), [1453](https://github.com/pytroll/satpy/issues/1453)) * [PR 1466](https://github.com/pytroll/satpy/pull/1466) - Fix wavelength range print out to use regular nbsp ([1449](https://github.com/pytroll/satpy/issues/1449)) * [PR 1447](https://github.com/pytroll/satpy/pull/1447) - Fix handling of modifiers in satpy-cf reader #### Features added * [PR 1485](https://github.com/pytroll/satpy/pull/1485) - Harmonise AreaDefinition namings in EUM geos readers and sort geos areas in areas.yaml ([1187](https://github.com/pytroll/satpy/issues/1187)) * [PR 1478](https://github.com/pytroll/satpy/pull/1478) - Improve FCI geolocation computation, harmonize area_id, add geolocation tests * [PR 1476](https://github.com/pytroll/satpy/pull/1476) - Add support for multiple values in the DecisionTree used for enhancements * [PR 1474](https://github.com/pytroll/satpy/pull/1474) - Fix EUMGACFDR reader so that all datasets can be read. 
* [PR 1465](https://github.com/pytroll/satpy/pull/1465) - Updates to FCI reader to include CT, CTTH, GII and the latest filenam… * [PR 1457](https://github.com/pytroll/satpy/pull/1457) - Harmonize calibration in SEVIRI readers * [PR 1442](https://github.com/pytroll/satpy/pull/1442) - Switch ci coverage to xml for codecov compatibility * [PR 1441](https://github.com/pytroll/satpy/pull/1441) - Add github workflow * [PR 1439](https://github.com/pytroll/satpy/pull/1439) - Add support for s3 buckets in OLCI and ABI l1 readers * [PR 1438](https://github.com/pytroll/satpy/pull/1438) - Full disk padding feature for SEVIRI Native data ([1440](https://github.com/pytroll/satpy/issues/1440)) * [PR 1427](https://github.com/pytroll/satpy/pull/1427) - Add reader for FIDUCEO MVIRI FCDR data * [PR 1421](https://github.com/pytroll/satpy/pull/1421) - Add reader for AMSR2 Level 2 data produced by GAASP software (amsr2_l2_gaasp) * [PR 1402](https://github.com/pytroll/satpy/pull/1402) - Add ability to create complex tiled AWIPS NetCDF files (formerly SCMI writer) * [PR 1393](https://github.com/pytroll/satpy/pull/1393) - Fix sar-c calibration and add support for dB units * [PR 1380](https://github.com/pytroll/satpy/pull/1380) - Add arbitrary filename suffix to ABI L1B reader * [PR 1351](https://github.com/pytroll/satpy/pull/1351) - Refactor Scene loading and dependency tree ([1443](https://github.com/pytroll/satpy/issues/1443)) * [PR 937](https://github.com/pytroll/satpy/pull/937) - Add GLM + ABI highlight composite #### Documentation changes * [PR 1473](https://github.com/pytroll/satpy/pull/1473) - Fix '::' erroneous for dicts syntax in docstrings ([1453](https://github.com/pytroll/satpy/issues/1453), [1453](https://github.com/pytroll/satpy/issues/1453)) * [PR 1448](https://github.com/pytroll/satpy/pull/1448) - DOC: add explanation to the way x and y work in aggregate #### Refactoring * [PR 1402](https://github.com/pytroll/satpy/pull/1402) - Add ability to create complex tiled AWIPS NetCDF files (formerly SCMI writer) * [PR 1351](https://github.com/pytroll/satpy/pull/1351) - Refactor Scene loading and dependency tree ([1443](https://github.com/pytroll/satpy/issues/1443)) In this release 26 pull requests were closed. ## Version 0.24.0 (2020/11/16) ### Issues Closed * [Issue 1412](https://github.com/pytroll/satpy/issues/1412) - Mimic reader fails when multiple times are provided to Scene object * [Issue 1409](https://github.com/pytroll/satpy/issues/1409) - "Unexpected number of scanlines!" 
when reading AVHRR GAC data * [Issue 1399](https://github.com/pytroll/satpy/issues/1399) - Custom Scene creation from MultiScene.from_files ([PR 1400](https://github.com/pytroll/satpy/pull/1400)) * [Issue 1396](https://github.com/pytroll/satpy/issues/1396) - reader_kwargs should differentiate between different readers ([PR 1397](https://github.com/pytroll/satpy/pull/1397)) * [Issue 1389](https://github.com/pytroll/satpy/issues/1389) - Can't load angle data from msi_safe in version 0.23 ([PR 1391](https://github.com/pytroll/satpy/pull/1391)) * [Issue 1387](https://github.com/pytroll/satpy/issues/1387) - NUCAPS time format of data from CLASS ([PR 1388](https://github.com/pytroll/satpy/pull/1388)) * [Issue 1371](https://github.com/pytroll/satpy/issues/1371) - MIMIC reader available_dataset_names returns 1d lat/lon fields ([PR 1392](https://github.com/pytroll/satpy/pull/1392)) * [Issue 1343](https://github.com/pytroll/satpy/issues/1343) - Feature Request: available_readers to return alphabetical order * [Issue 1224](https://github.com/pytroll/satpy/issues/1224) - GRIB-2/ICON geolocation unknown or invalid for western hemisphere ([PR 1296](https://github.com/pytroll/satpy/pull/1296)) In this release 9 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1435](https://github.com/pytroll/satpy/pull/1435) - Fix tests for GEOFlippableFileYAMLReader after AreaDefinition.area_extent being immutable * [PR 1433](https://github.com/pytroll/satpy/pull/1433) - Fix cloud-free pixels in cloudtop height composite * [PR 1432](https://github.com/pytroll/satpy/pull/1432) - Fix enhance2dataset to support P-mode datasets * [PR 1431](https://github.com/pytroll/satpy/pull/1431) - Fix crash when TLE files are missing * [PR 1430](https://github.com/pytroll/satpy/pull/1430) - Fix infer_mode not using the band coordinate * [PR 1428](https://github.com/pytroll/satpy/pull/1428) - Bugfix NWC SAF GEO v2016 area definition * [PR 1422](https://github.com/pytroll/satpy/pull/1422) - Fix HDF5 utility file handler not decoding byte arrays consistently * [PR 1413](https://github.com/pytroll/satpy/pull/1413) - Fix pyspectral link in the main doc page * [PR 1407](https://github.com/pytroll/satpy/pull/1407) - Fix mersi 2 angles reading * [PR 1392](https://github.com/pytroll/satpy/pull/1392) - Remove 1-D lat/lon variables from mimic reader's available datasets ([1371](https://github.com/pytroll/satpy/issues/1371)) * [PR 1391](https://github.com/pytroll/satpy/pull/1391) - Fix the MSI / Sentinel-2 reader so it uses new DataID ([1389](https://github.com/pytroll/satpy/issues/1389)) * [PR 1388](https://github.com/pytroll/satpy/pull/1388) - Fix handling of new date string formats in NUCAPS reader ([1387](https://github.com/pytroll/satpy/issues/1387)) * [PR 1382](https://github.com/pytroll/satpy/pull/1382) - Fixed bug getting the calibration name in mitiff writer * [PR 1296](https://github.com/pytroll/satpy/pull/1296) - Fix grib reader handling for data on 0-360 longitude ([1224](https://github.com/pytroll/satpy/issues/1224)) #### Features added * [PR 1420](https://github.com/pytroll/satpy/pull/1420) - Add support for Near-realtime VIIRS L1b data. 
* [PR 1411](https://github.com/pytroll/satpy/pull/1411) - Added MERSI-2 file pattern for data from NMSC * [PR 1406](https://github.com/pytroll/satpy/pull/1406) - Handle bilinear caching in Pyresample * [PR 1405](https://github.com/pytroll/satpy/pull/1405) - Add FIR product to seviri_l2_grib reader * [PR 1401](https://github.com/pytroll/satpy/pull/1401) - Add function to the SLSTR L1 reader to enable correction of VIS radiances. * [PR 1400](https://github.com/pytroll/satpy/pull/1400) - Improve customisation in multiscene creation ([1399](https://github.com/pytroll/satpy/issues/1399)) * [PR 1397](https://github.com/pytroll/satpy/pull/1397) - Allow different kwargs for different readers ([1396](https://github.com/pytroll/satpy/issues/1396)) * [PR 1394](https://github.com/pytroll/satpy/pull/1394) - Add satpy cf-reader and eumetsat gac reader ([1205](https://github.com/pytroll/satpy/issues/1205)) * [PR 1390](https://github.com/pytroll/satpy/pull/1390) - Add support to Pyspectral NIRReflectance masking limit * [PR 1378](https://github.com/pytroll/satpy/pull/1378) - Alphabetize available_readers method and update documentation #### Documentation changes * [PR 1415](https://github.com/pytroll/satpy/pull/1415) - Update Code of Conduct contact email to groups.io address * [PR 1413](https://github.com/pytroll/satpy/pull/1413) - Fix pyspectral link in the main doc page * [PR 1374](https://github.com/pytroll/satpy/pull/1374) - DOC: add conda-forge badge #### Backward incompatible changes * [PR 1360](https://github.com/pytroll/satpy/pull/1360) - Create new ModifierBase class and move existing modifiers to satpy.modifiers #### Refactoring * [PR 1360](https://github.com/pytroll/satpy/pull/1360) - Create new ModifierBase class and move existing modifiers to satpy.modifiers In this release 29 pull requests were closed. 
## Version 0.23.0 (2020/09/18) ### Issues Closed * [Issue 1372](https://github.com/pytroll/satpy/issues/1372) - fix typo in developer instructions for conda install ([PR 1373](https://github.com/pytroll/satpy/pull/1373)) * [Issue 1367](https://github.com/pytroll/satpy/issues/1367) - AVHRR lat/lon grids incorrect size ([PR 1368](https://github.com/pytroll/satpy/pull/1368)) * [Issue 1355](https://github.com/pytroll/satpy/issues/1355) - ir product * [Issue 1350](https://github.com/pytroll/satpy/issues/1350) - pip install[complete] vs pip install[all] * [Issue 1344](https://github.com/pytroll/satpy/issues/1344) - scn.load('C01') gives - TypeError * [Issue 1339](https://github.com/pytroll/satpy/issues/1339) - hrv composites for global scene * [Issue 1336](https://github.com/pytroll/satpy/issues/1336) - Problem with making MODIS L1 images * [Issue 1334](https://github.com/pytroll/satpy/issues/1334) - SEVIRI reader doesn't include Earth-Sun distance in the rad->refl calibration ([PR 1341](https://github.com/pytroll/satpy/pull/1341)) * [Issue 1330](https://github.com/pytroll/satpy/issues/1330) - AAPP AVHRR level 1 reader raises a Value error when a channel is missing ([PR 1333](https://github.com/pytroll/satpy/pull/1333)) * [Issue 1292](https://github.com/pytroll/satpy/issues/1292) - Feature Request: update to Quickstart to use data from the demo module * [Issue 1291](https://github.com/pytroll/satpy/issues/1291) - get_us_midlatitude_cyclone_abi in satpy.demo fails ([PR 1295](https://github.com/pytroll/satpy/pull/1295)) * [Issue 1289](https://github.com/pytroll/satpy/issues/1289) - update _makedirs in satpy.demo ([PR 1295](https://github.com/pytroll/satpy/pull/1295)) * [Issue 1279](https://github.com/pytroll/satpy/issues/1279) - MultiScene.blend(blend_function=timeseries) results in incorrect start_time, end_time * [Issue 1278](https://github.com/pytroll/satpy/issues/1278) - Trying to get Earth's semimajor and semiminor axis size from HRIT files * [Issue 1271](https://github.com/pytroll/satpy/issues/1271) - Test failures in MERSI and VIIRS readers after fixing bugs in test routines ([PR 1270](https://github.com/pytroll/satpy/pull/1270)) * [Issue 1268](https://github.com/pytroll/satpy/issues/1268) - Support multiple readers in MultiScene.from_files ([PR 1269](https://github.com/pytroll/satpy/pull/1269)) * [Issue 1261](https://github.com/pytroll/satpy/issues/1261) - Reading the SEVIRI HRV channel with seviri_l1b_native returns a numpy array ([PR 1272](https://github.com/pytroll/satpy/pull/1272)) * [Issue 1258](https://github.com/pytroll/satpy/issues/1258) - Saving true color GOES image requires double-resampling if calibration='radiance' ([PR 1088](https://github.com/pytroll/satpy/pull/1088)) * [Issue 1252](https://github.com/pytroll/satpy/issues/1252) - Incorrect error message when calibration key unknown * [Issue 1243](https://github.com/pytroll/satpy/issues/1243) - Wrong data type of orbital_parameters in FY4A AGRI reader ([PR 1244](https://github.com/pytroll/satpy/pull/1244)) * [Issue 1191](https://github.com/pytroll/satpy/issues/1191) - cf_writer should append to Convention global attribute if given header_attr ([PR 1204](https://github.com/pytroll/satpy/pull/1204)) * [Issue 1149](https://github.com/pytroll/satpy/issues/1149) - GLM data LCFA from Class * [Issue 299](https://github.com/pytroll/satpy/issues/299) - Missing HRV-channel StackedAreaDefinition for native_msg-reader In this release 23 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 1368](https://github.com/pytroll/satpy/pull/1368) - Fix wrong number of scanlines in eps reader ([1367](https://github.com/pytroll/satpy/issues/1367)) * [PR 1366](https://github.com/pytroll/satpy/pull/1366) - Fixing a few typos in slstr_l1b yaml reader * [PR 1365](https://github.com/pytroll/satpy/pull/1365) - Fix leftovers from module splitting * [PR 1358](https://github.com/pytroll/satpy/pull/1358) - Daskify Earth-Sun distance correction. * [PR 1357](https://github.com/pytroll/satpy/pull/1357) - Only add longitude/latitude variables in cf_writer if they are not included already. * [PR 1354](https://github.com/pytroll/satpy/pull/1354) - Update name for gridded AHI reader * [PR 1353](https://github.com/pytroll/satpy/pull/1353) - Add_band workaround for dask bug * [PR 1341](https://github.com/pytroll/satpy/pull/1341) - Add Sun-Earth distance corrector utility and apply in SEVIRI readers ([1334](https://github.com/pytroll/satpy/issues/1334)) * [PR 1338](https://github.com/pytroll/satpy/pull/1338) - Fix exception to catch when new namedtuple syntax is used * [PR 1333](https://github.com/pytroll/satpy/pull/1333) - Fix aapp_l1b reader to behave nicely on missing datasets ([1330](https://github.com/pytroll/satpy/issues/1330)) * [PR 1320](https://github.com/pytroll/satpy/pull/1320) - Fix 'viirs_sdr' reader not scaling DNB data properly * [PR 1319](https://github.com/pytroll/satpy/pull/1319) - Fix NIRReflectance passing None as sunz_threshold * [PR 1318](https://github.com/pytroll/satpy/pull/1318) - Fix time extraction from filenames in yaml for SEVIRI Native and NetCDF readers * [PR 1315](https://github.com/pytroll/satpy/pull/1315) - Fix tests on i386 * [PR 1313](https://github.com/pytroll/satpy/pull/1313) - Fix true colors generation for AHI HSD data and refactor the dep tree code * [PR 1311](https://github.com/pytroll/satpy/pull/1311) - Make colorize compositor dask-compatible * [PR 1309](https://github.com/pytroll/satpy/pull/1309) - Refactor the combine_metadata function and allow numpy arrays to be combined * [PR 1303](https://github.com/pytroll/satpy/pull/1303) - Fix nucaps reader failing when kwargs are passed * [PR 1302](https://github.com/pytroll/satpy/pull/1302) - Fix numpy scalars considered arrays in combine_metadata * [PR 1295](https://github.com/pytroll/satpy/pull/1295) - Fix ABI mid-latitude cyclone demo downloading wrong number of files ([1291](https://github.com/pytroll/satpy/issues/1291), [1289](https://github.com/pytroll/satpy/issues/1289)) * [PR 1262](https://github.com/pytroll/satpy/pull/1262) - Fix handling of HRV channel navigation for RSS data in seviri_l1b_native reader * [PR 1259](https://github.com/pytroll/satpy/pull/1259) - Update safe_msi for new pyproj compatibility * [PR 1247](https://github.com/pytroll/satpy/pull/1247) - Fix time reading in vaisala_gld360 reader #### Features added * [PR 1352](https://github.com/pytroll/satpy/pull/1352) - Reintroduce support for pyproj 1.9.6 in cf_writer * [PR 1342](https://github.com/pytroll/satpy/pull/1342) - Update seviri icare tests * [PR 1327](https://github.com/pytroll/satpy/pull/1327) - Refactor reader configuration loading to remove redundant code * [PR 1312](https://github.com/pytroll/satpy/pull/1312) - Add reader for gridded AHI data * [PR 1304](https://github.com/pytroll/satpy/pull/1304) - DOC: add create vm instructions * [PR 1294](https://github.com/pytroll/satpy/pull/1294) - Add ability to supply radiance correction coefficients to AHI HSD and AMI readers * [PR 
1284](https://github.com/pytroll/satpy/pull/1284) - add more RGB to FY4A * [PR 1269](https://github.com/pytroll/satpy/pull/1269) - Support multiple readers in group_files and MultiScene.from_files ([1268](https://github.com/pytroll/satpy/issues/1268)) * [PR 1263](https://github.com/pytroll/satpy/pull/1263) - Add generic filepatterns for mersi2 reader * [PR 1257](https://github.com/pytroll/satpy/pull/1257) - Add per-frame decoration to MultiScene ([1257](https://github.com/pytroll/satpy/issues/1257)) * [PR 1255](https://github.com/pytroll/satpy/pull/1255) - Add test utility to make a scene. * [PR 1254](https://github.com/pytroll/satpy/pull/1254) - Preserve chunks in CF Writer * [PR 1251](https://github.com/pytroll/satpy/pull/1251) - Add ABI Fire Temperature, Day Convection, and Cloud Type composites. * [PR 1241](https://github.com/pytroll/satpy/pull/1241) - Add environment variables handling to static image compositor * [PR 1237](https://github.com/pytroll/satpy/pull/1237) - More flexible way of passing avhrr_l1b_gaclac reader kwargs to pygac * [PR 1204](https://github.com/pytroll/satpy/pull/1204) - Alter the way cf_writer handles hardcoded global attributes ([1191](https://github.com/pytroll/satpy/issues/1191)) * [PR 1088](https://github.com/pytroll/satpy/pull/1088) - Make the metadata keys that uniquely identify a DataArray (DataID) configurable per reader ([1258](https://github.com/pytroll/satpy/issues/1258)) * [PR 564](https://github.com/pytroll/satpy/pull/564) - Add new ABI composites #### Documentation changes * [PR 1373](https://github.com/pytroll/satpy/pull/1373) - Fix word order error in conda install instructions ([1372](https://github.com/pytroll/satpy/issues/1372)) * [PR 1346](https://github.com/pytroll/satpy/pull/1346) - DOC: put pip install with extra dependency in quotation * [PR 1332](https://github.com/pytroll/satpy/pull/1332) - Remove reference to datasetid in tests.utils. * [PR 1331](https://github.com/pytroll/satpy/pull/1331) - Fix auxiliary files for releasing and pr template * [PR 1325](https://github.com/pytroll/satpy/pull/1325) - Use nbviewer for linking notebooks. 
* [PR 1317](https://github.com/pytroll/satpy/pull/1317) - Fix typo in variable names in resample documentation * [PR 1314](https://github.com/pytroll/satpy/pull/1314) - Remove use of YAML Anchors for easier understanding * [PR 1304](https://github.com/pytroll/satpy/pull/1304) - DOC: add create vm instructions * [PR 1264](https://github.com/pytroll/satpy/pull/1264) - Fix "see above" reference at start of enhance docs * [PR 1088](https://github.com/pytroll/satpy/pull/1088) - Make the metadata keys that uniquely identify a DataArray (DataID) configurable per reader ([1258](https://github.com/pytroll/satpy/issues/1258)) #### Backward incompatible changes * [PR 1327](https://github.com/pytroll/satpy/pull/1327) - Refactor reader configuration loading to remove redundant code * [PR 1300](https://github.com/pytroll/satpy/pull/1300) - Refactor scene to privatize some attributes and methods #### Refactoring * [PR 1341](https://github.com/pytroll/satpy/pull/1341) - Add Sun-Earth distance corrector utility and apply in SEVIRI readers ([1334](https://github.com/pytroll/satpy/issues/1334)) * [PR 1327](https://github.com/pytroll/satpy/pull/1327) - Refactor reader configuration loading to remove redundant code * [PR 1313](https://github.com/pytroll/satpy/pull/1313) - Fix true colors generation for AHI HSD data and refactor the dep tree code * [PR 1309](https://github.com/pytroll/satpy/pull/1309) - Refactor the combine_metadata function and allow numpy arrays to be combined * [PR 1301](https://github.com/pytroll/satpy/pull/1301) - Split DependencyTree from Node and DatasetDict * [PR 1300](https://github.com/pytroll/satpy/pull/1300) - Refactor scene to privatize some attributes and methods * [PR 1088](https://github.com/pytroll/satpy/pull/1088) - Make the metadata keys that uniquely identify a DataArray (DataID) configurable per reader ([1258](https://github.com/pytroll/satpy/issues/1258)) In this release 60 pull requests were closed. ## Version 0.22.0 (2020/06/10) ### Issues Closed * [Issue 1232](https://github.com/pytroll/satpy/issues/1232) - Add link to documentation for VII L1b-reader. 
([PR 1236](https://github.com/pytroll/satpy/pull/1236)) * [Issue 1229](https://github.com/pytroll/satpy/issues/1229) - FCI reader can read pixel_quality flags only after reading corresponding channel data ([PR 1230](https://github.com/pytroll/satpy/pull/1230)) * [Issue 1215](https://github.com/pytroll/satpy/issues/1215) - FCI reader fails to load composites due to metadata issues ([PR 1216](https://github.com/pytroll/satpy/pull/1216)) * [Issue 1201](https://github.com/pytroll/satpy/issues/1201) - Incorrect error message when some but not all readers found ([PR 1202](https://github.com/pytroll/satpy/pull/1202)) * [Issue 1198](https://github.com/pytroll/satpy/issues/1198) - Let NetCDF4FileHandler cache variable dimension names ([PR 1199](https://github.com/pytroll/satpy/pull/1199)) * [Issue 1190](https://github.com/pytroll/satpy/issues/1190) - Unknown dataset, solar_zenith_angle * [Issue 1172](https://github.com/pytroll/satpy/issues/1172) - find_files_and_readers is slow ([PR 1178](https://github.com/pytroll/satpy/pull/1178)) * [Issue 1171](https://github.com/pytroll/satpy/issues/1171) - Add reading of pixel_quality variable to FCI FDHSI reader ([PR 1177](https://github.com/pytroll/satpy/pull/1177)) * [Issue 1168](https://github.com/pytroll/satpy/issues/1168) - Add more versatile options for masking datasets ([PR 1175](https://github.com/pytroll/satpy/pull/1175)) * [Issue 1167](https://github.com/pytroll/satpy/issues/1167) - saving sentinel-2 image as jpg * [Issue 1164](https://github.com/pytroll/satpy/issues/1164) - Question about license * [Issue 1162](https://github.com/pytroll/satpy/issues/1162) - abi_l2_nc reader unable to read MCMIP files * [Issue 1156](https://github.com/pytroll/satpy/issues/1156) - dealing with 1D array output from data assimilation * [Issue 1154](https://github.com/pytroll/satpy/issues/1154) - MERSI-2 250meters corrected refl. * [Issue 1153](https://github.com/pytroll/satpy/issues/1153) - tropomi reader: scene attributes and data array attributes are different ([PR 1155](https://github.com/pytroll/satpy/pull/1155)) * [Issue 1151](https://github.com/pytroll/satpy/issues/1151) - amsr2 l1b reader also match amsr2 l2 products ([PR 1152](https://github.com/pytroll/satpy/pull/1152)) * [Issue 1144](https://github.com/pytroll/satpy/issues/1144) - Documentation bug: group_files keyword argument reader doc has sentence consisting of only the word "This" ([PR 1147](https://github.com/pytroll/satpy/pull/1147)) * [Issue 1143](https://github.com/pytroll/satpy/issues/1143) - save_datasets doesn't work for tropomi_l2 data ([PR 1139](https://github.com/pytroll/satpy/pull/1139)) * [Issue 1132](https://github.com/pytroll/satpy/issues/1132) - Add area definitions for the FCI FDHSI L1c grids ([PR 1188](https://github.com/pytroll/satpy/pull/1188)) * [Issue 1050](https://github.com/pytroll/satpy/issues/1050) - Return counts from avhrr_l1b_gaclac reader ([PR 1051](https://github.com/pytroll/satpy/pull/1051)) * [Issue 1014](https://github.com/pytroll/satpy/issues/1014) - The fci_l1c_fdhsi reader should provide the `platform_name` in the attributes ([PR 1176](https://github.com/pytroll/satpy/pull/1176)) * [Issue 958](https://github.com/pytroll/satpy/issues/958) - Add a CMSAF reader ([PR 720](https://github.com/pytroll/satpy/pull/720)) * [Issue 680](https://github.com/pytroll/satpy/issues/680) - Expose `overviews` from Trollimage for saving (geo)tiff images In this release 23 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 1230](https://github.com/pytroll/satpy/pull/1230) - FCI: fix areadef when only pixel quality asked ([1229](https://github.com/pytroll/satpy/issues/1229), [1229](https://github.com/pytroll/satpy/issues/1229)) * [PR 1216](https://github.com/pytroll/satpy/pull/1216) - Make combine_arrays understand non-numpy arrays ([1215](https://github.com/pytroll/satpy/issues/1215), [1215](https://github.com/pytroll/satpy/issues/1215)) * [PR 1213](https://github.com/pytroll/satpy/pull/1213) - Remove invalid valid_range metadata from abi readers * [PR 1211](https://github.com/pytroll/satpy/pull/1211) - Fix "rows_per_scan" not being available from VIIRS SDR readers * [PR 1202](https://github.com/pytroll/satpy/pull/1202) - Fix bad error message when Scene was given a bad reader name ([1201](https://github.com/pytroll/satpy/issues/1201)) * [PR 1195](https://github.com/pytroll/satpy/pull/1195) - Fix accessing uncached root group variable when using NetCDF4FileHandler in caching mode ([1195](https://github.com/pytroll/satpy/issues/1195)) * [PR 1170](https://github.com/pytroll/satpy/pull/1170) - Fix cf writing of 3d arrays * [PR 1155](https://github.com/pytroll/satpy/pull/1155) - Lowercase sensor of tropomi_l2 ([1153](https://github.com/pytroll/satpy/issues/1153)) * [PR 1139](https://github.com/pytroll/satpy/pull/1139) - Keep int type and fix scale_factor/dim bug in tropomi_l2 reader ([1143](https://github.com/pytroll/satpy/issues/1143)) #### Features added * [PR 1227](https://github.com/pytroll/satpy/pull/1227) - Delete kdtree after saving cache * [PR 1226](https://github.com/pytroll/satpy/pull/1226) - Add a feature for handling scheduled_time in ahi_hsd reader. * [PR 1219](https://github.com/pytroll/satpy/pull/1219) - Add VII L2 netCDF-reader. * [PR 1218](https://github.com/pytroll/satpy/pull/1218) - Add VII L1b netCDF-reader. * [PR 1212](https://github.com/pytroll/satpy/pull/1212) - Add file pattern for NWCSAF input file names to 'grib' reader ([1212](https://github.com/pytroll/satpy/issues/1212)) * [PR 1199](https://github.com/pytroll/satpy/pull/1199) - Cache dimension per variable ([1198](https://github.com/pytroll/satpy/issues/1198)) * [PR 1189](https://github.com/pytroll/satpy/pull/1189) - Add option to supply sunz-threshold applied in Pyspectral * [PR 1188](https://github.com/pytroll/satpy/pull/1188) - Add areas for FCI ([1132](https://github.com/pytroll/satpy/issues/1132)) * [PR 1186](https://github.com/pytroll/satpy/pull/1186) - Fix SEVIRI native reader flipping * [PR 1185](https://github.com/pytroll/satpy/pull/1185) - Add scanline acquisition times to hrit_jma * [PR 1183](https://github.com/pytroll/satpy/pull/1183) - Add options for creating geotiff overviews * [PR 1181](https://github.com/pytroll/satpy/pull/1181) - Add more explicit error message when string is passed to Scene.load * [PR 1180](https://github.com/pytroll/satpy/pull/1180) - Migrate FCI tests to pytest * [PR 1178](https://github.com/pytroll/satpy/pull/1178) - Optimize readers searching for matching filenames ([1172](https://github.com/pytroll/satpy/issues/1172)) * [PR 1177](https://github.com/pytroll/satpy/pull/1177) - Add support for reading pixel_quality ancillary variables, FCI reader no longer logs warnings ([1171](https://github.com/pytroll/satpy/issues/1171)) * [PR 1176](https://github.com/pytroll/satpy/pull/1176) - Provide platform_name in FCI L1C FDHSI reader. 
([1014](https://github.com/pytroll/satpy/issues/1014)) * [PR 1175](https://github.com/pytroll/satpy/pull/1175) - Add more flexible masking ([1168](https://github.com/pytroll/satpy/issues/1168)) * [PR 1173](https://github.com/pytroll/satpy/pull/1173) - Check whether time dimension exists for timeseries * [PR 1169](https://github.com/pytroll/satpy/pull/1169) - Implement remote file search * [PR 1165](https://github.com/pytroll/satpy/pull/1165) - Add missing_ok option to find_files_and_readers ([1165](https://github.com/pytroll/satpy/issues/1165)) * [PR 1163](https://github.com/pytroll/satpy/pull/1163) - Add TROPOMI NO2 LEVEL2 composites * [PR 1161](https://github.com/pytroll/satpy/pull/1161) - Add Effective_Pressure to NUCAPS reader * [PR 1152](https://github.com/pytroll/satpy/pull/1152) - amsr2 reader for l2 ssw product ([1151](https://github.com/pytroll/satpy/issues/1151)) * [PR 1142](https://github.com/pytroll/satpy/pull/1142) - add filepatterns S-HSAF-h03B and S-HSAF-h05B to hsaf_grib.yaml * [PR 1141](https://github.com/pytroll/satpy/pull/1141) - Add night lights composites for ABI, AHI and AMI * [PR 1135](https://github.com/pytroll/satpy/pull/1135) - Fix reflectance and BT calibration in FCI FDHSI reader * [PR 1100](https://github.com/pytroll/satpy/pull/1100) - Add support for GPM IMERG data * [PR 1051](https://github.com/pytroll/satpy/pull/1051) - Return counts from satpy/avhrr_l1b_gaclac reader ([1050](https://github.com/pytroll/satpy/issues/1050)) * [PR 983](https://github.com/pytroll/satpy/pull/983) - Add group method to MultiScene * [PR 812](https://github.com/pytroll/satpy/pull/812) - Add MOD06 support to 'modis_l2' reader ([1200](https://github.com/pytroll/satpy/issues/1200)) * [PR 720](https://github.com/pytroll/satpy/pull/720) - CMSAF CLAAS v2. reader ([958](https://github.com/pytroll/satpy/issues/958)) #### Documentation changes * [PR 1223](https://github.com/pytroll/satpy/pull/1223) - Add FCI Natural Color example page to sphinx docs * [PR 1203](https://github.com/pytroll/satpy/pull/1203) - Add link to MTSAT sample data * [PR 1147](https://github.com/pytroll/satpy/pull/1147) - Fix incomplete group_files docstring ([1144](https://github.com/pytroll/satpy/issues/1144)) In this release 43 pull requests were closed. 
## Version 0.21.0 (2020/04/06) ### Issues Closed * [Issue 1124](https://github.com/pytroll/satpy/issues/1124) - Crop scene of visual spectrum of the sentinel 2 satellite ([PR 1125](https://github.com/pytroll/satpy/pull/1125)) * [Issue 1112](https://github.com/pytroll/satpy/issues/1112) - Loading both abi and nwcsaf-geo confuses satpy into sometimes trying the wrong composite ([PR 1113](https://github.com/pytroll/satpy/pull/1113)) * [Issue 1096](https://github.com/pytroll/satpy/issues/1096) - Saving an image with NinjoTIFFWriter is broken in satpy v.0.20.0 ([PR 1098](https://github.com/pytroll/satpy/pull/1098)) * [Issue 1092](https://github.com/pytroll/satpy/issues/1092) - Avhrr l1b eps reader changes values of angles after reading ([PR 1101](https://github.com/pytroll/satpy/pull/1101)) * [Issue 1087](https://github.com/pytroll/satpy/issues/1087) - Saving each scene in a separate image file * [Issue 1075](https://github.com/pytroll/satpy/issues/1075) - SEVIRI L1b netCDF reader not dask-compliant ([PR 1109](https://github.com/pytroll/satpy/pull/1109)) * [Issue 1059](https://github.com/pytroll/satpy/issues/1059) - test against xarray master ([PR 1095](https://github.com/pytroll/satpy/pull/1095)) * [Issue 1013](https://github.com/pytroll/satpy/issues/1013) - Fails to load solar_zenith_angle from SLSTR l1b data * [Issue 883](https://github.com/pytroll/satpy/issues/883) - satpy resample call -> numpy.ndarray deepcopy error ([PR 1126](https://github.com/pytroll/satpy/pull/1126)) * [Issue 840](https://github.com/pytroll/satpy/issues/840) - MTG-FCI-FDHSI reader has wrong projection ([PR 845](https://github.com/pytroll/satpy/pull/845)) * [Issue 630](https://github.com/pytroll/satpy/issues/630) - Converting hdf5 attributes to string containing h5py.Reference of size 1 causes an AttributeError ([PR 1126](https://github.com/pytroll/satpy/pull/1126)) In this release 11 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1131](https://github.com/pytroll/satpy/pull/1131) - Fix geostationary utilities assuming a/b radii are always available * [PR 1129](https://github.com/pytroll/satpy/pull/1129) - Make the viirs_sdr reader return float32s * [PR 1125](https://github.com/pytroll/satpy/pull/1125) - Fix Scene.crop using PROJ definition to create target area definition ([1124](https://github.com/pytroll/satpy/issues/1124)) * [PR 1118](https://github.com/pytroll/satpy/pull/1118) - Fix supported Python version in devguide * [PR 1116](https://github.com/pytroll/satpy/pull/1116) - Make an alias for the snow composite in viirs * [PR 1115](https://github.com/pytroll/satpy/pull/1115) - Fix mitiff writer to support sensors as a set * [PR 1113](https://github.com/pytroll/satpy/pull/1113) - Add sensor-name property to NWCSAF readers ([1112](https://github.com/pytroll/satpy/issues/1112), [1111](https://github.com/pytroll/satpy/issues/1111)) * [PR 1107](https://github.com/pytroll/satpy/pull/1107) - Raise an error if data and angle shapes don't match in NIRReflectance * [PR 1106](https://github.com/pytroll/satpy/pull/1106) - Scale valid range if available. 
* [PR 1101](https://github.com/pytroll/satpy/pull/1101) - Fix eps l1b angles computation returning non deterministic results ([1092](https://github.com/pytroll/satpy/issues/1092)) * [PR 1098](https://github.com/pytroll/satpy/pull/1098) - Fix ninjotiff writer tests failing when pyninjotiff is installed ([1096](https://github.com/pytroll/satpy/issues/1096)) * [PR 1089](https://github.com/pytroll/satpy/pull/1089) - Make sunz correction use available sunz dataset * [PR 1038](https://github.com/pytroll/satpy/pull/1038) - Switch to pyproj for projection to CF NetCDF grid mapping ([1029](https://github.com/pytroll/satpy/issues/1029), [1029](https://github.com/pytroll/satpy/issues/1029)) #### Features added * [PR 1128](https://github.com/pytroll/satpy/pull/1128) - Add tm5_constant_a and tm5_constant_b for tropomi_l2 * [PR 1126](https://github.com/pytroll/satpy/pull/1126) - Update omps edr reader and hdf5_utils to handle OMPS SO2 data from FMI ([883](https://github.com/pytroll/satpy/issues/883), [630](https://github.com/pytroll/satpy/issues/630)) * [PR 1121](https://github.com/pytroll/satpy/pull/1121) - HY-2B scatterometer l2b hdf5 reader * [PR 1117](https://github.com/pytroll/satpy/pull/1117) - Add support for satpy.composites entry points * [PR 1113](https://github.com/pytroll/satpy/pull/1113) - Add sensor-name property to NWCSAF readers ([1112](https://github.com/pytroll/satpy/issues/1112), [1111](https://github.com/pytroll/satpy/issues/1111)) * [PR 1109](https://github.com/pytroll/satpy/pull/1109) - Fix dask and attribute issue in seviri_l1b_nc reader ([1075](https://github.com/pytroll/satpy/issues/1075)) * [PR 1095](https://github.com/pytroll/satpy/pull/1095) - Switch to pytest in CI and add unstable dependency environment ([1059](https://github.com/pytroll/satpy/issues/1059)) * [PR 1091](https://github.com/pytroll/satpy/pull/1091) - Add assembled_lat_bounds, assembled_lon_bounds and time variables * [PR 1071](https://github.com/pytroll/satpy/pull/1071) - Add SEVIRI L2 GRIB reader * [PR 1044](https://github.com/pytroll/satpy/pull/1044) - Set travis and appveyor numpy version back to 'stable' * [PR 845](https://github.com/pytroll/satpy/pull/845) - MTG: get projection and extent information from file ([840](https://github.com/pytroll/satpy/issues/840), [840](https://github.com/pytroll/satpy/issues/840)) * [PR 606](https://github.com/pytroll/satpy/pull/606) - Add enhanced (more natural) version of natural colors composite #### Documentation changes * [PR 1130](https://github.com/pytroll/satpy/pull/1130) - Add note about datatype in custom reader documentation * [PR 1118](https://github.com/pytroll/satpy/pull/1118) - Fix supported Python version in devguide ## Version 0.20.0 (2020/02/25) ### Issues Closed * [Issue 1077](https://github.com/pytroll/satpy/issues/1077) - Tropomi l2 reader needs to handle more filenames ([PR 1078](https://github.com/pytroll/satpy/pull/1078)) * [Issue 1076](https://github.com/pytroll/satpy/issues/1076) - Metop level 2 EUMETCAST BUFR reader ([PR 1079](https://github.com/pytroll/satpy/pull/1079)) * [Issue 1004](https://github.com/pytroll/satpy/issues/1004) - Computing the lons and lats of metop granules from the eps_l1b reader is painfully slow ([PR 1063](https://github.com/pytroll/satpy/pull/1063)) * [Issue 1002](https://github.com/pytroll/satpy/issues/1002) - Resampling of long passes of metop l1b eps data gives strange results * [Issue 928](https://github.com/pytroll/satpy/issues/928) - Satpy Writer 'geotiff' exists but could not be loaded * [Issue 
924](https://github.com/pytroll/satpy/issues/924) - eps_l1b reader does not accept more than 1 veadr element ([PR 1063](https://github.com/pytroll/satpy/pull/1063)) * [Issue 809](https://github.com/pytroll/satpy/issues/809) - Update avhrr_l1b_aapp reader ([PR 811](https://github.com/pytroll/satpy/pull/811)) * [Issue 112](https://github.com/pytroll/satpy/issues/112) - Python 2 Cruft ([PR 1047](https://github.com/pytroll/satpy/pull/1047)) In this release 8 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 1084](https://github.com/pytroll/satpy/pull/1084) - Add latitude_bounds and longitude_bounds to tropomi_l2 * [PR 1078](https://github.com/pytroll/satpy/pull/1078) - Tropomi l2 reader to handle more types of products ([1077](https://github.com/pytroll/satpy/issues/1077)) * [PR 1072](https://github.com/pytroll/satpy/pull/1072) - Fix the omerc-bb area to use a sphere as ellps * [PR 1066](https://github.com/pytroll/satpy/pull/1066) - Rename natural_color_sun to natural_color in generic VIS/IR RGB recipes * [PR 1063](https://github.com/pytroll/satpy/pull/1063) - Fix eps infinite loop ([924](https://github.com/pytroll/satpy/issues/924), [1004](https://github.com/pytroll/satpy/issues/1004)) * [PR 1058](https://github.com/pytroll/satpy/pull/1058) - Work around changes in xarray 0.15 * [PR 1057](https://github.com/pytroll/satpy/pull/1057) - lowercase the sensor name * [PR 1055](https://github.com/pytroll/satpy/pull/1055) - Fix sst standard name * [PR 1049](https://github.com/pytroll/satpy/pull/1049) - Fix handling of paths with forward slashes on Windows * [PR 1048](https://github.com/pytroll/satpy/pull/1048) - Fix AMI L1b reader incorrectly grouping files * [PR 1045](https://github.com/pytroll/satpy/pull/1045) - Update hrpt.py for new pygac syntax * [PR 1043](https://github.com/pytroll/satpy/pull/1043) - Update seviri icare reader that handles differing dataset versions * [PR 1042](https://github.com/pytroll/satpy/pull/1042) - Replace a unicode hyphen in the glm_l2 reader * [PR 1041](https://github.com/pytroll/satpy/pull/1041) - Unify Dataset attribute naming in SEVIRI L2 BUFR-reader #### Features added * [PR 1082](https://github.com/pytroll/satpy/pull/1082) - Update SLSTR composites * [PR 1079](https://github.com/pytroll/satpy/pull/1079) - Metop level 2 EUMETCAST BUFR reader ([1076](https://github.com/pytroll/satpy/issues/1076)) * [PR 1067](https://github.com/pytroll/satpy/pull/1067) - Add GOES-17 support to the 'geocat' reader * [PR 1065](https://github.com/pytroll/satpy/pull/1065) - Add AHI airmass, ash, dust, fog, and night_microphysics RGBs * [PR 1064](https://github.com/pytroll/satpy/pull/1064) - Adjust default blending in DayNightCompositor * [PR 1061](https://github.com/pytroll/satpy/pull/1061) - Add support for NUCAPS Science EDRs * [PR 1052](https://github.com/pytroll/satpy/pull/1052) - Delegate dask delays to pyninjotiff * [PR 1047](https://github.com/pytroll/satpy/pull/1047) - Remove deprecated abstractproperty usage ([112](https://github.com/pytroll/satpy/issues/112)) * [PR 1020](https://github.com/pytroll/satpy/pull/1020) - Feature Sentinel-3 Level-2 SST * [PR 988](https://github.com/pytroll/satpy/pull/988) - Remove py27 tests and switch to py38 * [PR 964](https://github.com/pytroll/satpy/pull/964) - Update SEVIRI L2 BUFR reader to handle BUFR products from EUMETSAT Data Centre * [PR 839](https://github.com/pytroll/satpy/pull/839) - Add support of colorbar * [PR 811](https://github.com/pytroll/satpy/pull/811) - Daskify and test avhrr_l1b_aapp reader 

#### Documentation changes

* [PR 1068](https://github.com/pytroll/satpy/pull/1068) - Fix a typo in writer 'filename' documentation
* [PR 1056](https://github.com/pytroll/satpy/pull/1056) - Fix name of natural_color composite in quickstart

#### Backwards incompatible changes

* [PR 1066](https://github.com/pytroll/satpy/pull/1066) - Rename natural_color_sun to natural_color in generic VIS/IR RGB recipes
* [PR 988](https://github.com/pytroll/satpy/pull/988) - Remove py27 tests and switch to py38

In this release 31 pull requests were closed.

## Version 0.19.1 (2020/01/10)

### Issues Closed

* [Issue 1030](https://github.com/pytroll/satpy/issues/1030) - Geostationary padding results in wrong area definition for AHI mesoscale sectors. ([PR 1037](https://github.com/pytroll/satpy/pull/1037))
* [Issue 1029](https://github.com/pytroll/satpy/issues/1029) - NetCDF (CF) writer doesn't include semi_minor_axis/semi_major_axis for new versions of pyproj ([PR 1040](https://github.com/pytroll/satpy/pull/1040))
* [Issue 1023](https://github.com/pytroll/satpy/issues/1023) - RTD "Edit on Github" broken in "latest" documentation

In this release 3 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1040](https://github.com/pytroll/satpy/pull/1040) - Fix geostationary axis handling in CF writer ([1029](https://github.com/pytroll/satpy/issues/1029))
* [PR 1037](https://github.com/pytroll/satpy/pull/1037) - Fix segment handling for non-FLDK sectors in the AHI HSD reader ([1030](https://github.com/pytroll/satpy/issues/1030))
* [PR 1036](https://github.com/pytroll/satpy/pull/1036) - Fix ABI L1b/L2 time dimension causing issues with newer xarray
* [PR 1034](https://github.com/pytroll/satpy/pull/1034) - Fix AMI geolocation being off by 1 pixel
* [PR 1033](https://github.com/pytroll/satpy/pull/1033) - Fix avhrr_l1b_aapp reader not including standard_name metadata
* [PR 1031](https://github.com/pytroll/satpy/pull/1031) - Fix tropomi_l2 reader not using y and x dimension names

#### Features added

* [PR 1035](https://github.com/pytroll/satpy/pull/1035) - Add additional Sentinel 3 OLCI 2 datasets
* [PR 1027](https://github.com/pytroll/satpy/pull/1027) - Update SCMI writer and VIIRS EDR Flood reader to work for pre-tiled data

#### Documentation changes

* [PR 1032](https://github.com/pytroll/satpy/pull/1032) - Add documentation about y and x dimensions for custom readers

In this release 9 pull requests were closed.

## Version 0.19.0 (2019/12/30)

### Issues Closed

* [Issue 996](https://github.com/pytroll/satpy/issues/996) - In the sar-c_safe reader, add platform_name to the attribute. ([PR 998](https://github.com/pytroll/satpy/pull/998))
* [Issue 991](https://github.com/pytroll/satpy/issues/991) - Secondary file name patterns aren't used if the first doesn't match
* [Issue 975](https://github.com/pytroll/satpy/issues/975) - Add HRV navigation to `seviri_l1b_native`-reader ([PR 985](https://github.com/pytroll/satpy/pull/985))
* [Issue 972](https://github.com/pytroll/satpy/issues/972) - MTG-FCI-FDHSI reader is slow, apparently not actually dask-aware ([PR 981](https://github.com/pytroll/satpy/pull/981))
* [Issue 970](https://github.com/pytroll/satpy/issues/970) - Pad all geostationary L1 data to full disk area ([PR 977](https://github.com/pytroll/satpy/pull/977))
* [Issue 960](https://github.com/pytroll/satpy/issues/960) - Factorize area def computation in jma_hrit ([PR 978](https://github.com/pytroll/satpy/pull/978))
* [Issue 957](https://github.com/pytroll/satpy/issues/957) - Rayleigh correction in bands l2 of the ABI sensor
* [Issue 954](https://github.com/pytroll/satpy/issues/954) - Mask composites using cloud products ([PR 982](https://github.com/pytroll/satpy/pull/982))
* [Issue 949](https://github.com/pytroll/satpy/issues/949) - Make a common function for geostationary area_extent computation ([PR 952](https://github.com/pytroll/satpy/pull/952))
* [Issue 807](https://github.com/pytroll/satpy/issues/807) - Add a MIMIC-TPW2 reader ([PR 858](https://github.com/pytroll/satpy/pull/858))
* [Issue 782](https://github.com/pytroll/satpy/issues/782) - Update custom reader documentation to mention coordinates and available datasets ([PR 1019](https://github.com/pytroll/satpy/pull/1019))
* [Issue 486](https://github.com/pytroll/satpy/issues/486) - Add GMS series satellite data reader

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1021](https://github.com/pytroll/satpy/pull/1021) - Fix padding of segmented geostationary images
* [PR 1010](https://github.com/pytroll/satpy/pull/1010) - Fix missing part in ahi_hrit file pattern
* [PR 1007](https://github.com/pytroll/satpy/pull/1007) - Fix `ahi_hrit` expected segments
* [PR 1006](https://github.com/pytroll/satpy/pull/1006) - Rename standard_name for various readers to be consistent
* [PR 993](https://github.com/pytroll/satpy/pull/993) - Fix VIIRS EDR Flood file patterns not working for AOI files ([243](https://github.com/ssec/polar2grid/issues/243))
* [PR 989](https://github.com/pytroll/satpy/pull/989) - Fix generation of solar and satellite angles when lon/lats are invalid
* [PR 976](https://github.com/pytroll/satpy/pull/976) - CF Writer Improvements
* [PR 974](https://github.com/pytroll/satpy/pull/974) - Fix available_composite_names including night_background static images ([239](https://github.com/ssec/polar2grid/issues/239))
* [PR 969](https://github.com/pytroll/satpy/pull/969) - Fix HDF4 handling of scalar attributes
* [PR 966](https://github.com/pytroll/satpy/pull/966) - Add the fire temperature products to AHI
* [PR 931](https://github.com/pytroll/satpy/pull/931) - Update coord2area_def.py

#### Features added

* [PR 1012](https://github.com/pytroll/satpy/pull/1012) - Implement a small cviirs speedup
* [PR 1011](https://github.com/pytroll/satpy/pull/1011) - Provide only dask arrays to pyspectral's nir reflectance computation
* [PR 1009](https://github.com/pytroll/satpy/pull/1009) - Add support for SEVIRI data from icare
* [PR 1005](https://github.com/pytroll/satpy/pull/1005) - Remove unused reader xslice/yslice keyword arguments
* [PR 1003](https://github.com/pytroll/satpy/pull/1003) - Update copyright header in readers. Add and fix docstrings.
* [PR 998](https://github.com/pytroll/satpy/pull/998) - Add platform name to attributes of sar_c_safe reader ([996](https://github.com/pytroll/satpy/issues/996))
* [PR 997](https://github.com/pytroll/satpy/pull/997) - Add check if prerequisites is used
* [PR 994](https://github.com/pytroll/satpy/pull/994) - Add LAC support to the avhrr-gac-lac reader
* [PR 992](https://github.com/pytroll/satpy/pull/992) - Add hrv_clouds, hrv_fog and natural_with_night_fog composites to seviri.yaml
* [PR 987](https://github.com/pytroll/satpy/pull/987) - scene.aggregate will now handle a SwathDefinition
* [PR 985](https://github.com/pytroll/satpy/pull/985) - Add HRV full disk navigation for `seviri_l1b_native`-reader ([975](https://github.com/pytroll/satpy/issues/975))
* [PR 984](https://github.com/pytroll/satpy/pull/984) - Add on-the-fly decompression to the AHI HSD reader
* [PR 982](https://github.com/pytroll/satpy/pull/982) - Add simple masking compositor ([954](https://github.com/pytroll/satpy/issues/954))
* [PR 981](https://github.com/pytroll/satpy/pull/981) - Optionally cache small data variables and file handles ([972](https://github.com/pytroll/satpy/issues/972))
* [PR 980](https://github.com/pytroll/satpy/pull/980) - Read the meta_data dictionary from pygac
* [PR 978](https://github.com/pytroll/satpy/pull/978) - Factorize area computation in hrit_jma ([960](https://github.com/pytroll/satpy/issues/960))
* [PR 977](https://github.com/pytroll/satpy/pull/977) - Add a YAMLReader to pad segmented geo data ([970](https://github.com/pytroll/satpy/issues/970))
* [PR 976](https://github.com/pytroll/satpy/pull/976) - CF Writer Improvements
* [PR 966](https://github.com/pytroll/satpy/pull/966) - Add the fire temperature products to AHI
* [PR 962](https://github.com/pytroll/satpy/pull/962) - Add support for meteo file in OLCI L1B reader
* [PR 961](https://github.com/pytroll/satpy/pull/961) - Fix default radius_of_influence for lon/lat AreaDefinitions
* [PR 952](https://github.com/pytroll/satpy/pull/952) - Adds a common function for geostationary projection / area definition calculations ([949](https://github.com/pytroll/satpy/issues/949))
* [PR 920](https://github.com/pytroll/satpy/pull/920) - Transverse Mercator section added in cf writer
* [PR 908](https://github.com/pytroll/satpy/pull/908) - Add interface to pyresample gradient resampler
* [PR 858](https://github.com/pytroll/satpy/pull/858) - Mimic TPW Reader ([807](https://github.com/pytroll/satpy/issues/807))
* [PR 854](https://github.com/pytroll/satpy/pull/854) - Add GOES-R GLM L2 Gridded product reader and small ABI L1b changes

#### Documentation changes

* [PR 1025](https://github.com/pytroll/satpy/pull/1025) - Switch to configuration file for readthedocs
* [PR 1019](https://github.com/pytroll/satpy/pull/1019) - Add more information about creating custom readers ([782](https://github.com/pytroll/satpy/issues/782))
* [PR 1018](https://github.com/pytroll/satpy/pull/1018) - Add information to Quickstart on basics of getting measurement values and navigation
* [PR 1008](https://github.com/pytroll/satpy/pull/1008) - Add documentation for combine_metadata function
* [PR 1003](https://github.com/pytroll/satpy/pull/1003) - Update copyright header in readers. Add and fix docstrings.
* [PR 1001](https://github.com/pytroll/satpy/pull/1001) - Get travis badge from master branch
* [PR 999](https://github.com/pytroll/satpy/pull/999) - Add FCI L1C reader short and long name metadata
* [PR 968](https://github.com/pytroll/satpy/pull/968) - Add information about multi-threaded compression with geotiff creation

In this release 45 pull requests were closed.

## Version 0.18.1 (2019/11/07)

### Pull Requests Merged

#### Bugs fixed

* [PR 959](https://github.com/pytroll/satpy/pull/959) - Fix `grid` argument handling in overlaying

In this release 1 pull request was closed.

## Version 0.18.0 (2019/11/06)

### Issues Closed

* [Issue 944](https://github.com/pytroll/satpy/issues/944) - Multiple errors when processing OLCI data. ([PR 945](https://github.com/pytroll/satpy/pull/945))
* [Issue 940](https://github.com/pytroll/satpy/issues/940) - Loading of DNB data from VIIRS compact SDR is slow ([PR 941](https://github.com/pytroll/satpy/pull/941))
* [Issue 922](https://github.com/pytroll/satpy/issues/922) - Clarify orbital_parameters metadata ([PR 950](https://github.com/pytroll/satpy/pull/950))
* [Issue 888](https://github.com/pytroll/satpy/issues/888) - Unintended/wrong behaviour of getitem method in HDF5FileHandler? ([PR 886](https://github.com/pytroll/satpy/pull/886))
* [Issue 737](https://github.com/pytroll/satpy/issues/737) - Add reader for GEO-KOMPSAT AMI ([PR 911](https://github.com/pytroll/satpy/pull/911))

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 953](https://github.com/pytroll/satpy/pull/953) - Encode header attributes in CF writer
* [PR 945](https://github.com/pytroll/satpy/pull/945) - Fix bug in OLCI reader that caused multiple error messages to print ([944](https://github.com/pytroll/satpy/issues/944))
* [PR 942](https://github.com/pytroll/satpy/pull/942) - Fix VIIRS EDR Active Fires not assigning a _FillValue to confidence_pct
* [PR 939](https://github.com/pytroll/satpy/pull/939) - Fix MERSI-2 natural_color composite using the wrong band for sharpening
* [PR 938](https://github.com/pytroll/satpy/pull/938) - Fix MultiScene.save_animation to work with new dask.distributed versions
* [PR 914](https://github.com/pytroll/satpy/pull/914) - Cleaning up and adding MERSI-2 RGB composites

#### Features added

* [PR 955](https://github.com/pytroll/satpy/pull/955) - Code clean-up for SEVIRI L2 BUFR-reader
* [PR 953](https://github.com/pytroll/satpy/pull/953) - Encode header attributes in CF writer
* [PR 948](https://github.com/pytroll/satpy/pull/948) - Add the possibility to include scale and offset in geotiffs
* [PR 947](https://github.com/pytroll/satpy/pull/947) - Feature mitiff palette
* [PR 941](https://github.com/pytroll/satpy/pull/941) - Speed up cviirs tiepoint interpolation ([940](https://github.com/pytroll/satpy/issues/940))
* [PR 935](https://github.com/pytroll/satpy/pull/935) - Adapt avhrr_l1b_gaclac to recent pygac changes
* [PR 934](https://github.com/pytroll/satpy/pull/934) - Update add_overlay to make use of the full pycoast capabilities
* [PR 911](https://github.com/pytroll/satpy/pull/911) - Add GK-2A AMI L1B Reader ([737](https://github.com/pytroll/satpy/issues/737))
* [PR 886](https://github.com/pytroll/satpy/pull/886) - Reader for NWCSAF/MSG 2013 format ([888](https://github.com/pytroll/satpy/issues/888))
* [PR 769](https://github.com/pytroll/satpy/pull/769) - Added initial version of an MSG BUFR reader and TOZ product yaml file
* [PR 586](https://github.com/pytroll/satpy/pull/586) - Update handling of reading colormaps from files in enhancements

#### Documentation changes

* [PR 950](https://github.com/pytroll/satpy/pull/950) - Clarify documentation of orbital_parameters metadata ([922](https://github.com/pytroll/satpy/issues/922))
* [PR 943](https://github.com/pytroll/satpy/pull/943) - Fix sphinx docs generation after setuptools_scm migration

In this release 19 pull requests were closed.

## Version 0.17.1 (2019/10/08)

### Issues Closed

* [Issue 918](https://github.com/pytroll/satpy/issues/918) - satpy 0.17 does not work with pyresample 1.11 ([PR 927](https://github.com/pytroll/satpy/pull/927))
* [Issue 902](https://github.com/pytroll/satpy/issues/902) - background compositor with colorized ir_clouds and static image problem ([PR 917](https://github.com/pytroll/satpy/pull/917))
* [Issue 853](https://github.com/pytroll/satpy/issues/853) - scene.available_composite_names() returns a composite even if the dependency is not fulfilled ([PR 921](https://github.com/pytroll/satpy/pull/921))
* [Issue 830](https://github.com/pytroll/satpy/issues/830) - generic_image reader doesn't read area from .yaml file? ([PR 925](https://github.com/pytroll/satpy/pull/925))

In this release 4 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 925](https://github.com/pytroll/satpy/pull/925) - Fix area handling in StaticImageCompositor ([830](https://github.com/pytroll/satpy/issues/830))
* [PR 923](https://github.com/pytroll/satpy/pull/923) - Make the olci l2 mask a bool array instead of floats
* [PR 921](https://github.com/pytroll/satpy/pull/921) - Fix Scene.available_composite_names showing unavailable composites ([853](https://github.com/pytroll/satpy/issues/853))
* [PR 917](https://github.com/pytroll/satpy/pull/917) - Fix BackgroundCompositor not retaining input metadata ([902](https://github.com/pytroll/satpy/issues/902))

#### Features added

* [PR 927](https://github.com/pytroll/satpy/pull/927) - Fix resampler imports ([918](https://github.com/pytroll/satpy/issues/918))

#### Backwards incompatible changes

* [PR 921](https://github.com/pytroll/satpy/pull/921) - Fix Scene.available_composite_names showing unavailable composites ([853](https://github.com/pytroll/satpy/issues/853))

In this release 6 pull requests were closed.

## Version 0.17.0 (2019/10/01)

### Issues Closed

* [Issue 896](https://github.com/pytroll/satpy/issues/896) - Satpy built-in composite for dust RGB (MSG/SEVIRI data) does not generate expected color pattern
* [Issue 893](https://github.com/pytroll/satpy/issues/893) - Resampling data read with generic image reader corrupts data
* [Issue 876](https://github.com/pytroll/satpy/issues/876) - Update reader configuration with human-readable long names ([PR 887](https://github.com/pytroll/satpy/pull/887))
* [Issue 865](https://github.com/pytroll/satpy/issues/865) - Himawari-8 B13 image is negative?
* [Issue 863](https://github.com/pytroll/satpy/issues/863) - Record what the values from MODIS cloud mask represent
* [Issue 852](https://github.com/pytroll/satpy/issues/852) - No module named geotiepoints.modisinterpolator
* [Issue 851](https://github.com/pytroll/satpy/issues/851) - Scene(reader, filenames = [radiance, geoloc]) expects filenames to be in a specific format
* [Issue 850](https://github.com/pytroll/satpy/issues/850) - group_files function returns only one dictionary ([PR 855](https://github.com/pytroll/satpy/pull/855))
* [Issue 848](https://github.com/pytroll/satpy/issues/848) - FCI composites not loadable ([PR 849](https://github.com/pytroll/satpy/pull/849))
* [Issue 846](https://github.com/pytroll/satpy/issues/846) - Segmentation fault calculating overlay projection with MTG
* [Issue 762](https://github.com/pytroll/satpy/issues/762) - Add x and y coordinates to all loaded gridded DataArrays
* [Issue 735](https://github.com/pytroll/satpy/issues/735) - Bilinear interpolation doesn't work with `StackedAreaDefinitions`
* [Issue 678](https://github.com/pytroll/satpy/issues/678) - Consider using setuptools-scm instead of versioneer ([PR 856](https://github.com/pytroll/satpy/pull/856))
* [Issue 617](https://github.com/pytroll/satpy/issues/617) - Update 'generic_image' reader to use rasterio for area creation ([PR 847](https://github.com/pytroll/satpy/pull/847))
* [Issue 603](https://github.com/pytroll/satpy/issues/603) - Support FY-4A hdf data ([PR 751](https://github.com/pytroll/satpy/pull/751))

In this release 15 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 915](https://github.com/pytroll/satpy/pull/915) - Fix CRS object being recreated when adding CRS coordinate
* [PR 905](https://github.com/pytroll/satpy/pull/905) - Fix ABI L2 reader not scaling and masking data
* [PR 901](https://github.com/pytroll/satpy/pull/901) - Fix compact viirs angle interpolation at the poles
* [PR 891](https://github.com/pytroll/satpy/pull/891) - Fix HDF4 reading utility using dtype classes instead of instances
* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral
* [PR 889](https://github.com/pytroll/satpy/pull/889) - Fix the ninjotiff writer to provide correct scale and offset
* [PR 884](https://github.com/pytroll/satpy/pull/884) - Update mersi2_l1b sensor name to mersi-2 to match pyspectral
* [PR 882](https://github.com/pytroll/satpy/pull/882) - Bug in mitiff writer; calibration information is not written in the imagedescription
* [PR 877](https://github.com/pytroll/satpy/pull/877) - Fix standard_name and units for T4/T13 in viirs_edr_active_fires reader
* [PR 875](https://github.com/pytroll/satpy/pull/875) - Fix error in hncc_dnb composite test
* [PR 871](https://github.com/pytroll/satpy/pull/871) - Fix FY-4 naming to follow WMO Oscar naming
* [PR 869](https://github.com/pytroll/satpy/pull/869) - Fix the nwcsaf-nc reader to drop scale and offset once data is scaled
* [PR 867](https://github.com/pytroll/satpy/pull/867) - Fix attribute datatypes in CF Writer
* [PR 837](https://github.com/pytroll/satpy/pull/837) - Fix Satpy tests to work with new versions of pyresample
* [PR 790](https://github.com/pytroll/satpy/pull/790) - Modify the SLSTR file pattern to support stripe and frame products

#### Features added

* [PR 910](https://github.com/pytroll/satpy/pull/910) - Add near real-time and reprocessed file patterns to TROPOMI L1b reader
* [PR 907](https://github.com/pytroll/satpy/pull/907) - Handle bad orbit coefficients in SEVIRI HRIT header
* [PR 906](https://github.com/pytroll/satpy/pull/906) - Avoid xarray 0.13.0
* [PR 903](https://github.com/pytroll/satpy/pull/903) - Fix HRV area definition tests
* [PR 898](https://github.com/pytroll/satpy/pull/898) - Add night lights compositor and SEVIRI day/night composite
* [PR 897](https://github.com/pytroll/satpy/pull/897) - Cache slicing arrays in bilinear resampler
* [PR 895](https://github.com/pytroll/satpy/pull/895) - Add the possibility to pad the HRV in the seviri hrit reader
* [PR 892](https://github.com/pytroll/satpy/pull/892) - Update coefficients for FY-3B VIRR reflectance calibration
* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral
* [PR 881](https://github.com/pytroll/satpy/pull/881) - Make it possible to reverse a built-in colormap in enhancements
* [PR 880](https://github.com/pytroll/satpy/pull/880) - Replace Numpy files with zarr for resampling LUT caching
* [PR 874](https://github.com/pytroll/satpy/pull/874) - Hardcoding of mersi2 l1b reader valid_range for channel 24 and 25 as these are wrong in the HDF data
* [PR 873](https://github.com/pytroll/satpy/pull/873) - Add mersi2 level 1b ears data file names to the reader
* [PR 872](https://github.com/pytroll/satpy/pull/872) - Fix ABI L1B coordinates to be equivalent at all resolutions
* [PR 856](https://github.com/pytroll/satpy/pull/856) - Switch to setuptools_scm for automatic version numbers from git tags ([678](https://github.com/pytroll/satpy/issues/678))
* [PR 849](https://github.com/pytroll/satpy/pull/849) - Make composites available to FCI FDHSI L1C ([848](https://github.com/pytroll/satpy/issues/848))
* [PR 847](https://github.com/pytroll/satpy/pull/847) - Update 'generic_image' reader to use rasterio for area creation ([617](https://github.com/pytroll/satpy/issues/617))
* [PR 767](https://github.com/pytroll/satpy/pull/767) - Add a reader for NOAA GOES-R ABI L2+ products (abi_l2_nc)
* [PR 751](https://github.com/pytroll/satpy/pull/751) - Add a reader for FY-4A AGRI level 1 data ([603](https://github.com/pytroll/satpy/issues/603))
* [PR 672](https://github.com/pytroll/satpy/pull/672) - Add CIMSS True Color (Natural Color) RGB recipes

#### Documentation changes

* [PR 916](https://github.com/pytroll/satpy/pull/916) - Update orbit coefficient docstrings in seviri_l1b_hrit
* [PR 887](https://github.com/pytroll/satpy/pull/887) - Add more reader metadata like long_name and description ([876](https://github.com/pytroll/satpy/issues/876))
* [PR 878](https://github.com/pytroll/satpy/pull/878) - Add Suyash458 to AUTHORS.md

#### Backwards incompatible changes

* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral

In this release 39 pull requests were closed.

## Version 0.16.1 (2019/07/04)

### Issues Closed

* [Issue 835](https://github.com/pytroll/satpy/issues/835) - modis_l2 reader is not working properly.
* [Issue 829](https://github.com/pytroll/satpy/issues/829) - Citing satpy ([PR 833](https://github.com/pytroll/satpy/pull/833))
* [Issue 826](https://github.com/pytroll/satpy/issues/826) - SEVIRI channels loaded from netcdf in Scene object appear to have wrong names and calibration ([PR 827](https://github.com/pytroll/satpy/pull/827))
* [Issue 823](https://github.com/pytroll/satpy/issues/823) - Netcdf produced with the satpy CF writer don't pass cf-checker ([PR 825](https://github.com/pytroll/satpy/pull/825))
* [Issue 398](https://github.com/pytroll/satpy/issues/398) - Add AUTHORS file to replace individual copyright authors

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 843](https://github.com/pytroll/satpy/pull/843) - Remove Invalid Metadata From ACSPO Reader
* [PR 841](https://github.com/pytroll/satpy/pull/841) - Temporarily remove longitude/latitude 2D xarray coordinates
* [PR 838](https://github.com/pytroll/satpy/pull/838) - Fix 'abi_l1b' reader keeping _Unsigned attribute
* [PR 836](https://github.com/pytroll/satpy/pull/836) - Fix composites not being recorded with desired resolution in deptree
* [PR 831](https://github.com/pytroll/satpy/pull/831) - Fix EWA resampling tests not properly testing caching
* [PR 828](https://github.com/pytroll/satpy/pull/828) - Fix delayed generation of composites and composite resolution
* [PR 827](https://github.com/pytroll/satpy/pull/827) - Corrected nc_key for channels WV_062, WV_073, IR_087 ([826](https://github.com/pytroll/satpy/issues/826))
* [PR 825](https://github.com/pytroll/satpy/pull/825) - Fix the cf writer for better CF compliance ([823](https://github.com/pytroll/satpy/issues/823))

#### Features added

* [PR 842](https://github.com/pytroll/satpy/pull/842) - Fix cviirs reader to be more dask-friendly
* [PR 832](https://github.com/pytroll/satpy/pull/832) - Add pre-commit configuration

#### Documentation changes

* [PR 813](https://github.com/pytroll/satpy/pull/813) - Add some documentation to modis readers similar to hrit

#### Backwards incompatible changes

* [PR 844](https://github.com/pytroll/satpy/pull/844) - Change default CF writer engine to follow xarray defaults

In this release 12 pull requests were closed.

## Version 0.16.0 (2019/06/18)

### Issues Closed

* [Issue 795](https://github.com/pytroll/satpy/issues/795) - Composites delayed in the presence of non-dimensional coordinates ([PR 796](https://github.com/pytroll/satpy/pull/796))
* [Issue 753](https://github.com/pytroll/satpy/issues/753) - seviri l1b netcdf reader needs to be updated due to EUM fixing Attribute Issue ([PR 791](https://github.com/pytroll/satpy/pull/791))
* [Issue 734](https://github.com/pytroll/satpy/issues/734) - Add a compositor that can use static images ([PR 804](https://github.com/pytroll/satpy/pull/804))
* [Issue 670](https://github.com/pytroll/satpy/issues/670) - Refine Satellite Position
* [Issue 640](https://github.com/pytroll/satpy/issues/640) - question: save geotiff without modifying pixel value
* [Issue 625](https://github.com/pytroll/satpy/issues/625) - Fix inconsistency between save_dataset and save_datasets ([PR 803](https://github.com/pytroll/satpy/pull/803))
* [Issue 460](https://github.com/pytroll/satpy/issues/460) - Creating day/night composites ([PR 804](https://github.com/pytroll/satpy/pull/804))

In this release 7 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 805](https://github.com/pytroll/satpy/pull/805) - Fix 3a3b transition in the aapp l1b reader
* [PR 803](https://github.com/pytroll/satpy/pull/803) - Fix save_datasets always using geotiff writer regardless of filename ([625](https://github.com/pytroll/satpy/issues/625))
* [PR 802](https://github.com/pytroll/satpy/pull/802) - Factorize and improve modis reader's interpolation
* [PR 800](https://github.com/pytroll/satpy/pull/800) - Fix 'virr_l1b' reader when slope attribute is 0
* [PR 796](https://github.com/pytroll/satpy/pull/796) - Drop non-dimensional coordinates in Compositor ([795](https://github.com/pytroll/satpy/issues/795))
* [PR 792](https://github.com/pytroll/satpy/pull/792) - Bug mitiff writer when only one channel is to be written with calibration information
* [PR 791](https://github.com/pytroll/satpy/pull/791) - Fix handling of file attributes in seviri_l1b_nc reader ([753](https://github.com/pytroll/satpy/issues/753))

#### Features added

* [PR 821](https://github.com/pytroll/satpy/pull/821) - Remove warning about unused kwargs in YAML reader
* [PR 820](https://github.com/pytroll/satpy/pull/820) - Add support for NWCSAF GEO v2018, retain support for v2016
* [PR 818](https://github.com/pytroll/satpy/pull/818) - Add TLEs to dataset attributes in avhrr_l1b_gaclac
* [PR 816](https://github.com/pytroll/satpy/pull/816) - Add grouping parameters for the 'viirs_sdr' reader
* [PR 814](https://github.com/pytroll/satpy/pull/814) - Reader for Hydrology SAF precipitation products
* [PR 806](https://github.com/pytroll/satpy/pull/806) - Add flag_meanings and flag_values to 'viirs_edr_active_fires' categories
* [PR 805](https://github.com/pytroll/satpy/pull/805) - Fix 3a3b transition in the aapp l1b reader
* [PR 804](https://github.com/pytroll/satpy/pull/804) - Add compositor for adding an image as a background ([734](https://github.com/pytroll/satpy/issues/734), [460](https://github.com/pytroll/satpy/issues/460))
* [PR 794](https://github.com/pytroll/satpy/pull/794) - Add 'orbital_parameters' metadata to all geostationary satellite readers
* [PR 788](https://github.com/pytroll/satpy/pull/788) - Add new 'crs' coordinate variable when pyproj 2.0+ is installed
* [PR 779](https://github.com/pytroll/satpy/pull/779) - Add TROPOMI L2 reader (tropomi_l2)
* [PR 736](https://github.com/pytroll/satpy/pull/736) - CF Writer: Attribute encoding, groups and non-dimensional coordinates. Plus: Raw SEVIRI HRIT metadata
* [PR 687](https://github.com/pytroll/satpy/pull/687) - Add Vaisala GLD360-reader.

#### Documentation changes

* [PR 797](https://github.com/pytroll/satpy/pull/797) - Sort AUTHORS.md file by last name

#### Backwards incompatible changes

* [PR 822](https://github.com/pytroll/satpy/pull/822) - Deprecate old reader names so that they are no longer recognized ([598](https://github.com/pytroll/satpy/issues/598))
* [PR 815](https://github.com/pytroll/satpy/pull/815) - Remove legacy GDAL-based geotiff writer support

In this release 23 pull requests were closed.

## Version 0.15.2 (2019/05/22)

### Issues Closed

* [Issue 785](https://github.com/pytroll/satpy/issues/785) - Loading cache for resampling scene fails with numpy 1.16.3 ([PR 787](https://github.com/pytroll/satpy/pull/787))
* [Issue 777](https://github.com/pytroll/satpy/issues/777) - Log warning and error messages are not printed to console ([PR 778](https://github.com/pytroll/satpy/pull/778))
* [Issue 776](https://github.com/pytroll/satpy/issues/776) - africa projection yields CRSError when saving dataset ([PR 780](https://github.com/pytroll/satpy/pull/780))
* [Issue 774](https://github.com/pytroll/satpy/issues/774) - ABI Level 1b long_name when reflectances and brightness temperatures are calculated
* [Issue 766](https://github.com/pytroll/satpy/issues/766) - MODIS l1b reader seems to switch latitude and longitude for 500m data ([PR 781](https://github.com/pytroll/satpy/pull/781))
* [Issue 742](https://github.com/pytroll/satpy/issues/742) - GOES16/17 netcdf reader fails with rasterio installed
* [Issue 649](https://github.com/pytroll/satpy/issues/649) - Make MTG-I reader work ([PR 755](https://github.com/pytroll/satpy/pull/755))
* [Issue 466](https://github.com/pytroll/satpy/issues/466) - Fix deprecation warnings with xarray, dask, and numpy
* [Issue 449](https://github.com/pytroll/satpy/issues/449) - Adding coastlines to single channel not working

In this release 9 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 787](https://github.com/pytroll/satpy/pull/787) - Loading resample cache with numpy 1.16.3 ([785](https://github.com/pytroll/satpy/issues/785))
* [PR 781](https://github.com/pytroll/satpy/pull/781) - Fix longitude/latitude being swapped in modis readers ([766](https://github.com/pytroll/satpy/issues/766))
* [PR 780](https://github.com/pytroll/satpy/pull/780) - Fix builtin areas to be compatible with rasterio ([776](https://github.com/pytroll/satpy/issues/776))
* [PR 778](https://github.com/pytroll/satpy/pull/778) - Fix NullHandler not allowing warning/error logs to be printed to console ([777](https://github.com/pytroll/satpy/issues/777))
* [PR 775](https://github.com/pytroll/satpy/pull/775) - Fix 'abi_l1b' reader not updating long_name when calibrating
* [PR 770](https://github.com/pytroll/satpy/pull/770) - Fix typo for mersi2/abi/ahi using bidirection instead of bidirectional
* [PR 763](https://github.com/pytroll/satpy/pull/763) - Fix AVHRR tests importing external mock on Python 3
* [PR 760](https://github.com/pytroll/satpy/pull/760) - Avoid leaking file objects in NetCDF4FileHandler

#### Features added

* [PR 759](https://github.com/pytroll/satpy/pull/759) - Fix the avhrr_l1b_gaclac to support angles, units and avhrr variants
* [PR 755](https://github.com/pytroll/satpy/pull/755) - Update MTG FCI FDHSI L1C reader for latest data format ([649](https://github.com/pytroll/satpy/issues/649))
* [PR 470](https://github.com/pytroll/satpy/pull/470) - Switched `xarray.ufuncs` to `numpy`

#### Documentation changes

* [PR 773](https://github.com/pytroll/satpy/pull/773) - Improve Scene.show documentation
* [PR 771](https://github.com/pytroll/satpy/pull/771) - Update pull request template to include AUTHORS and flake8 changes

In this release 13 pull requests were closed.

## Version 0.15.1 (2019/05/10)

### Pull Requests Merged

#### Bugs fixed

* [PR 761](https://github.com/pytroll/satpy/pull/761) - Fix mersi2_l1b reader setting sensor as a set object

In this release 1 pull request was closed.

## Version 0.15.0 (2019/05/10)

### Issues Closed

* [Issue 758](https://github.com/pytroll/satpy/issues/758) - RuntimeError with NetCDF4FileHandler
* [Issue 730](https://github.com/pytroll/satpy/issues/730) - Rewrite introduction paragraph in documentation ([PR 747](https://github.com/pytroll/satpy/pull/747))
* [Issue 725](https://github.com/pytroll/satpy/issues/725) - Update 'viirs_edr_active_fires' reader to read newest algorithm output ([PR 733](https://github.com/pytroll/satpy/pull/733))
* [Issue 706](https://github.com/pytroll/satpy/issues/706) - Add reader for FY3D MERSI2 L1B data ([PR 740](https://github.com/pytroll/satpy/pull/740))
* [Issue 434](https://github.com/pytroll/satpy/issues/434) - Allow readers to filter the available datasets configured in YAML ([PR 739](https://github.com/pytroll/satpy/pull/739))

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 757](https://github.com/pytroll/satpy/pull/757) - Fix MODIS L1B and L2 readers not reading geolocation properly
* [PR 754](https://github.com/pytroll/satpy/pull/754) - Fix optional modifier dependencies being unloaded for delayed composites
* [PR 750](https://github.com/pytroll/satpy/pull/750) - Add missing warnings import to geotiff writer

#### Features added

* [PR 752](https://github.com/pytroll/satpy/pull/752) - Add scanline timestamps to seviri_l1b_hrit
* [PR 740](https://github.com/pytroll/satpy/pull/740) - Add FY-3D MERSI-2 L1B Reader (mersi2_l1b) ([706](https://github.com/pytroll/satpy/issues/706))
* [PR 739](https://github.com/pytroll/satpy/pull/739) - Refactor available datasets logic to be more flexible ([434](https://github.com/pytroll/satpy/issues/434))
* [PR 738](https://github.com/pytroll/satpy/pull/738) - Remove unused area slice-based filtering in the base reader
* [PR 733](https://github.com/pytroll/satpy/pull/733) - Update VIIRS EDR Active Fires ([725](https://github.com/pytroll/satpy/issues/725))
* [PR 728](https://github.com/pytroll/satpy/pull/728) - Add VIIRS Fire Temperature rgb
* [PR 711](https://github.com/pytroll/satpy/pull/711) - Replace usage of deprecated get_proj_coords_dask
* [PR 611](https://github.com/pytroll/satpy/pull/611) - Add MODIS L2 reader
* [PR 580](https://github.com/pytroll/satpy/pull/580) - Allow colormaps to be saved with geotiff writer
* [PR 532](https://github.com/pytroll/satpy/pull/532) - Add enhancement for VIIRS flood reader

#### Documentation changes

* [PR 747](https://github.com/pytroll/satpy/pull/747) - Update index page introduction ([730](https://github.com/pytroll/satpy/issues/730))

In this release 14 pull requests were closed.

## Version 0.14.2 (2019/04/25)

### Issues Closed

* [Issue 679](https://github.com/pytroll/satpy/issues/679) - Cannot save a multiscene animation - imageio:ffmpeg warning

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 731](https://github.com/pytroll/satpy/pull/731) - Fix viirs sdr reader to allow ivcdb files in the sdr directory
* [PR 726](https://github.com/pytroll/satpy/pull/726) - Bugfixes in the Electro-L reader

#### Features added

* [PR 729](https://github.com/pytroll/satpy/pull/729) - Add "extras" checks to check_satpy utility function

#### Documentation changes

* [PR 724](https://github.com/pytroll/satpy/pull/724) - Add codeowners

In this release 4 pull requests were closed.

## Version 0.14.1 (2019/04/12)

### Issues Closed

* [Issue 716](https://github.com/pytroll/satpy/issues/716) - Reading the EUMETSAT compact viirs format returns wrong platform name (J01 instead of NOAA-20) ([PR 717](https://github.com/pytroll/satpy/pull/717))
* [Issue 710](https://github.com/pytroll/satpy/issues/710) - Question (maybe a bug): Why does RGB array exported with scn.save_dataset contain values greater than 255?

In this release 2 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 721](https://github.com/pytroll/satpy/pull/721) - Consistent platform id attribute across NAT + HRIT SEVIRI readers
* [PR 719](https://github.com/pytroll/satpy/pull/719) - Fix VIIRS 'night_fog' RGB composite recipe to use M12 instead of M14
* [PR 718](https://github.com/pytroll/satpy/pull/718) - Fix 'seviri_l1b_hrit' reader's area creation for pyproj 2.0+
* [PR 717](https://github.com/pytroll/satpy/pull/717) - Fix 'viirs_compact' and 'viirs_l1b' readers to return WMO/Oscar platform name ([716](https://github.com/pytroll/satpy/issues/716))
* [PR 715](https://github.com/pytroll/satpy/pull/715) - Fix hurricane florence demo download to only include M1 files
* [PR 712](https://github.com/pytroll/satpy/pull/712) - Fix 'mitiff' writer not clipping enhanced data before scaling to 8 bit values
* [PR 709](https://github.com/pytroll/satpy/pull/709) - Fix datetime64 use in 'seviri_l1b_hrit' reader for numpy < 1.15
* [PR 708](https://github.com/pytroll/satpy/pull/708) - Fix 'seviri_0deg' and 'seviri_iodc' builtin areas (areas.yaml) not matching reader areas

#### Documentation changes

* [PR 713](https://github.com/pytroll/satpy/pull/713) - Add links to source from API documentation

In this release 9 pull requests were closed.

## Version 0.14.0 (2019/04/09)

### Issues Closed

* [Issue 698](https://github.com/pytroll/satpy/issues/698) - Read WKT geotiff
* [Issue 692](https://github.com/pytroll/satpy/issues/692) - sdr_viirs_l1b reader fails in 0.13, recent master, Works with version 0.12.0 ([PR 693](https://github.com/pytroll/satpy/pull/693))
* [Issue 683](https://github.com/pytroll/satpy/issues/683) - Question: Change image size when saving with satpy.save_dataset ([PR 691](https://github.com/pytroll/satpy/pull/691))
* [Issue 681](https://github.com/pytroll/satpy/issues/681) - incorrect data offset in HSD files ([PR 689](https://github.com/pytroll/satpy/pull/689))
* [Issue 666](https://github.com/pytroll/satpy/issues/666) - Add drawing of lat lon graticules when saving dataset ([PR 668](https://github.com/pytroll/satpy/pull/668))
* [Issue 646](https://github.com/pytroll/satpy/issues/646) - Add 'demo' subpackage for accessing example data ([PR 686](https://github.com/pytroll/satpy/pull/686))
* [Issue 528](https://github.com/pytroll/satpy/issues/528) - Support dask version of PySpectral ([PR 529](https://github.com/pytroll/satpy/pull/529))
* [Issue 511](https://github.com/pytroll/satpy/issues/511) - Add/update documentation about composites and compositors ([PR 705](https://github.com/pytroll/satpy/pull/705))

In this release 8 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 700](https://github.com/pytroll/satpy/pull/700) - Mask out invalid values in the precipitation probability product
* [PR 693](https://github.com/pytroll/satpy/pull/693) - Fix VIIRS SDR reading of visible channels at nighttime ([692](https://github.com/pytroll/satpy/issues/692))
* [PR 689](https://github.com/pytroll/satpy/pull/689) - Fix Himawari HSD reader's incorrect header information ([681](https://github.com/pytroll/satpy/issues/681))
* [PR 688](https://github.com/pytroll/satpy/pull/688) - Fix offset correction in seviri_l1b_hrit
* [PR 685](https://github.com/pytroll/satpy/pull/685) - Fix bug in Scene.resample causing AssertionError
* [PR 677](https://github.com/pytroll/satpy/pull/677) - Fix MultiScene save_animation when distributed isn't installed
* [PR 675](https://github.com/pytroll/satpy/pull/675) - Do not pass `filter_parameters` to the filehandler creation

#### Features added

* [PR 691](https://github.com/pytroll/satpy/pull/691) - Add Scene.aggregate method (python 3 only) ([683](https://github.com/pytroll/satpy/issues/683))
* [PR 686](https://github.com/pytroll/satpy/pull/686) - Add demo subpackage to simplify test data download ([646](https://github.com/pytroll/satpy/issues/646))
* [PR 676](https://github.com/pytroll/satpy/pull/676) - Feature add nightfog modis
* [PR 674](https://github.com/pytroll/satpy/pull/674) - Use platform ID to choose the right reader for AVHRR GAC data
* [PR 671](https://github.com/pytroll/satpy/pull/671) - Add satellite position to dataset attributes (seviri_l1b_hrit)
* [PR 669](https://github.com/pytroll/satpy/pull/669) - Add ocean-color for viirs and modis
* [PR 668](https://github.com/pytroll/satpy/pull/668) - Add grid/graticules to add_overlay function. ([666](https://github.com/pytroll/satpy/issues/666))
* [PR 665](https://github.com/pytroll/satpy/pull/665) - Add reader for VIIRS Active Fires
* [PR 645](https://github.com/pytroll/satpy/pull/645) - Reader for the SAR OCN L2 wind product in SAFE format.
* [PR 565](https://github.com/pytroll/satpy/pull/565) - Add reader for FY-3 VIRR (virr_l1b)
* [PR 529](https://github.com/pytroll/satpy/pull/529) - Add dask support to NIRReflectance modifier ([528](https://github.com/pytroll/satpy/issues/528))

#### Documentation changes

* [PR 707](https://github.com/pytroll/satpy/pull/707) - Add ABI Meso demo data case and clean up documentation
* [PR 705](https://github.com/pytroll/satpy/pull/705) - Document composites ([511](https://github.com/pytroll/satpy/issues/511))
* [PR 701](https://github.com/pytroll/satpy/pull/701) - Clarify release instructions
* [PR 699](https://github.com/pytroll/satpy/pull/699) - Rename SatPy to Satpy throughout documentation
* [PR 673](https://github.com/pytroll/satpy/pull/673) - Add information about GDAL_CACHEMAX to FAQ

In this release 23 pull requests were closed.

## Version 0.13.0 (2019/03/18)

### Issues Closed

* [Issue 641](https://github.com/pytroll/satpy/issues/641) - After pip upgrade to satpy 0.12 and pyproj 2.0.1 got pyproj.exceptions.CRSError
* [Issue 626](https://github.com/pytroll/satpy/issues/626) - Issue loading MODIS Aqua data ([PR 648](https://github.com/pytroll/satpy/pull/648))
* [Issue 620](https://github.com/pytroll/satpy/issues/620) - Add FAQ about controlling number of threads for pykdtree and blas ([PR 621](https://github.com/pytroll/satpy/pull/621))
* [Issue 521](https://github.com/pytroll/satpy/issues/521) - Interactively set the Calibration Mode when creating the Scene Object ([PR 543](https://github.com/pytroll/satpy/pull/543))

In this release 4 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 664](https://github.com/pytroll/satpy/pull/664) - Fix Scene.crop with RGBs and multidimensional data
* [PR 662](https://github.com/pytroll/satpy/pull/662) - Fix masked resampling when dataset dtype is integer
* [PR 661](https://github.com/pytroll/satpy/pull/661) - Fix CTTH composite not to mark invalid data as cloud-free
* [PR 660](https://github.com/pytroll/satpy/pull/660) - Fix seviri_l1b_hrit prologue/epilogue readers
* [PR 655](https://github.com/pytroll/satpy/pull/655) - Fix yaml load to be compatible with pyyaml 5.1
* [PR 652](https://github.com/pytroll/satpy/pull/652) - Fix resampling of ancillary variables when also first class datasets
* [PR 648](https://github.com/pytroll/satpy/pull/648) - Add wrapped line support for metadata in modis_l1b reader ([626](https://github.com/pytroll/satpy/issues/626))
* [PR 644](https://github.com/pytroll/satpy/pull/644) - Fix the modis overview not to sun normalize the IR channel
* [PR 633](https://github.com/pytroll/satpy/pull/633) - Fix VIIRS HNCC composite passing xarray objects to dask
* [PR 632](https://github.com/pytroll/satpy/pull/632) - Fixing start and end times when missing in the CF writer

#### Features added

* [PR 647](https://github.com/pytroll/satpy/pull/647) - Switch python-hdf4 dependencies to pyhdf
* [PR 643](https://github.com/pytroll/satpy/pull/643) - In cira_stretch clip values less or equal to 0 to avoid nans and -inf.
* [PR 642](https://github.com/pytroll/satpy/pull/642) - Bugfix pps2018 cpp products
* [PR 638](https://github.com/pytroll/satpy/pull/638) - Add processing-mode and disposition-mode to the avhrr-l1b-eps file name
* [PR 636](https://github.com/pytroll/satpy/pull/636) - Facilitate selection of calibration coefficients in seviri_l1b_hrit
* [PR 635](https://github.com/pytroll/satpy/pull/635) - Add local caching of slicing for data reduction
* [PR 627](https://github.com/pytroll/satpy/pull/627) - Add DNB satellite angles (DNB_SENZ, DNB_SENA) to VIIRS SDR reader
* [PR 557](https://github.com/pytroll/satpy/pull/557) - Improve the SAR-C reading and Ice composite
* [PR 543](https://github.com/pytroll/satpy/pull/543) - Calibration mode can now be passed via a keyword argument ([521](https://github.com/pytroll/satpy/issues/521))
* [PR 538](https://github.com/pytroll/satpy/pull/538) - Support CLASS packed viirs files in viirs_sdr reader

#### Documentation changes

* [PR 659](https://github.com/pytroll/satpy/pull/659) - DOC: Refer to PyTroll coding guidelines
* [PR 653](https://github.com/pytroll/satpy/pull/653) - DOC: Fix small typos in documentation
* [PR 651](https://github.com/pytroll/satpy/pull/651) - Rename changelog for releases before 0.9.0
* [PR 621](https://github.com/pytroll/satpy/pull/621) - Add FAQ items on number of workers and threads ([620](https://github.com/pytroll/satpy/issues/620))

In this release 24 pull requests were closed.

## Version 0.12.0 (2019/02/15)

### Issues Closed

* [Issue 601](https://github.com/pytroll/satpy/issues/601) - MultiScene 'save_animation' fails if "datasets=" isn't provided ([PR 602](https://github.com/pytroll/satpy/pull/602))
* [Issue 310](https://github.com/pytroll/satpy/issues/310) - Create MultiScene from list of files ([PR 576](https://github.com/pytroll/satpy/pull/576))

In this release 2 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 616](https://github.com/pytroll/satpy/pull/616) - Fix geotiff writer being unimportable if gdal isn't installed
* [PR 615](https://github.com/pytroll/satpy/pull/615) - Fix confusing error in abi_l1b reader when file fails to open
* [PR 607](https://github.com/pytroll/satpy/pull/607) - Fix VIIRS 'histogram_dnb' compositor not returning new data
* [PR 605](https://github.com/pytroll/satpy/pull/605) - Fix enhancements using dask delayed on internal functions
* [PR 602](https://github.com/pytroll/satpy/pull/602) - Fix MultiScene save_animation not using dataset IDs correctly ([601](https://github.com/pytroll/satpy/issues/601))
* [PR 600](https://github.com/pytroll/satpy/pull/600) - Fix resample reduce_data bug introduced in #582

#### Features added

* [PR 614](https://github.com/pytroll/satpy/pull/614) - Support for reduced resolution OLCI data
* [PR 613](https://github.com/pytroll/satpy/pull/613) - Add 'crop' and 'save_datasets' to MultiScene
* [PR 609](https://github.com/pytroll/satpy/pull/609) - Add ability to use dask distributed when generating animation videos
* [PR 582](https://github.com/pytroll/satpy/pull/582) - Add 'reduce_data' keyword argument to disable cropping before resampling
* [PR 576](https://github.com/pytroll/satpy/pull/576) - Add group_files and from_files utility functions for creating Scenes from multiple files ([310](https://github.com/pytroll/satpy/issues/310))
* [PR 567](https://github.com/pytroll/satpy/pull/567) - Add utility functions for generating GeoViews plots ([541](https://github.com/pytroll/satpy/issues/541))

In this release 12 pull requests were closed.

## Version 0.11.2 (2019/01/28)

### Issues Closed

* [Issue 584](https://github.com/pytroll/satpy/issues/584) - DayNightCompositor does not work with e.g. overview_sun as the day part ([PR 593](https://github.com/pytroll/satpy/pull/593))
* [Issue 577](https://github.com/pytroll/satpy/issues/577) - Creation of composites using `sunz_corrected` modifier fails with VIIRS SDR data
* [Issue 569](https://github.com/pytroll/satpy/issues/569) - Can not show or save ABI true color image (RuntimeWarning: invalid value encountered in log)
* [Issue 531](https://github.com/pytroll/satpy/issues/531) - Mask space pixels in AHI HSD reader ([PR 592](https://github.com/pytroll/satpy/pull/592))
* [Issue 106](https://github.com/pytroll/satpy/issues/106) - Warnings

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 594](https://github.com/pytroll/satpy/pull/594) - Fix VIIRS L1B reader not using standard 'y' and 'x' dimension names
* [PR 593](https://github.com/pytroll/satpy/pull/593) - Fix sunz_corrected modifier adding unnecessary x and y coordinates ([587](https://github.com/pytroll/satpy/issues/587), [584](https://github.com/pytroll/satpy/issues/584))
* [PR 592](https://github.com/pytroll/satpy/pull/592) - Fix masking of AHI HSD space pixels ([531](https://github.com/pytroll/satpy/issues/531))
* [PR 589](https://github.com/pytroll/satpy/pull/589) - Fix dask not importing sharedict automatically in dask 1.1+
* [PR 588](https://github.com/pytroll/satpy/pull/588) - Fix start_time type in seviri_l1b_nc reader
* [PR 585](https://github.com/pytroll/satpy/pull/585) - Fix geotiff writer not using fill_value from writer YAML config
* [PR 572](https://github.com/pytroll/satpy/pull/572) - Fix VIIRS SDR masking and distracting colors in composites
* [PR 570](https://github.com/pytroll/satpy/pull/570) - Fix CF epoch for xarray compat
* [PR 563](https://github.com/pytroll/satpy/pull/563) - Fix StopIteration and python 3.7 compatibility issue in MultiScene
* [PR 554](https://github.com/pytroll/satpy/pull/554) - Fix AreaDefinition usage to work with newer versions of pyresample

#### Features added

* [PR 561](https://github.com/pytroll/satpy/pull/561) - Add AHI HRIT B07 files for high resolution night data

#### Documentation changes

* [PR 590](https://github.com/pytroll/satpy/pull/590) - Add FAQ page to docs
* [PR 575](https://github.com/pytroll/satpy/pull/575) - Add page for data download resources
* [PR 574](https://github.com/pytroll/satpy/pull/574) - Add code of conduct

In this release 14 pull requests were closed.

## Version 0.11.1 (2018/12/27)

### Pull Requests Merged

#### Bugs fixed

* [PR 560](https://github.com/pytroll/satpy/pull/560) - Fix available_composite_ids including inline comp dependencies

In this release 1 pull request was closed.

## Version 0.11.0 (2018/12/21)

### Issues Closed

* [Issue 555](https://github.com/pytroll/satpy/issues/555) - GOES-16 geolocation seems off when saving as TIFF
* [Issue 552](https://github.com/pytroll/satpy/issues/552) - GOES Composites failing ([PR 553](https://github.com/pytroll/satpy/pull/553))
* [Issue 534](https://github.com/pytroll/satpy/issues/534) - Support GOES-15 in netcdf format from Eumetcast (`nc_goes` reader) ([PR 530](https://github.com/pytroll/satpy/pull/530))
* [Issue 527](https://github.com/pytroll/satpy/issues/527) - [SEP] Reader naming conventions ([PR 546](https://github.com/pytroll/satpy/pull/546))
* [Issue 518](https://github.com/pytroll/satpy/issues/518) - Make bilinear interpolation dask/xarray friendly ([PR 519](https://github.com/pytroll/satpy/pull/519))
* [Issue 467](https://github.com/pytroll/satpy/issues/467) - Flake8-ify all of satpy ([PR 515](https://github.com/pytroll/satpy/pull/515))
* [Issue 459](https://github.com/pytroll/satpy/issues/459) - How to colorize images
* [Issue 449](https://github.com/pytroll/satpy/issues/449) - Adding coastlines to single channel not working ([PR 551](https://github.com/pytroll/satpy/pull/551))
* [Issue 337](https://github.com/pytroll/satpy/issues/337) - Plot true color by using VIIRS SDR
* [Issue 333](https://github.com/pytroll/satpy/issues/333) - `available_readers` to detail unavailable items
* [Issue 263](https://github.com/pytroll/satpy/issues/263) - How to get the available dataset names from the reader
* [Issue 147](https://github.com/pytroll/satpy/issues/147) - SEVIRI HRIT reading: More user-friendly warning when no EPI/PRO files are present ([PR 452](https://github.com/pytroll/satpy/pull/452))

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 556](https://github.com/pytroll/satpy/pull/556) - Fix turning off enhancements in writers for float data
* [PR 553](https://github.com/pytroll/satpy/pull/553) - Fix DifferenceCompositor and other compositors when areas are incompatible ([552](https://github.com/pytroll/satpy/issues/552))
* [PR 550](https://github.com/pytroll/satpy/pull/550) - Fix AHI HRIT file patterns so area's ID is correct
* [PR 548](https://github.com/pytroll/satpy/pull/548) - Fix ratio sharpening compositors when the ratio is negative
* [PR 547](https://github.com/pytroll/satpy/pull/547) - Fix EWA resampling for new versions of pyresample
* [PR 542](https://github.com/pytroll/satpy/pull/542) - Fix palette application for pps 2018 products
* [PR 508](https://github.com/pytroll/satpy/pull/508) - Fix the cf_writer to accept single-valued time coordinate variable

#### Features added

* [PR 558](https://github.com/pytroll/satpy/pull/558) - Make counts available in ahi_hsd
* [PR 551](https://github.com/pytroll/satpy/pull/551) - Fix image overlays for single band data (requires trollimage 1.6+) ([449](https://github.com/pytroll/satpy/issues/449))
* [PR 549](https://github.com/pytroll/satpy/pull/549) - Fix nwcpps ct palette from v2018 to be backwards compatible
* [PR 546](https://github.com/pytroll/satpy/pull/546) - Rename readers to meet new reader naming scheme ([527](https://github.com/pytroll/satpy/issues/527))
* [PR 545](https://github.com/pytroll/satpy/pull/545) - Add configurable parameters to solar zenith correctors
* [PR 530](https://github.com/pytroll/satpy/pull/530) - Add reader for Goes15 netcdf Eumetsat format ([534](https://github.com/pytroll/satpy/issues/534))
* [PR 519](https://github.com/pytroll/satpy/pull/519) - Add xarray/dask bilinear resampling ([518](https://github.com/pytroll/satpy/issues/518))
* [PR 507](https://github.com/pytroll/satpy/pull/507) - Change default enhancement for reflectance data to gamma 1.5
* [PR 452](https://github.com/pytroll/satpy/pull/452) - Improve handling of missing file requirements in readers ([147](https://github.com/pytroll/satpy/issues/147))

#### Documentation changes

* [PR 533](https://github.com/pytroll/satpy/pull/533) - Fix copy/paste error in readers table for viirs_l1b
* [PR 515](https://github.com/pytroll/satpy/pull/515) - Fix all flake8 errors in satpy package code ([467](https://github.com/pytroll/satpy/issues/467))

#### Backwards incompatible changes

* [PR 546](https://github.com/pytroll/satpy/pull/546) - Rename readers to meet new reader naming scheme ([527](https://github.com/pytroll/satpy/issues/527))
* [PR 507](https://github.com/pytroll/satpy/pull/507) - Change default enhancement for reflectance data to gamma 1.5

In this release 20 pull requests were closed.

## Version 0.10.0 (2018/11/23)

### Issues Closed

* [Issue 491](https://github.com/pytroll/satpy/issues/491) - Area definition of incomplete SEVIRI images
* [Issue 487](https://github.com/pytroll/satpy/issues/487) - Resampling a User Defined Scene
* [Issue 465](https://github.com/pytroll/satpy/issues/465) - Native resampler fails with 3D DataArrays ([PR 468](https://github.com/pytroll/satpy/pull/468))
* [Issue 464](https://github.com/pytroll/satpy/issues/464) - Drawing coastlines/borders with save_datasets ([PR 469](https://github.com/pytroll/satpy/pull/469))
* [Issue 453](https://github.com/pytroll/satpy/issues/453) - Review subclasses of BaseFileHandler ([PR 455](https://github.com/pytroll/satpy/pull/455))
* [Issue 450](https://github.com/pytroll/satpy/issues/450) - Allow readers to accept pathlib.Path instances ([PR 451](https://github.com/pytroll/satpy/pull/451))
* [Issue 445](https://github.com/pytroll/satpy/issues/445) - Readthedocs builds are failing
* [Issue 439](https://github.com/pytroll/satpy/issues/439) - KeyError when creating true_color for ABI
* [Issue 417](https://github.com/pytroll/satpy/issues/417) - Add custom string formatter for lower/upper support
* [Issue 414](https://github.com/pytroll/satpy/issues/414) - Inconsistent units of geostationary radiances ([PR 490](https://github.com/pytroll/satpy/pull/490))
* [Issue 405](https://github.com/pytroll/satpy/issues/405) - Angle interpolation for MODIS data missing ([PR 430](https://github.com/pytroll/satpy/pull/430))
* [Issue 397](https://github.com/pytroll/satpy/issues/397) - Add README to setup.py description ([PR 443](https://github.com/pytroll/satpy/pull/443))
* [Issue 369](https://github.com/pytroll/satpy/issues/369) - Mitiff writer is broken ([PR 480](https://github.com/pytroll/satpy/pull/480))

In this release 13 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 510](https://github.com/pytroll/satpy/pull/510) - Make sure a discrete data type is preserved through resampling
* [PR 506](https://github.com/pytroll/satpy/pull/506) - Remove dependency on nc_nwcsaf_msg
* [PR 504](https://github.com/pytroll/satpy/pull/504) - Change unnecessary warning messages to debug
* [PR 496](https://github.com/pytroll/satpy/pull/496) - Add more descriptive names to AHI readers AreaDefinition names
* [PR 492](https://github.com/pytroll/satpy/pull/492) - Fix thinned modis reading in 'hdfeos_l1b' reader
* [PR 480](https://github.com/pytroll/satpy/pull/480) - Fix 'mitiff' writer to use 'base_dir' properly ([369](https://github.com/pytroll/satpy/issues/369))
* [PR 476](https://github.com/pytroll/satpy/pull/476) - Fix handling of navigation in a grib file with lons greater than 180
* [PR 473](https://github.com/pytroll/satpy/pull/473) - Change combine_metadata to average any 'time' fields
* [PR 471](https://github.com/pytroll/satpy/pull/471) - Fix offset between VIS+IR and HRV navigation for hrit seviri
* [PR 469](https://github.com/pytroll/satpy/pull/469) - Fix attributes not being preserved when adding overlays or decorations ([464](https://github.com/pytroll/satpy/issues/464))
* [PR 468](https://github.com/pytroll/satpy/pull/468) - Fix native resampling when RGBs are resampled ([465](https://github.com/pytroll/satpy/issues/465))
* [PR 458](https://github.com/pytroll/satpy/pull/458) - Fix the slstr reader for consistency and tir view
* [PR 456](https://github.com/pytroll/satpy/pull/456) - Fix SCMI writer not writing fill values properly
* [PR 448](https://github.com/pytroll/satpy/pull/448) - Fix saving a dataset with a prerequisites attrs to netcdf
* [PR 447](https://github.com/pytroll/satpy/pull/447) - Fix masking in DayNightCompositor when composites have partial missing data
* [PR 446](https://github.com/pytroll/satpy/pull/446) - Fix nc_nwcsaf_msg reader's handling of projection units

#### Features added

* [PR 503](https://github.com/pytroll/satpy/pull/503) - Add two luminance sharpening compositors
* [PR 498](https://github.com/pytroll/satpy/pull/498) - Make it possible to configure in-line composites
* [PR 488](https://github.com/pytroll/satpy/pull/488) - Add the check_satpy function to find missing dependencies
* [PR 481](https://github.com/pytroll/satpy/pull/481) - Refactor SCMI writer to be dask friendly
* [PR 478](https://github.com/pytroll/satpy/pull/478) - Allow writers to create output directories if they don't exist
* [PR 477](https://github.com/pytroll/satpy/pull/477) - Add additional metadata to ABI L1B DataArrays
* [PR 474](https://github.com/pytroll/satpy/pull/474) - Improve handling of dependency loading when reader has multiple matches
* [PR 463](https://github.com/pytroll/satpy/pull/463) - MSG Level1.5 NetCDF Reader (code and yaml file) for VIS/IR Channels
* [PR 455](https://github.com/pytroll/satpy/pull/455) - Ensure file handlers all use filenames as strings ([453](https://github.com/pytroll/satpy/issues/453))
* [PR 451](https://github.com/pytroll/satpy/pull/451) - Allow readers to accept pathlib.Path instances as filenames. ([450](https://github.com/pytroll/satpy/issues/450))
([450](https://github.com/pytroll/satpy/issues/450)) * [PR 442](https://github.com/pytroll/satpy/pull/442) - Replace areas.def with areas.yaml * [PR 441](https://github.com/pytroll/satpy/pull/441) - Fix metop reader * [PR 438](https://github.com/pytroll/satpy/pull/438) - Feature new olcil2 datasets * [PR 436](https://github.com/pytroll/satpy/pull/436) - Allow on-the-fly decompression of xRIT files in xRIT readers * [PR 430](https://github.com/pytroll/satpy/pull/430) - Implement fast modis lon/lat and angles interpolation ([405](https://github.com/pytroll/satpy/issues/405)) #### Documentation changes * [PR 501](https://github.com/pytroll/satpy/pull/501) - Add DOI role and reference to Zinke DNB method * [PR 489](https://github.com/pytroll/satpy/pull/489) - Add a first version on how to write a custom reader * [PR 444](https://github.com/pytroll/satpy/pull/444) - Fix the readers table in the sphinx docs so it wraps text * [PR 443](https://github.com/pytroll/satpy/pull/443) - Add long_description to setup.py ([397](https://github.com/pytroll/satpy/issues/397)) * [PR 440](https://github.com/pytroll/satpy/pull/440) - Fix CI badges in README #### Backwards incompatible changes * [PR 485](https://github.com/pytroll/satpy/pull/485) - Deprecate 'enhancement_config' keyword argument in favor of 'enhance' In this release 37 pull requests were closed. ## Version 0.9.4 (2018/09/29) ### Pull Requests Merged #### Bugs fixed * [PR 433](https://github.com/pytroll/satpy/pull/433) - Fix native_msg readers standard_names to match other satpy readers * [PR 432](https://github.com/pytroll/satpy/pull/432) - Fix reader config loading so it raises exception for bad reader name * [PR 428](https://github.com/pytroll/satpy/pull/428) - Fix start_time and end_time being lists in native_msg reader * [PR 426](https://github.com/pytroll/satpy/pull/426) - Fix hrit_jma reader not having satellite lon/lat/alt info * [PR 423](https://github.com/pytroll/satpy/pull/423) - Fixed that save_dataset does not propagate fill_value * [PR 421](https://github.com/pytroll/satpy/pull/421) - Fix masking and simplify avhrr_aapp_l1b reader * [PR 413](https://github.com/pytroll/satpy/pull/413) - Fix calculating solar zenith angle in eps_l1b reader * [PR 412](https://github.com/pytroll/satpy/pull/412) - Fix platform_name and sensor not being added by avhrr eps l1b reader #### Features added * [PR 415](https://github.com/pytroll/satpy/pull/415) - Add hrit_jma file patterns that don't include segments In this release 9 pull requests were closed. ## Version 0.9.3 (2018/09/10) ### Issues Closed * [Issue 336](https://github.com/pytroll/satpy/issues/336) - Scene crop does not compare all dataset areas ([PR 406](https://github.com/pytroll/satpy/pull/406)) In this release 1 issue was closed. ### Pull Requests Merged #### Bugs fixed * [PR 409](https://github.com/pytroll/satpy/pull/409) - Fix viirs_sdr reading of aggregated files * [PR 406](https://github.com/pytroll/satpy/pull/406) - Fix Scene crop so new areas are consistent with resolution ([336](https://github.com/pytroll/satpy/issues/336)) In this release 2 pull requests were closed. ## Version 0.9.2 (2018/08/23) ### Pull Requests Merged #### Bugs fixed * [PR 402](https://github.com/pytroll/satpy/pull/402) - Fix 'platform_name' metadata in ACSPO and CLAVR-x readers * [PR 401](https://github.com/pytroll/satpy/pull/401) - Wrap solar and satellite angles in xarray in AVHRR AAPP reader In this release 2 pull requests were closed. 
## Version 0.9.1 (2018/08/19) ### Issues Closed * [Issue 388](https://github.com/pytroll/satpy/issues/388) - SCMI Writer raises exception with lettered grids ([PR 389](https://github.com/pytroll/satpy/pull/389)) * [Issue 385](https://github.com/pytroll/satpy/issues/385) - No platform_name and sensor in dataset metadata for avhrr_aapp_l1b reader ([PR 386](https://github.com/pytroll/satpy/pull/386)) * [Issue 379](https://github.com/pytroll/satpy/issues/379) - Data is not masked when loading calibrated GOES HRIT data ([PR 380](https://github.com/pytroll/satpy/pull/380)) * [Issue 377](https://github.com/pytroll/satpy/issues/377) - Unmasked data when using DayNightCompositor ([PR 378](https://github.com/pytroll/satpy/pull/378)) * [Issue 372](https://github.com/pytroll/satpy/issues/372) - "find_files_and_readers" doesn't work on Windows ([PR 373](https://github.com/pytroll/satpy/pull/373)) * [Issue 364](https://github.com/pytroll/satpy/issues/364) - Unable to load individual channels from VIIRS_SDR data. * [Issue 350](https://github.com/pytroll/satpy/issues/350) - Creating a Scene object with NOAA-15/18 data * [Issue 347](https://github.com/pytroll/satpy/issues/347) - No image is shown in Jupyter notebook via scene.show() * [Issue 345](https://github.com/pytroll/satpy/issues/345) - Future warning - xarray ([PR 352](https://github.com/pytroll/satpy/pull/352)) In this release 9 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 395](https://github.com/pytroll/satpy/pull/395) - Fix DayNightCompositor not checking inputs areas * [PR 391](https://github.com/pytroll/satpy/pull/391) - Fix native resampler using SwathDefinition as an AreaDefinition * [PR 387](https://github.com/pytroll/satpy/pull/387) - Fix enhancement config loading when yaml file is empty * [PR 386](https://github.com/pytroll/satpy/pull/386) - Add platform_name and sensor in avhrr_aapp_l1b reader ([385](https://github.com/pytroll/satpy/issues/385)) * [PR 381](https://github.com/pytroll/satpy/pull/381) - Fix keyword arguments not being properly passed to writers * [PR 362](https://github.com/pytroll/satpy/pull/362) - Replace np.ma.mean by np.nanmean for pixel aggregation * [PR 361](https://github.com/pytroll/satpy/pull/361) - Remove Rayleigh correction from abi natural composite * [PR 360](https://github.com/pytroll/satpy/pull/360) - Fix lookup table enhancement for multi-band datasets * [PR 339](https://github.com/pytroll/satpy/pull/339) - fixed meteosat native georeferencing #### Documentation changes * [PR 359](https://github.com/pytroll/satpy/pull/359) - Add examples from pytroll-examples to documentation In this release 10 pull requests were closed. ## Version 0.9.0 (2018/07/02) ### Issues Closed * [Issue 344](https://github.com/pytroll/satpy/issues/344) - find_files_and_reader does not seem to care about start_time! ([PR 349](https://github.com/pytroll/satpy/pull/349)) * [Issue 338](https://github.com/pytroll/satpy/issues/338) - Creating a Scene object with Terra MODIS data * [Issue 332](https://github.com/pytroll/satpy/issues/332) - Non-requested datasets are saved when composites fail to generate ([PR 342](https://github.com/pytroll/satpy/pull/342)) In this release 3 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 355](https://github.com/pytroll/satpy/pull/355) - Fix ABI L1B reader losing file variable attributes * [PR 353](https://github.com/pytroll/satpy/pull/353) - Fix multiscene memory issues by adding an optional batch_size * [PR 351](https://github.com/pytroll/satpy/pull/351) - Fix AMSR-2 L1B reader loading bytes incorrectly * [PR 349](https://github.com/pytroll/satpy/pull/349) - Fix datetime-based file selection when filename only has a start time ([344](https://github.com/pytroll/satpy/issues/344)) * [PR 348](https://github.com/pytroll/satpy/pull/348) - Fix freezing of areas before resampling even as strings * [PR 343](https://github.com/pytroll/satpy/pull/343) - Fix shape assertion after resampling * [PR 342](https://github.com/pytroll/satpy/pull/342) - Fix Scene save_datasets to only save datasets from the wishlist ([332](https://github.com/pytroll/satpy/issues/332)) * [PR 341](https://github.com/pytroll/satpy/pull/341) - Fix ancillary variable loading when anc var is already loaded * [PR 340](https://github.com/pytroll/satpy/pull/340) - Cut radiances array depending on number of scans In this release 9 pull requests were closed. ## Version 0.9.0b0 (2018/06/26) ### Issues Closed * [Issue 328](https://github.com/pytroll/satpy/issues/328) - hrit reader bugs ([PR 329](https://github.com/pytroll/satpy/pull/329)) * [Issue 323](https://github.com/pytroll/satpy/issues/323) - "Manual" application of corrections * [Issue 320](https://github.com/pytroll/satpy/issues/320) - Overview of code layout * [Issue 279](https://github.com/pytroll/satpy/issues/279) - Add 'level' to DatasetID ([PR 283](https://github.com/pytroll/satpy/pull/283)) * [Issue 272](https://github.com/pytroll/satpy/issues/272) - How to save region of interest from Band 3 Himawari Data as png image ([PR 276](https://github.com/pytroll/satpy/pull/276)) * [Issue 267](https://github.com/pytroll/satpy/issues/267) - Missing dependency causes strange error during unit tests ([PR 273](https://github.com/pytroll/satpy/pull/273)) * [Issue 244](https://github.com/pytroll/satpy/issues/244) - Fix NUCAPS reader for NUCAPS EDR v2 files ([PR 326](https://github.com/pytroll/satpy/pull/326)) * [Issue 236](https://github.com/pytroll/satpy/issues/236) - scene.resample(cache_dir=) fails with TypeError: Unicode-objects must be encoded before hashing * [Issue 233](https://github.com/pytroll/satpy/issues/233) - IOError: Unable to read attribute (no appropriate function for conversion path) * [Issue 211](https://github.com/pytroll/satpy/issues/211) - Fix OLCI and other readers' file patterns to work on Windows * [Issue 207](https://github.com/pytroll/satpy/issues/207) - Method not fully documented in terms of possible key word arguments * [Issue 199](https://github.com/pytroll/satpy/issues/199) - Reading Modis file produce a double image * [Issue 168](https://github.com/pytroll/satpy/issues/168) - Cannot read MODIS data * [Issue 167](https://github.com/pytroll/satpy/issues/167) - KeyError 'v' using Scene(base_dir=, reader=) ([PR 325](https://github.com/pytroll/satpy/pull/325)) * [Issue 165](https://github.com/pytroll/satpy/issues/165) - HRIT GOES reader is broken ([PR 303](https://github.com/pytroll/satpy/pull/303)) * [Issue 160](https://github.com/pytroll/satpy/issues/160) - Inconsistent naming of optional datasets in composite configs and compositors * [Issue 157](https://github.com/pytroll/satpy/issues/157) - Add animation example ([PR 322](https://github.com/pytroll/satpy/pull/322)) * [Issue 
156](https://github.com/pytroll/satpy/issues/156) - Add cartopy example * [Issue 146](https://github.com/pytroll/satpy/issues/146) - Add default null log handler * [Issue 123](https://github.com/pytroll/satpy/issues/123) - NetCDF writer doesn't work ([PR 307](https://github.com/pytroll/satpy/pull/307)) * [Issue 114](https://github.com/pytroll/satpy/issues/114) - Print a list of available sensors/readers * [Issue 82](https://github.com/pytroll/satpy/issues/82) - Separate file discovery from Scene init * [Issue 61](https://github.com/pytroll/satpy/issues/61) - Creating composites post-load * [Issue 10](https://github.com/pytroll/satpy/issues/10) - Optimize CREFL for memory In this release 24 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 331](https://github.com/pytroll/satpy/pull/331) - Adapt slstr reader to xarray&dask * [PR 329](https://github.com/pytroll/satpy/pull/329) - issue#328: fixed bugs loading JMA HRIT files ([328](https://github.com/pytroll/satpy/issues/328)) * [PR 326](https://github.com/pytroll/satpy/pull/326) - Fix nucaps reader for NUCAPS EDR v2 files ([244](https://github.com/pytroll/satpy/issues/244), [244](https://github.com/pytroll/satpy/issues/244)) * [PR 325](https://github.com/pytroll/satpy/pull/325) - Fix exception when Scene is given reader and base_dir ([167](https://github.com/pytroll/satpy/issues/167)) * [PR 319](https://github.com/pytroll/satpy/pull/319) - Fix msi reader delayed * [PR 318](https://github.com/pytroll/satpy/pull/318) - Fix nir reflectance to use XArray * [PR 312](https://github.com/pytroll/satpy/pull/312) - Allow custom regions in ahi-hsd file patterns * [PR 311](https://github.com/pytroll/satpy/pull/311) - Allow valid_range to be a tuple for cloud product colorization * [PR 303](https://github.com/pytroll/satpy/pull/303) - Fix hrit goes to support python 3 ([165](https://github.com/pytroll/satpy/issues/165)) * [PR 288](https://github.com/pytroll/satpy/pull/288) - Fix hrit-goes reader * [PR 192](https://github.com/pytroll/satpy/pull/192) - Clip day and night composites after enhancement #### Features added * [PR 315](https://github.com/pytroll/satpy/pull/315) - Add slicing to Scene * [PR 314](https://github.com/pytroll/satpy/pull/314) - Feature mitiff writer * [PR 307](https://github.com/pytroll/satpy/pull/307) - Fix projections in cf writer ([123](https://github.com/pytroll/satpy/issues/123)) * [PR 305](https://github.com/pytroll/satpy/pull/305) - Add support for geolocation and angles to msi reader * [PR 302](https://github.com/pytroll/satpy/pull/302) - Workaround the LinearNDInterpolator thread-safety issue for Sentinel 1 SAR geolocation * [PR 301](https://github.com/pytroll/satpy/pull/301) - Factorize header definitions between hrit_msg and native_msg. Fix a bug in header definition. * [PR 298](https://github.com/pytroll/satpy/pull/298) - Implement sentinel 2 MSI reader * [PR 294](https://github.com/pytroll/satpy/pull/294) - Add the ocean color product to olci * [PR 153](https://github.com/pytroll/satpy/pull/153) - [WIP] Improve compatibility of cf_writer with CF-conventions In this release 20 pull requests were closed. 
## Version 0.9.0a2 (2018/05/14) ### Issues Closed * [Issue 286](https://github.com/pytroll/satpy/issues/286) - Proposal: search automatically for local config-files/readers * [Issue 278](https://github.com/pytroll/satpy/issues/278) - msg native reader fails on full disk image * [Issue 277](https://github.com/pytroll/satpy/issues/277) - msg_native reader fails when order number has a hyphen in it ([PR 282](https://github.com/pytroll/satpy/pull/282)) * [Issue 270](https://github.com/pytroll/satpy/issues/270) - How to find the value at certain latitude and longtitude * [Issue 269](https://github.com/pytroll/satpy/issues/269) - How to intepret the parameter values in AreaDefinition * [Issue 268](https://github.com/pytroll/satpy/issues/268) - How to find the appropriate values of parameters in Scene.resample() function using Himawari Data * [Issue 241](https://github.com/pytroll/satpy/issues/241) - reader native_msg using `np.str` * [Issue 218](https://github.com/pytroll/satpy/issues/218) - Resampling to EPSG:4326 produces unexpected results * [Issue 189](https://github.com/pytroll/satpy/issues/189) - Error when reading MSG native format * [Issue 62](https://github.com/pytroll/satpy/issues/62) - msg_native example * [Issue 33](https://github.com/pytroll/satpy/issues/33) - Load metadata without loading data In this release 11 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 290](https://github.com/pytroll/satpy/pull/290) - Fix unicode-named data loading * [PR 285](https://github.com/pytroll/satpy/pull/285) - Fix native_msg calibration bug * [PR 282](https://github.com/pytroll/satpy/pull/282) - Fix native_msg reader for ROI input and multi-part order file patterns ([277](https://github.com/pytroll/satpy/issues/277)) * [PR 280](https://github.com/pytroll/satpy/pull/280) - Fix CLAVR-x reader to work with xarray * [PR 274](https://github.com/pytroll/satpy/pull/274) - Convert ahi hsd reader to dask and xarray * [PR 265](https://github.com/pytroll/satpy/pull/265) - Bugfix msg native reader * [PR 262](https://github.com/pytroll/satpy/pull/262) - Fix dependency tree to find the best dependency when multiple matches occur * [PR 260](https://github.com/pytroll/satpy/pull/260) - Fix ABI L1B reader masking data improperly #### Features added * [PR 293](https://github.com/pytroll/satpy/pull/293) - Switch to netcdf4 as engine for nc nwcsaf reading * [PR 292](https://github.com/pytroll/satpy/pull/292) - Use pyresample's boundary classes * [PR 291](https://github.com/pytroll/satpy/pull/291) - Allow datasets without areas to be concatenated * [PR 289](https://github.com/pytroll/satpy/pull/289) - Fix so UMARF files (with extention .nat) are found as well * [PR 287](https://github.com/pytroll/satpy/pull/287) - Add production configuration for NWCSAF RDT, ASII products by Marco Sassi * [PR 283](https://github.com/pytroll/satpy/pull/283) - Add GRIB Reader ([279](https://github.com/pytroll/satpy/issues/279)) * [PR 281](https://github.com/pytroll/satpy/pull/281) - Port the maia reader to dask/xarray * [PR 276](https://github.com/pytroll/satpy/pull/276) - Support reducing data for geos areas ([272](https://github.com/pytroll/satpy/issues/272)) * [PR 273](https://github.com/pytroll/satpy/pull/273) - Msg readers cleanup ([267](https://github.com/pytroll/satpy/issues/267)) * [PR 271](https://github.com/pytroll/satpy/pull/271) - Add appveyor and use ci-helpers for CI environments * [PR 264](https://github.com/pytroll/satpy/pull/264) - Add caching at the scene level, and handle saving/loading from disk * 
[PR 262](https://github.com/pytroll/satpy/pull/262) - Fix dependency tree to find the best dependency when multiple matches occur In this release 20 pull requests were closed. ## Version 0.9.0a1 (2018/04/22) ### Issues Closed * [Issue 227](https://github.com/pytroll/satpy/issues/227) - Issue Reading MSG4 * [Issue 225](https://github.com/pytroll/satpy/issues/225) - Save Datasets using SCMI ([PR 228](https://github.com/pytroll/satpy/pull/228)) * [Issue 215](https://github.com/pytroll/satpy/issues/215) - Change `Scene.compute` to something else ([PR 220](https://github.com/pytroll/satpy/pull/220)) * [Issue 208](https://github.com/pytroll/satpy/issues/208) - Strange behaviour when trying to load data to a scene object after having worked with it ([PR 214](https://github.com/pytroll/satpy/pull/214)) * [Issue 200](https://github.com/pytroll/satpy/issues/200) - Different mask handling when saving to PNG or GeoTIFF ([PR 201](https://github.com/pytroll/satpy/pull/201)) * [Issue 176](https://github.com/pytroll/satpy/issues/176) - Loading viirs natural_color composite fails ([PR 177](https://github.com/pytroll/satpy/pull/177)) In this release 6 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 259](https://github.com/pytroll/satpy/pull/259) - Fix writer and refactor so bad writer name raises logical exception * [PR 257](https://github.com/pytroll/satpy/pull/257) - Fix geotiff and png writers to save to a temporary directory * [PR 256](https://github.com/pytroll/satpy/pull/256) - Add 'python_requires' to setup.py to specify python support * [PR 253](https://github.com/pytroll/satpy/pull/253) - Fix ABI L1B reader to use 64-bit scaling factors for X/Y variables * [PR 250](https://github.com/pytroll/satpy/pull/250) - Fix floating point geotiff saving in dask geotiff writer * [PR 249](https://github.com/pytroll/satpy/pull/249) - Fix float geotiff saving on 0.8 * [PR 248](https://github.com/pytroll/satpy/pull/248) - Fix unloading composite deps when one of them has incompatible areas * [PR 243](https://github.com/pytroll/satpy/pull/243) - Remove ABI composite reducerX modifiers #### Features added * [PR 252](https://github.com/pytroll/satpy/pull/252) - Use rasterio to save geotiffs when available * [PR 239](https://github.com/pytroll/satpy/pull/239) - Add CSPP Geo (geocat) AHI reading support In this release 10 pull requests were closed. 
## Version 0.9.0a0 (2018-03-20) #### Bugs fixed * [Issue 179](https://github.com/pytroll/satpy/issues/179) - Cannot read AVHRR in AAPP format * [PR 234](https://github.com/pytroll/satpy/pull/234) - Bugfix sar reader * [PR 231](https://github.com/pytroll/satpy/pull/231) - Bugfix palette based compositor concatenation * [PR 230](https://github.com/pytroll/satpy/pull/230) - Fix dask angle calculations of rayleigh corrector * [PR 229](https://github.com/pytroll/satpy/pull/229) - Fix bug in dep tree when modifier deps are modified wavelengths * [PR 228](https://github.com/pytroll/satpy/pull/228) - Fix 'platform' being used instead of 'platform_name' * [PR 224](https://github.com/pytroll/satpy/pull/224) - Add helper method for checking areas in compositors * [PR 222](https://github.com/pytroll/satpy/pull/222) - Fix resampler caching by source area * [PR 221](https://github.com/pytroll/satpy/pull/221) - Fix Scene loading and resampling when generate=False * [PR 220](https://github.com/pytroll/satpy/pull/220) - Rename Scene's `compute` to `generate_composites` * [PR 219](https://github.com/pytroll/satpy/pull/219) - Fixed native_msg calibration problem and added env var to change the … * [PR 214](https://github.com/pytroll/satpy/pull/214) - Fix Scene not being copied properly during resampling * [PR 210](https://github.com/pytroll/satpy/pull/210) - Bugfix check if lons and lats should be masked before resampling * [PR 206](https://github.com/pytroll/satpy/pull/206) - Fix optional dependencies not being passed to modifiers with opts only * [PR 187](https://github.com/pytroll/satpy/pull/187) - Fix reader configs having mismatched names between filename and config * [PR 185](https://github.com/pytroll/satpy/pull/185) - Bugfix nwcsaf_pps reader for file discoverability * [PR 177](https://github.com/pytroll/satpy/pull/177) - Bugfix viirs loading - picked from (xarray)develop branch * [PR 163](https://github.com/pytroll/satpy/pull/163) - Bugfix float geotiff #### Features added * [PR 232](https://github.com/pytroll/satpy/pull/232) - Add ABI L1B system tests * [PR 226](https://github.com/pytroll/satpy/pull/226) - EARS NWCSAF products reading * [PR 217](https://github.com/pytroll/satpy/pull/217) - Add xarray/dask support to DayNightCompositor * [PR 216](https://github.com/pytroll/satpy/pull/216) - Fix dataset writing so computations are shared between tasks * [PR 213](https://github.com/pytroll/satpy/pull/213) - [WIP] Reuse same resampler for similar datasets * [PR 212](https://github.com/pytroll/satpy/pull/212) - Improve modis reader to support dask * [PR 209](https://github.com/pytroll/satpy/pull/209) - Fix enhancements to work with xarray * [PR 205](https://github.com/pytroll/satpy/pull/205) - Fix ABI 'natural' and 'true_color' composites to work with xarray * [PR 204](https://github.com/pytroll/satpy/pull/204) - Add 'native' resampler * [PR 203](https://github.com/pytroll/satpy/pull/203) - [WIP] Feature trollimage xarray * [PR 195](https://github.com/pytroll/satpy/pull/195) - Add ABI-specific configs for Airmass composite * [PR 186](https://github.com/pytroll/satpy/pull/186) - Add missing nodata tiff tag * [PR 180](https://github.com/pytroll/satpy/pull/180) - Replace BW and RGBCompositor with a more generic one #### Documentation changes * [PR 155](https://github.com/pytroll/satpy/pull/155) - Add contributing and developers guide documentation In this release 1 issue and 31 pull requests were closed. 
satpy-0.34.0/CITATION000066400000000000000000000002521420401153000140600ustar00rootroot00000000000000To find out how to reference satpy, go to https://zenodo.org/badge/latestdoi/51397392 and choose your favourite citation format on the bottom of the right hand side-bar. satpy-0.34.0/CODE_OF_CONDUCT.md000066400000000000000000000064421420401153000155310ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at . All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq satpy-0.34.0/CONTRIBUTING.rst000066400000000000000000000160521420401153000153710ustar00rootroot00000000000000================= How to contribute ================= Thank you for considering contributing to Satpy! Satpy's development team is made up of volunteers so any help we can get is very appreciated. Contributions from users are what keep this community going. We welcome any contributions including bug reports, documentation fixes or updates, bug fixes, and feature requests. By contributing to Satpy you are providing code that everyone can use and benefit from. The following guidelines will describe how the Satpy project structures its code contributions from discussion to code to package release. For more information on contributing to open source projects see `GitHub's Guide `_. What can I do? ============== - Make sure you have a `GitHub account `_. - Submit a ticket for your issue, assuming one does not already exist. - If you're uncomfortable using Git/GitHub, see `Learn Git Branching `_ or other online tutorials. - If you are uncomfortable contributing to an open source project see: * `How to Contribute to an Open Source Project on GitHub `_ video series * Aaron Meurer's `Git Workflow `_ * `How to Contribute to Open Source `_ - See what `issues `_ already exist. Issues marked `good first issue `_ or `help wanted `_ can be good issues to start with. - Read the :doc:`index` for more details on contributing code. - `Fork `_ the repository on GitHub and install the package in development mode. - Update the Satpy documentation to make it clearer and more detailed. - Contribute code to either fix a bug or add functionality and submit a `Pull Request `_. - Make an example Jupyter Notebook and add it to the `available examples `_. What if I break something? ========================== Not possible. If something breaks because of your contribution it was our fault. When you submit your changes to be merged as a GitHub `Pull Request `_ they will be automatically tested and checked against coding style rules. Before they are merged they are reviewed by at least one maintainer of the Satpy project. If anything needs updating, we'll let you know. What is expected? ================= You can expect the Satpy maintainers to help you. We are all volunteers, have jobs, and occasionally go on vacations. We will try our best to answer your questions as soon as possible. We will try our best to understand your use case and add the features you need. Although we strive to make Satpy useful for everyone there may be some feature requests that we can't allow if they would require breaking existing features. Other features may be best for a different package, PyTroll or otherwise. Regardless, we will help you find the best place for your feature and to make it possible to do what you want. We, the Satpy maintainers, expect you to be patient, understanding, and respectful of both developers and users. Satpy can only be successful if everyone in the community feels welcome. We also expect you to put in as much work as you expect out of us. 
There is no dedicated PyTroll or Satpy support team, so there may be times when you need to do most of the work to solve your problem (trying different test cases, environments, etc). Being respectful includes following the style of the existing code for any code submissions. Please follow `PEP8 `_ style guidelines and limit lines of code to 80 characters whenever possible and when it doesn't hurt readability. Satpy follows `Google Style Docstrings `_ for all code API documentation. When in doubt use the existing code as a guide for how coding should be done. .. _dev_help: How do I get help? ================== The Satpy developers (and all other PyTroll package developers) monitor the: - `Mailing List `_ - `Slack chat `_ (get an `invitation `_) - `GitHub issues `_ How do I submit my changes? =========================== Any contributions should start with some form of communication (see above) to let the Satpy maintainers know how you plan to help. The larger the contribution the more important direct communication is so everyone can avoid duplicate code and wasted time. After talking to the Satpy developers any additional work like code or documentation changes can be provided as a GitHub `Pull Request `_. To make sure that your code complies with the pytroll python standard, you can run the `flake8 `_ linter on your changes before you submit them, or even better install a pre-commit hook that runs the style check for you. To this aim, we provide a configuration file for the `pre-commit `_ tool, that you can install with eg:: pip install pre-commit pre-commit install running from your base satpy directory. This will automatically check code style for every commit. Code of Conduct =============== Satpy follows the same code of conduct as the PyTroll project. For reference it is copied to this repository in `CODE_OF_CONDUCT.md `_. As stated in the PyTroll home page, this code of conduct applies to the project space (GitHub) as well as the public space online and offline when an individual is representing the project or the community. Online examples of this include the PyTroll Slack team, mailing list, and the PyTroll twitter account. This code of conduct also applies to in-person situations like PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when the project is being represented. Any violations of this code of conduct will be handled by the core maintainers of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. If you wish to report one of the maintainers for a violation and are not comfortable with them seeing it, please contact one or more of the other maintainers to report the violation. Responses to violations will be determined by the maintainers and may include one or more of the following: - Verbal warning - Ask for public apology - Temporary or permanent ban from in-person events - Temporary or permanent ban from online communication (Slack, mailing list, etc) For details see the official `code of conduct document `_. satpy-0.34.0/LICENSE.txt000066400000000000000000001045131420401153000145530ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. 
The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. 
To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. 
The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. 
This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. 
Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. 
Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. 
For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . satpy-0.34.0/MANIFEST.in000066400000000000000000000002041420401153000144560ustar00rootroot00000000000000include doc/Makefile include doc/source/* include doc/examples/*.py include LICENSE.txt include README.rst include satpy/version.py satpy-0.34.0/README000077700000000000000000000000001420401153000152662README.rstustar00rootroot00000000000000satpy-0.34.0/README.rst000066400000000000000000000053031420401153000144140ustar00rootroot00000000000000Satpy ===== .. image:: https://github.com/pytroll/satpy/workflows/CI/badge.svg?branch=main :target: https://github.com/pytroll/satpy/actions?query=workflow%3A%22CI%22 .. image:: https://coveralls.io/repos/github/pytroll/satpy/badge.svg?branch=main :target: https://coveralls.io/github/pytroll/satpy?branch=main .. 
image:: https://badge.fury.io/py/satpy.svg :target: https://badge.fury.io/py/satpy .. image:: https://anaconda.org/conda-forge/satpy/badges/version.svg :target: https://anaconda.org/conda-forge/satpy/ .. image:: https://zenodo.org/badge/51397392.svg :target: https://zenodo.org/badge/latestdoi/51397392 The Satpy package is a Python library for reading and manipulating meteorological remote sensing data and writing it to various image and data file formats. Satpy comes with the ability to make various RGB composites directly from satellite instrument channel data or higher-level processing output. The `pyresample <https://pyresample.readthedocs.io/>`_ package is used to resample data to different uniform areas or grids. The documentation is available at http://satpy.readthedocs.org/. Installation ------------ Satpy can be installed from PyPI with pip: .. code-block:: bash pip install satpy It is also available from `conda-forge` for conda installations: .. code-block:: bash conda install -c conda-forge satpy Code of Conduct --------------- Satpy follows the same code of conduct as the PyTroll project. For reference it is copied to this repository in CODE_OF_CONDUCT.md_. As stated in the PyTroll home page, this code of conduct applies to the project space (GitHub) as well as the public space online and offline when an individual is representing the project or the community. Online examples of this include the PyTroll Slack team, mailing list, and the PyTroll twitter account. This code of conduct also applies to in-person situations like PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when the project is being represented. Any violations of this code of conduct will be handled by the core maintainers of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. If you wish to report one of the maintainers for a violation and are not comfortable with them seeing it, please contact one or more of the other maintainers to report the violation. Responses to violations will be determined by the maintainers and may include one or more of the following: - Verbal warning - Ask for public apology - Temporary or permanent ban from in-person events - Temporary or permanent ban from online communication (Slack, mailing list, etc) For details see the official CODE_OF_CONDUCT.md_. .. _CODE_OF_CONDUCT.md: ./CODE_OF_CONDUCT.md satpy-0.34.0/RELEASING.md000066400000000000000000000035201420401153000145570ustar00rootroot00000000000000# Releasing Satpy 1. checkout main branch 2. pull from repo 3. run the unittests 4. run `loghub` and update the `CHANGELOG.md` file: ``` loghub pytroll/satpy --token $LOGHUB_GITHUB_TOKEN -st v -plg bug "Bugs fixed" -plg enhancement "Features added" -plg documentation "Documentation changes" -plg backwards-incompatibility "Backward incompatible changes" -plg refactor "Refactoring" ``` This uses a `LOGHUB_GITHUB_TOKEN` environment variable. This must be created on GitHub and it is recommended that you add it to your `.bashrc` or `.bash_profile` or equivalent. This command will create a CHANGELOG.temp file which needs to be added to the top of the CHANGELOG.md file. The same content is also printed to the terminal, so it can be copy-pasted, too. Remember also to update the version number to match the one given in step 5. Don't forget to commit CHANGELOG.md! 5.
Create a tag with the new version number, starting with a 'v', eg: ``` git tag -a v -m "Version " ``` For example if the previous tag was `v0.9.0` and the new release is a patch release, do: ``` git tag -a v0.9.1 -m "Version 0.9.1" ``` See [semver.org](http://semver.org/) on how to write a version number. 6. push changes to github `git push --follow-tags` 7. Verify github action unittests passed. 8. Create a "Release" on GitHub by going to https://github.com/pytroll/satpy/releases and clicking "Draft a new release". On the next page enter the newly created tag in the "Tag version" field, "Version X.Y.Z" in the "Release title" field, and paste the markdown from the changelog (the portion under the version section header) in the "Describe this release" box. Finally click "Publish release". 9. Verify the GitHub actions for deployment succeed and the release is on PyPI. satpy-0.34.0/SECURITY.md000066400000000000000000000017261420401153000145230ustar00rootroot00000000000000# Security Policy ## Supported Versions Satpy is currently pre-1.0 and includes a lot of changes in every release. As such we can't guarantee that releases before 1.0 will see security updates except for the most recent release. After 1.0, you can expect more stability in the interfaces and security fixes to be backported more regularly. | Version | Supported | | ------- | ------------------ | | 0.x.x (latest) | :white_check_mark: | | < 0.33.0 | :x: | ## Unsafe YAML Loading Satpy allows for unsafe loading of YAML configuration files. Any YAML files from untrusted sources should be sanitized of possibly malicious code. ## Reporting a Vulnerability Do you think you've found a security vulnerability or issue in this project? Let us know by sending an email to the maintainers at `pytroll-security@groups.io`. Please include as much information on the issue as possible like code examples, documentation on the issue in other packages, etc. satpy-0.34.0/asv.conf.json000066400000000000000000000164221420401153000153410ustar00rootroot00000000000000{ // The version of the config file format. Do not change, unless // you know what you are doing. "version": 1, // The name of the project being benchmarked "project": "satpy", // The project's homepage "project_url": "https://github.com/pytroll/satpy", // The URL or local path of the source code repository for the // project being benchmarked //"repo": "https://github.com/pytroll/satpy.git", "repo": ".", // The Python project's subdirectory in your repo. If missing or // the empty string, the project is assumed to be located at the root // of the repository. // "repo_subdir": "", // Customizable commands for building, installing, and // uninstalling the project. See asv.conf.json documentation. // //"install_command": ["in-dir={env_dir} python -mpip install {wheel_file} s3fs rasterio h5py netCDF4 pyhdf gcsfs shapely"], // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"], // "build_command": [ // "python setup.py build", // "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}" // ], // List of branches to benchmark. If not provided, defaults to "master" // (for git) or "default" (for mercurial). "branches": ["main"], // for git // "branches": ["default"], // for mercurial // The DVCS being used. If not set, it will be automatically // determined from "repo" by looking at the protocol in the URL // (if remote), or by looking for special directories, such as // ".git" (if local). 
// "dvcs": "git", // The tool to use to create environments. May be "conda", // "virtualenv" or other value depending on the plugins in use. // If missing or the empty string, the tool will be automatically // determined by looking for tools on the PATH environment // variable. //"environment_type": "virtualenv", "environment_type": "conda", // timeout in seconds for installing any dependencies in environment // defaults to 10 min //"install_timeout": 600, // the base URL to show a commit for the project. // "show_commit_url": "http://github.com/owner/project/commit/", // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. // "pythons": ["2.7", "3.6"], "pythons": ["3.9", "3.10"], // The list of conda channel names to be searched for benchmark // dependency packages in the specified order "conda_channels": ["conda-forge"], // The matrix of dependencies to test. Each key is the name of a // package (in PyPI) and the values are version numbers. An empty // list or empty string indicates to just test against the default // (latest) version. null indicates that the package is to not be // installed. If the package to be tested is only available from // PyPi, and the 'environment_type' is conda, then you can preface // the package name by 'pip+', and the package will be installed via // pip (with all the conda available packages installed first, // followed by the pip installed packages). // // "matrix": { // "numpy": ["1.6", "1.7"], // "six": ["", null], // test with and without six installed // "pip+emcee": [""], // emcee is only available for install with pip. // }, "matrix": { "pyresample": ["1.22.3"], "trollimage": ["1.17.0"], "pyorbital": ["1.7.1"], "pyspectral": ["0.10.6"], "rasterio": ["1.2.10"], "dask": ["2021.12.0"], "xarray": ["0.20.2"], "numpy": ["1.22.0"], "s3fs": [], "h5py": [], "netCDF4": [], "pyhdf": [], "gcsfs": [], "shapely": [], "trollsift": [] }, // Combinations of libraries/python versions can be excluded/included // from the set to test. Each entry is a dictionary containing additional // key-value pairs to include/exclude. // // An exclude entry excludes entries where all values match. The // values are regexps that should match the whole string. // // An include entry adds an environment. Only the packages listed // are installed. The 'python' key is required. The exclude rules // do not apply to includes. // // In addition to package names, the following keys are available: // // - python // Python version, as in the *pythons* variable above. // - environment_type // Environment type, as above. // - sys_platform // Platform, as in sys.platform. Possible values for the common // cases: 'linux2', 'win32', 'cygwin', 'darwin'. // // "exclude": [ // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows // {"environment_type": "conda", "six": null}, // don't run without six on conda // ], // // "include": [ // // additional env for python2.7 // {"python": "2.7", "numpy": "1.8"}, // // additional env if run on windows+conda // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, // ], // The directory (relative to the current directory) that benchmarks are // stored in. If not provided, defaults to "benchmarks" // "benchmark_dir": "benchmarks", // The directory (relative to the current directory) to cache the Python // environments in. 
If not provided, defaults to "env" // "env_dir": "env", // The directory (relative to the current directory) that raw benchmark // results are stored in. If not provided, defaults to "results". // "results_dir": "results", // The directory (relative to the current directory) that the html tree // should be written to. If not provided, defaults to "html". // "html_dir": "html", // The number of characters to retain in the commit hashes. // "hash_length": 8, // `asv` will cache results of the recent builds in each // environment, making them faster to install next time. This is // the number of builds to keep, per environment. // "build_cache_size": 2, // The commits after which the regression search in `asv publish` // should start looking for regressions. Dictionary whose keys are // regexps matching to benchmark names, and values corresponding to // the commit (exclusive) after which to start looking for // regressions. The default is to start from the first commit // with results. If the commit is `null`, regression detection is // skipped for the matching benchmark. // // "regressions_first_commits": { // "some_benchmark": "352cdf", // Consider regressions only after this commit // "another_benchmark": null, // Skip regression detection altogether // }, // The thresholds for relative change in results, after which `asv // publish` starts reporting regressions. Dictionary of the same // form as in ``regressions_first_commits``, with values // indicating the thresholds. If multiple entries match, the // maximum is taken. If no entry matches, the default is 5%. // // "regressions_thresholds": { // "some_benchmark": 0.01, // Threshold of 1% // "another_benchmark": 0.5, // Threshold of 50% // }, } satpy-0.34.0/benchmarks/000077500000000000000000000000001420401153000150415ustar00rootroot00000000000000satpy-0.34.0/benchmarks/__init__.py000066400000000000000000000013601420401153000171520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Benchmark satpy.""" satpy-0.34.0/benchmarks/abi_l1b_benchmarks.py000066400000000000000000000053301420401153000211020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Benchmark ABI L1B operations.""" from __future__ import annotations import os from pyspectral.rayleigh import check_and_download as download_luts from pyspectral.rsr_reader import check_and_download as download_rsr from benchmarks.utils import GeoBenchmarks, get_filenames class ABIL1B(GeoBenchmarks): """Benchmark ABI L1B reading.""" timeout = 600 data_files: list[str] = [] subdir = os.path.join("abi_l1b", "20190314_us_midlatitude_cyclone") reader = "abi_l1b" def setup_cache(self): """Fetch the data files.""" try: from satpy.demo import get_us_midlatitude_cyclone_abi get_us_midlatitude_cyclone_abi() except ImportError: if len(get_filenames(self.subdir)) != 16: raise RuntimeError("Existing data files do not match the expected number of files.") download_rsr() download_luts(aerosol_type='rayleigh_only') def setup(self): """Set up the benchmarks.""" import satpy self.data_files = get_filenames(self.subdir) satpy.CHUNK_SIZE = 2048 def time_load_one_channel(self): """Time the loading of one channel.""" self.compute_channel("C01") def peakmem_load_one_channel(self): """Check peak memory usage of loading one channel.""" self.compute_channel("C01") def time_load_true_color(self): """Time the loading of the generation of true_color.""" self.compute_composite("true_color") def peakmem_load_true_color(self): """Check peak memory usage of the generation of true_color.""" self.compute_composite("true_color") def time_save_true_color_nocorr_to_geotiff(self): """Time the generation and saving of true_color_nocorr.""" self.save_composite_as_geotiff("true_color_nocorr") def peakmem_save_true_color_to_geotiff(self): """Check peak memory usage of the generation and saving of true_color_nocorr.""" self.save_composite_as_geotiff("true_color_nocorr") satpy-0.34.0/benchmarks/ahi_hsd_benchmarks.py000066400000000000000000000052361420401153000212150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Benchmark AHI HSD operations..""" from __future__ import annotations import os from pyspectral.rayleigh import check_and_download as download_luts from pyspectral.rsr_reader import check_and_download as download_rsr from benchmarks.utils import GeoBenchmarks, get_filenames class HimawariHSD(GeoBenchmarks): """Benchmark Himawari HSD reading.""" timeout = 600 data_files: list[str] = [] subdir = os.path.join("ahi_hsd", "20210417_0500_typhoon_surigae") reader = 'ahi_hsd' def setup_cache(self): """Fetch the data files.""" try: from satpy.demo import download_typhoon_surigae_ahi download_typhoon_surigae_ahi(channels=[1, 2, 3, 4], segments=[4]) except ImportError: assert len(get_filenames(self.subdir)) == 4 download_rsr() download_luts(aerosol_type='rayleigh_only') def setup(self): """Set up the benchmarks.""" import satpy self.data_files = get_filenames(self.subdir) satpy.CHUNK_SIZE = 2048 def time_load_one_channel(self): """Time the loading of one channel.""" self.compute_channel("B01") def peakmem_load_one_channel(self): """Check peak memory usage of loading one channel.""" self.compute_channel("B01") def time_load_true_color(self): """Time the loading of the generation of true_color.""" self.compute_composite("true_color") def peakmem_load_true_color(self): """Check peak memory usage of the generation of true_color.""" self.compute_composite("true_color") def time_save_true_color_nocorr_to_geotiff(self): """Time the generation and saving of true_color_nocorr.""" self.save_composite_as_geotiff("true_color_nocorr") def peakmem_save_true_color_to_geotiff(self): """Check peak memory usage of the generation and saving of true_color_nocorr.""" self.save_composite_as_geotiff("true_color_nocorr") satpy-0.34.0/benchmarks/fci_benchmarks.py000066400000000000000000000140731420401153000203560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Benchmark FCI. Benchmarks for reading and processing data from the Meteosat Third Generation (MTG) Flexible Combined Imager (FCI). Uses pre-launch simulated test data as published by EUMETSAT in 2020. Also includes some benchmarks trying different resamplers. 
""" from __future__ import annotations import fnmatch import os import satpy import satpy.demo.fci from .utils import GeoBenchmarks class FCI(GeoBenchmarks): """Benchmark FCI FDHSI test data reading.""" timeout = 600 region = "eurol" reader = "fci_l1c_nc" filenames: list[str] = [] def setup_cache(self, *args): """Fetch the data files.""" fns = self.get_filenames() cnt = len(fns) if cnt > 40: raise ValueError(f"Expected 41 files, found {cnt:d}") if cnt < 40: fns = satpy.demo.download_fci_test_data() def setup(self, *args): """Set location of data files.""" self.filenames = self.get_filenames() def get_filenames(self): """Get filenames of FCI test data as already available.""" p = satpy.demo.fci.get_fci_test_data_dir() g = p.glob("UNCOMPRESSED/NOMINAL/*-CHK-BODY-*.nc") return [os.fspath(fn) for fn in g] def time_create_scene(self, chunk): """Time to create a scene.""" names = self._get_filename_selection(chunk) self.create_scene(names) time_create_scene.params = ["some", "all"] # type: ignore time_create_scene.param_names = ["channel subset"] # type: ignore def peakmem_create_scene(self, chunk): """Peak RAM to create a scene.""" names = self._get_filename_selection(chunk) self.create_scene(names) peakmem_create_scene.params = time_create_scene.params # type: ignore peakmem_create_scene.param_names = time_create_scene.param_names # type: ignore def time_load(self, chunk, loadable): """Time to create a scene and load one channel or composite.""" names = self._get_filename_selection(chunk) self.load_no_padding(loadable, names) time_load.params = (time_create_scene.params, # type: ignore ["ir_105", "natural_color_raw"]) time_load.param_names = time_create_scene.param_names + ["dataset"] # type: ignore def peakmem_load(self, chunk, loadable): """Peak RAM to create a scene and load one channel or composite.""" names = self._get_filename_selection(chunk) self.load_no_padding(loadable, names) peakmem_load.params = time_load.params # type: ignore peakmem_load.param_names = time_load.param_names # type: ignore def time_compute(self, chunk, loadable): """Time to create a scene and load and compute one channel.""" names = self._get_filename_selection(chunk) self.compute_channel(loadable, names) time_compute.params = time_load.params # type: ignore time_compute.param_names = time_load.param_names # type: ignore def peakmem_compute(self, chunk, loadable): """Peak memory for creating a scene and loading and computing one channel.""" names = self._get_filename_selection(chunk) self.compute_channel(loadable, names) peakmem_compute.params = time_compute.params # type: ignore peakmem_compute.param_names = time_compute.param_names # type: ignore def time_load_resample_compute(self, chunk, loadable, mode): """Time to load all chunks, resample, and compute.""" names = self._get_filename_selection(chunk) self.compute_composite(loadable, mode, self.region, names) time_load_resample_compute.params = time_load.params + ( # type: ignore ["nearest", "bilinear", "gradient_search"],) time_load_resample_compute.param_names = time_load.param_names + ["resampler"] # type: ignore def peakmem_load_resample_compute(self, chunk, loadable, mode): """Peak memory to load all chunks, resample, and compute.""" names = self._get_filename_selection(chunk) self.compute_composite(loadable, mode, self.region, names) peakmem_load_resample_compute.params = time_load_resample_compute.params # type: ignore peakmem_load_resample_compute.param_names = time_load_resample_compute.param_names # type: ignore def time_load_resample_save(self, 
chunk, loadable, mode): """Time to load all chunks, resample, and save.""" names = self._get_filename_selection(chunk) self.save_composite_as_geotiff(loadable, mode, self.region, names) time_load_resample_save.params = time_load_resample_compute.params # type: ignore time_load_resample_save.param_names = time_load_resample_compute.param_names # type: ignore def peakmem_load_resample_save(self, chunk, loadable, mode): """Peak memory to load all chunks, resample, and save.""" names = self._get_filename_selection(chunk) self.save_composite_as_geotiff(loadable, mode, self.region, names) peakmem_load_resample_save.params = time_load_resample_save.params # type: ignore peakmem_load_resample_save.param_names = time_load_resample_save.param_names # type: ignore def _get_filename_selection(self, selection): if selection == "some": return fnmatch.filter(self.filenames, "*3[0123].nc") if selection == "all": return self.filenames raise ValueError("Expected selection some or all, got " + selection) satpy-0.34.0/benchmarks/seviri_hrit_benchmarks.py000066400000000000000000000051211420401153000221360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
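# A minimal sketch (not part of the packaged benchmarks) of the resampler comparison
# parametrized by FCI.time_load_resample_compute() above, written as plain Satpy calls.
# It assumes the EUMETSAT FCI test data set is available locally, fetched here with the
# same satpy.demo helper the benchmark's setup_cache() falls back to.
from satpy import Scene
from satpy.demo import download_fci_test_data

filenames = download_fci_test_data()

for resampler in ("nearest", "bilinear", "gradient_search"):
    scn = Scene(filenames=filenames, reader="fci_l1c_nc")
    scn.load(["natural_color_raw"], pad_data=False)  # no segment padding, as in load_no_padding()
    local = scn.resample("eurol", resampler=resampler)  # same target area as FCI.region
    local["natural_color_raw"].compute()  # force the dask graph, like the timed case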
"""Benchmark SEVIRI HRIT operations.""" from __future__ import annotations import os from pyspectral.rayleigh import check_and_download as download_luts from pyspectral.rsr_reader import check_and_download as download_rsr from benchmarks.utils import GeoBenchmarks, get_filenames class SEVIRIHRIT(GeoBenchmarks): """Benchmark SEVIRI HRIT reading.""" timeout = 600 data_files: list[str] = [] subdir = os.path.join("seviri_hrit", "20180228_1500") reader = "seviri_l1b_hrit" def setup_cache(self): """Fetch the data files.""" try: from satpy.demo import download_seviri_hrit_20180228_1500 download_seviri_hrit_20180228_1500() except ImportError: assert len(get_filenames(self.subdir)) == 114 download_rsr() download_luts(aerosol_type='rayleigh_only') def setup(self): """Set up the benchmarks.""" import satpy self.data_files = get_filenames(self.subdir) satpy.CHUNK_SIZE = 2048 def time_load_one_channel(self): """Time the loading of one channel.""" self.compute_channel("VIS006") def peakmem_load_one_channel(self): """Check peak memory usage of loading one channel.""" self.compute_channel("VIS006") def time_load_overview(self): """Time the loading of the generation of overview.""" self.compute_composite("overview") def peakmem_load_overview(self): """Check peak memory usage of the generation of overview.""" self.compute_composite("overview") def time_save_overview_to_geotiff(self): """Time the generation and saving of overview.""" self.save_composite_as_geotiff("overview") def peakmem_save_overview_to_geotiff(self): """Check peak memory usage of the generation and saving of overview.""" self.save_composite_as_geotiff("overview") satpy-0.34.0/benchmarks/utils.py000066400000000000000000000053731420401153000165630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Benchmark utilities.""" import os def get_filenames(subdir): """Get the data filenames manually.""" import glob base_dir = os.environ.get("SATPY_DEMO_DATA_DIR", ".") return glob.glob(os.path.join(base_dir, subdir, "*")) class GeoBenchmarks: """Class for geo benchmarks.""" def create_scene(self, filenames=None): """Create a scene.""" from satpy import Scene scn = Scene(filenames=filenames or self.data_files, reader=self.reader) return scn def load_no_padding(self, composite, filenames=None): """Load one composite or channel.""" scn = self.create_scene(filenames=filenames) scn.load([composite], pad_data=False) return scn def load_and_native_resample(self, composite): """Load and native resample a composite or channel.""" return self.load_and_resample(composite, "native") def load_and_resample(self, composite, resampler, area=None, filenames=None): """Load and resample a composite or channel with resampler and area.""" scn = self.load_no_padding(composite, filenames=filenames) ls = scn.resample(area, resampler=resampler) ls._readers = scn._readers # workaround for GH#1861 return ls def compute_composite(self, composite, resampler="native", area=None, filenames=None): """Compute a true color image.""" lscn = self.load_and_resample( composite, resampler, area, filenames) lscn[composite].compute() def save_composite_as_geotiff(self, composite, resampler="native", area=None, filenames=None): """Save a composite to disk as geotiff.""" lscn = self.load_and_resample(composite, resampler, area, filenames) lscn.save_dataset(composite, filename='test.tif', tiled=True) def compute_channel(self, channel, filenames=None): """Load and compute one channel.""" scn = self.load_no_padding(channel, filenames=filenames) scn[channel].compute() satpy-0.34.0/benchmarks/viirs_sdr_benchmarks.py000066400000000000000000000101101420401153000216050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Benchmark VIIRS SDR operations..""" from __future__ import annotations import glob import os from pyspectral.rayleigh import check_and_download as download_luts from pyspectral.rsr_reader import check_and_download as download_rsr class VIIRSSDRBenchmarkBase: """Shared methods for working with VIIRS SDR data.""" timeout = 600 data_files: list[str] = [] def setup_cache(self): """Fetch the data files.""" try: from satpy.demo import get_viirs_sdr_20170128_1229 get_viirs_sdr_20170128_1229( channels=("I01", "M03", "M04", "M05"), granules=(2, 3, 4)) except ImportError: assert len(self.get_filenames()) == 6 * 3 download_rsr() download_luts(aerosol_type='rayleigh_only') def setup(self, name): """Set up the benchmarks.""" import satpy self.data_files = self.get_filenames() satpy.CHUNK_SIZE = 2048 def get_filenames(self): """Get the data filenames manually.""" base_dir = os.environ.get("SATPY_DEMO_DATA_DIR", ".") return glob.glob(os.path.join(base_dir, "viirs_sdr", "20170128_1229", "*.h5")) def load(self, composite): """Load one composite.""" from satpy import Scene scn = Scene(filenames=self.data_files, reader='viirs_sdr') scn.load([composite]) return scn def load_and_native_resample(self, composite): """Load and native resample a composite.""" scn = self.load(composite) lscn = scn.resample(resampler='native') return lscn class VIIRSSDRReaderBenchmarks(VIIRSSDRBenchmarkBase): """Benchmark reading and writing VIIRS SDR data.""" params = ["I01", "M03"] param_names = ["name"] def time_load_one_channel(self, name): """Time the loading of one channel.""" self.compute_product(name) def peakmem_load_one_channel(self, name): """Check peak memory usage of loading one channel.""" self.compute_product(name) def compute_product(self, name): """Load and compute one channel.""" scn = self.load(name) scn[name].compute() class VIIRSSDRCompositeBenchmarks(VIIRSSDRBenchmarkBase): """Benchmark generating and writing composites from VIIRS SDR data.""" params = ["true_color", "true_color_crefl", "true_color_raw"] param_names = ["name"] def time_load_composite(self, name): """Time the loading of the generation of a composite.""" self.compute_composite(name) def peakmem_load_composite(self, name): """Check peak memory usage of the generation of a composite.""" self.compute_composite(name) def time_save_composite_to_geotiff(self, name): """Time the generation and saving of a composite.""" self.save_composite_as_geotiff(name) def peakmem_save_composite_raw_to_geotiff(self, name): """Check peak memory usage of the generation and saving of a composite.""" self.save_composite_as_geotiff(name) def compute_composite(self, name): """Compute a composite.""" lscn = self.load_and_native_resample(name) lscn[name].compute() def save_composite_as_geotiff(self, name): """Save a composite to disk as geotiff.""" lscn = self.load_and_native_resample(name) lscn.save_dataset(name, filename='test.tif', tiled=True) satpy-0.34.0/changelog_pre0.9.0.rst000066400000000000000000006026701420401153000166530ustar00rootroot00000000000000Changelog ========= v0.8.1 (2018-01-19) ------------------- Fix ~~~ - Bugfix: Fix so the Himawari platform name is a string and not a numpy array. [Adam.Dybbroe] - Bugfix: The satellite azimuth returned by PyOrbital is not in the range -180 to 180 as was expected. [Adam.Dybbroe] Other ~~~~~ - Update changelog. [Martin Raspaud] - Bump version: 0.8.0 → 0.8.1. [Martin Raspaud] - Merge pull request #162 from pytroll/bugfix-pyorbital-azimuth- difference. 
[Martin Raspaud] Bugfix: The satellite azimuth returned by PyOrbital is not in the ran… - Merge pull request #154 from pytroll/bugfix-viirs-truecolor- ratiosharpening. [Martin Raspaud] Add a rayleigh_correction modifier for I-bands, - Add a rayleigh_correction modifier for I-bands, which is refered to in the ratio-sharpened true color and natural_color RGBs. [Adam.Dybbroe] - Fix backwards compatibility with scene instantiation. [Martin Raspaud] v0.8.0 (2018-01-11) ------------------- Fix ~~~ - Bugfix: Explicitly set the resolution for sun-satellite geometry for the Rayleigh correction modifiers needed for True Color imagery. [Adam.Dybbroe] Other ~~~~~ - Update changelog. [Martin Raspaud] - Bump version: 0.7.8 → 0.8.0. [Martin Raspaud] - Merge pull request #152 from pytroll/bugfix-truecolor-viirs. [Martin Raspaud] Bugfix: Explicitly set the resolution for sun-satellite geometry - Bugfix viirs_sdr reader: Use correct sunz corrector for ibands. [Adam.Dybbroe] - Merge pull request #91 from pytroll/feature-discover-utility. [Martin Raspaud] Separate find files utility - Merge branch 'develop' into feature-discover-utility. [David Hoese] - Refactor all of the documentation and fix various docstrings. [davidh- ssec] - Update documentation index and installation instructions. [davidh- ssec] - Merge branch 'develop' into feature-discover-utility. [davidh-ssec] # Conflicts: # satpy/readers/mipp_xrit.py # satpy/tests/test_readers.py # satpy/utils.py - Add filename filtering and tests for find_files_and_readers. [davidh- ssec] - Remove unused strftime function. [davidh-ssec] - Fix behavior tests and other necessary changes to fix file discovery. [davidh-ssec] - Update Scene and reader loading docstrings. [davidh-ssec] - Move reader start_time and end_time to filter_parameters. [davidh- ssec] Includes a first attempt at updating mipp_xrit to work with this - Fix `load_readers` tests after changing from ReaderFinder. [davidh- ssec] - Remove 'sensor' functionality from Scene init and clean reader loading. [davidh-ssec] - Fix behavior tests. [davidh-ssec] - Move file finding functionality to a separate utility function. [davidh-ssec] - Move ABI simulated green calculation to a separate function. [davidh- ssec] - Merge pull request #149 from pytroll/truecolor-red-channel-corr. [Martin Raspaud] Truecolor uses red channel as base for rayleigh correction - Fix indentation error in viirs.yaml. [Martin Raspaud] - Merge branch 'develop' into truecolor-red-channel-corr. [Martin Raspaud] - Remove marine-clean true color recipe, as it was the same as the standard recipe. [Adam.Dybbroe] - Bugfix abi true color recipes. [Adam.Dybbroe] - Apply consistency in true color imagery across sensors. Adding for land and sea variants. [Adam.Dybbroe] - Use the red band in the damping of the atm correction over reflective targets. [Adam.Dybbroe] v0.7.8 (2018-01-11) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.7.7 → 0.7.8. [Martin Raspaud] - Merge pull request #148 from pytroll/feature-utils. [Martin Raspaud] Fix platform name reading for ahi hsd reader in py3 - Fix platform name reading for ahi hsd reader in py3. [Martin Raspaud] This patch also factorizes some code to a np2str function that takes care of converting np.string_ to str - Merge pull request #130 from pytroll/ahi_truecolor. [Martin Raspaud] Use the cira stretch also for the true_color_ahi_default - Use consistent standard_name naming. [Adam.Dybbroe] - Fix for Himawari true colors at different resolutions. 
[Adam.Dybbroe] - Use the cira stretch also for the true_color_ahi_default. [Adam.Dybbroe] - Merge pull request #141 from pytroll/pep8. [Martin Raspaud] Remove unused imports and use pep8-ify - Remove unused imports and use pep8-ify. [Adam.Dybbroe] - Merge pull request #145 from pytroll/fix-refl37-rgbs. [Martin Raspaud] Add snow RGB, add r37-based and natural RGB recipes specific to SEVIRI, and fix sun-zenith correction - When doing atm correction with pass the band name rather than the wavelength to Pyspectral, as the latter may be ambigous. [Adam.Dybbroe] - Explain how the 3.x reflectance needs to be derived before getting the emissive part. [Adam.Dybbroe] - Removing the two protected internal variables: self._nir and self._tb11. [Adam.Dybbroe] - Add new recipes for daytime-cloudtop RGBs using Pyspectral to remove the reflective part of the 3.x signal. [Adam.Dybbroe] - Add method initiating the reflectance/emissive calculations. [Adam.Dybbroe] - Update __init__.py. [Adam Dybbroe] Replaced "dummy" with "_" - Add a NIR (3.x micron band) emissive RGB provided by new pyspectral. [Adam.Dybbroe] - Adapt method call to latest pyspectral. [Adam.Dybbroe] - Fix so it is possible to derive 3.7 micron reflective RGBs from both VIIRS I- and M-bands. [Adam.Dybbroe] - Add snow RGBs for VIIRS for both M- and I-bands. [Adam.Dybbroe] - Add snow RGB, add r37-based and natural RGB recipes specific to SEVIRI, and fix sun-zenith correction. [Adam.Dybbroe] - Merge pull request #143 from pytroll/noaa-20-platform-naming. [Martin Raspaud] Fix platform_name for NOAA-20 and -21 - Fix platform_name for NOAA-20 and -21. [Adam.Dybbroe] v0.7.7 (2017-12-21) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.6 → 0.7.7. [davidh-ssec] - Merge pull request #140 from pytroll/bugfix-scmi-signed. [David Hoese] Bugfix scmi signed integer data variables - Add ipython tab completion for scene keys. [davidh-ssec] - Fix SCMI writer because AWIPS doesn't like unsigned integers. [davidh- ssec] Using the entire 16-bit unsigned integer space displays fine in AWIPS but it doesn't handle them correctly when adding derived parameters. Meaning once the data goes in to a python script and gets converted to a signed interger...yeah. This change makes it so data is a signed 16-bit integer that only uses the positive half of the bit space. - Merge pull request #138 from pytroll/bugfix-modis-reader. [David Hoese] WIP: Fix readers not returning the highest resolution dataset IDs - Add more file patterns to hdfeos_l1b reader. [davidh-ssec] - Fix requesting a specific resolution from a reader. [davidh-ssec] - Merge remote-tracking branch 'origin/fix-resolution' into bugfix- modis-reader. [davidh-ssec] - Allow providing resolution when loading a composite. [Martin Raspaud] - Fix hdfeos_l1b reader not knowing what resolution of datasets it had. [davidh-ssec] - Fix interpolation problem at 250m resolution. [Martin Raspaud] - Fix readers not returning the highest resolution dataset IDs. [davidh- ssec] - Merge pull request #139 from pytroll/bugfix-viirs-l1b. [David Hoese] Fix VIIRS L1B to work with JPSS-1 and new NASA filenames - Fix VIIRS L1B to work with JPSS-1 and new NASA filenames. [davidh- ssec] - Clean up style. [Martin Raspaud] - Fix lon/lat caching in hdfeos_l1b for different resolutions. [Martin Raspaud] Fixes #132 - Merge pull request #137 from pytroll/logging_corrupted_file. 
[Martin Raspaud] When opening/reading a nc or hdf file fails, be verbose telling which file it is that fails - When opening/reading a file fails, be verbose telling which file it is that fails. [Adam.Dybbroe] - Merge pull request #134 from howff/hdfeos_l1b_ipopp_filenames. [Martin Raspaud] Added IPOPP-style MODIS-L1b filenames - Update doc re. IMAPP and IPOPP. [Andrew Brooks] - Added IPOPP-style MODIS-L1b filenames. [Andrew Brooks] v0.7.6 (2017-12-19) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.7.5 → 0.7.6. [Martin Raspaud] - Merge pull request #135 from pytroll/viirs_truecolor_config_error. [Martin Raspaud] Replace effective_solar_pathlength_corrected with the standard sunz-corrected - Replace effective_solar_pathlength_corrected witn the standard sunz- correction. VIIRS data are already sun-zenith corrected. [Adam.Dybbroe] - Update documentation to add hrit_goes. [Martin Raspaud] - Fix GOES navigation. [Martin Raspaud] - Finalize GOES LRIT reader. [Martin Raspaud] - Merge pull request #39 from howff/develop. [Martin Raspaud] Reader for GOES HRIT, WIP - Fix available_composite_names in doc. [Andrew Brooks] - Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [Andrew Brooks] - Start of reader for GOES HRIT. [howff] - Update PULL_REQUEST_TEMPLATE.md. [Martin Raspaud] This hides the comments when the PR is previewed and reminds user to provide a description for the PR. - Merge pull request #122 from eysteinn/scatsat1. [Martin Raspaud] Add reader for ScatSat1 Level 2B wind speed data, HDF5 format - Read end_time info correctly. [Eysteinn] - Add reader for ScatSat1 Level 2B wind speed data. [Eysteinn] - Merge pull request #129 from pytroll/viirs_rgbs. [Martin Raspaud] Use the Pyspectral atm correction as the default. - Use the Pyspectral atm correction as the default. Add a high-res overview RGB, use the hncc-dnb in the night-microphysics and use the effective_solar_pathlength_corrected for all true color RGBs. [Adam.Dybbroe] - Merge pull request #128 from pytroll/atm_corrections. [Martin Raspaud] Atm corrections - Pep8 cosmetics. [Adam.Dybbroe] - Pep8 cosmetics. [Adam.Dybbroe] - Pep8 editorial, and fixing copyright. [Adam.Dybbroe] - Add some pre-defined atm/rayleigh corrections to appply over land and sea. [Adam.Dybbroe] - Merge pull request #131 from pytroll/bugfix-hrit-jma. [Martin Raspaud] Bugfix hrit_jma - Bugfix hrit_jma. [Martin Raspaud] - Use a more appropriate and shorter link to the MSG native format pdf doc. [Adam.Dybbroe] - Merge pull request #126 from pytroll/feature_ahi_stretch. [Martin Raspaud] Improvemements to AHI True color imagery - Use marine_clean and us-standard for atm correction, and improve stretch at low sun elevation. [Adam.Dybbroe] - Use the CIRA stretch for True color imagery. [Adam.Dybbroe] v0.7.5 (2017-12-11) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.4 → 0.7.5. [davidh-ssec] - Remove unused legacy .cfg files. [davidh-ssec] - Merge branch 'master' into develop. [davidh-ssec] - Merge pull request #118 from mitkin/master. [Martin Raspaud] Add file pattern for MODIS L1B from LAADS WEB - Add file pattern for MODIS L1B from LAADS WEB. [Mikhail Itkin] NASA's LAADS WEB pattern is slightly different - Remove old and unused mipp_xrit reader. [davidh-ssec] - Fix SCMI writer not overwriting data from previous tiles. [davidh- ssec] - Merge pull request #121 from pytroll/fix-ir-modifiers. [Martin Raspaud] Remove VIIRS SDR IR modifiers - Remove sun zenith angle correction from IR channels. 
[Panu Lahtinen] - Add github templates for issues and PRs. [Martin Raspaud] - Bugfix epsl1b reader. [Martin Raspaud] - Merge pull request #107 from pytroll/fix-nwcsaf-proj4. [David Hoese] Convert NWC SAF MSG projection string to meters - Merge branch 'fix-nwcsaf-proj4' of https://github.com/pytroll/satpy into fix-nwcsaf-proj4. [Panu Lahtinen] - Merge branch 'fix-nwcsaf-proj4' of https://github.com/pytroll/satpy into fix-nwcsaf-proj4. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. [Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. [Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. [Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Merge pull request #111 from eysteinn/sentinel1-reproject. [David Hoese] Fixed area information to safe_sar_c reader to allow for resampling - Added coordinates to sar_c.yaml to allow for reprojection. [Eysteinn] - Merge pull request #108 from TAlonglong/feature-decorate. [Martin Raspaud] Feature decorate - __init__.py docstring in a few add pydecorate features. [Trygve Aspenes] - Satpy/writers/__init__.py implement more general way of handling pydecorate calls from satpy save_dataset. Instead of logo and text separate, use decorate. This needs to be a list to keep the order of alignment available in pydecorate. Since the argument to add_decorate needs to be a mapping it may look like this: decorate={'decorate':[{'logo':{...}},{'text':{...}},...]} [Trygve Aspenes] - Merge branch 'develop' into develop-fork. [Trygve Aspenes] - Satpy/writers/__init__.py added add_text function. This is meant to be used when calling save_dataset to add text to an image using pydecorate. eg save_dataset(...., text_overlay={'text': 'THIS IS THE TEXT TO BE ADDED', 'align':{'top_bottom':'bottom', 'left_right':'right'}, 'font':'/usr/share/fonts/truetype/msttcorefonts/Arial.ttf', 'font_size':25, 'height':30, 'bg':'black', 'bg_opacity':255, 'line':'white'}). Not all options available as style in pydecorate are implemented. This is left TODO. This PR is dependent on https://github.com/pytroll/pydecorate/pull/3 to be completed. [Trygve Aspenes] - Adding to more options to add_overlay. This to better control which levels of coast(GSHHS) and borders (WDB_II) are put on the plot. [Trygve Aspenes] - Merge pull request #88 from pytroll/feature-3d-enhancement. [Panu Lahtinen] Add 3D enhancement, fix BWCompositor - Merge branch 'feature-3d-enhancement' of https://github.com/pytroll/satpy into feature-3d-enhancement. 
[Panu Lahtinen] - Add example of composite with 3D effect. [Panu Lahtinen] - Fix BWCompositor to handle info correctly. [Panu Lahtinen] - Add 3D effect enhancement. [Panu Lahtinen] - Remove rebase comments. [Panu Lahtinen] - Add example of composite with 3D effect. [Panu Lahtinen] - Fix BWCompositor to handle info correctly. [Panu Lahtinen] - Add 3D effect enhancement. [Panu Lahtinen] - Merge pull request #87 from pytroll/feature-IASI-L2-reader. [Panu Lahtinen] Add IASI L2 reader - Merge branch 'feature-IASI-L2-reader' of https://github.com/pytroll/satpy into feature-IASI-L2-reader. [Panu Lahtinen] - Merge branch 'feature-IASI-L2-reader' of https://github.com/pytroll/satpy into feature-IASI-L2-reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Merge branch 'develop' into feature-IASI-L2-reader. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. [Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. [Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. [Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Merge pull request #96 from eysteinn/create_colormap. [David Hoese] Create colormap - Make colorizing/palettizing more flexible. [Eysteinn] - Merge pull request #4 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #3 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #109 from pytroll/bugfix-scmi. [David Hoese] Fix SCMI writer and add more tiled grids - Fix SCMI writer writing masked geolocation to netcdf files. [davidh- ssec] - Add additional GOES SCMI grids. [davidh-ssec] - Allow adding overlay for L and LA images. [Martin Raspaud] - Merge pull request #101 from pytroll/bugfix-scmi3. [David Hoese] Fix python 3 compatibility in scmi writer - Add more SCMI writer tests for expected failures. [davidh-ssec] - Fix python 3 compatibility in scmi writer. [davidh-ssec] Includes fix for X/Y coordinate precision which affects GOES-16 data - Merge pull request #105 from howff/doc-fix. [Martin Raspaud] fix available_composite_names in doc - Fix available_composite_names in doc. [Andrew Brooks] v0.7.4 (2017-11-13) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.3 → 0.7.4. [davidh-ssec] - Update changelog. [davidh-ssec] - Fix physical_element for VIIRS M07 in SCMI writer. [davidh-ssec] - Merge pull request #97 from pytroll/feature-optimize-scmi. [David Hoese] Optimize SCMI writer to reuse results of tile calculations - Fix area id in SCMI writer to be more specific. [davidh-ssec] - Optimize SCMI writer to reuse results of tile calculations. [davidh- ssec] It uses a little bit more memory, but speeds up the processing by quite a bit when tested under the Polar2Grid equivalent. 
- Fix floating point saving for geotiff. [Martin Raspaud] - Merge pull request #93 from pytroll/bugfix-user-enhancements. [David Hoese] Fix enhancement config loading when user configs are present - Fix enhancement config loading when user configs are present. [davidh- ssec] v0.7.3 (2017-10-24) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.2 → 0.7.3. [davidh-ssec] - Merge branch 'develop' into new_release. [davidh-ssec] - Fix mock import in unittest. [davidh-ssec] mock should come from the unittest package in python 3+ - Merge pull request #90 from pytroll/bugfix-scmi-writer. [David Hoese] Fix SCMI writer to use newest version of pyresample - Fix SCMI writer to use newest version of pyresample. [davidh-ssec] - Adjust extents to kilometers. [Panu Lahtinen] - Merge pull request #86 from pytroll/bugfix-resample-setitem. [David Hoese] Fix resampling when a dataset was added via setitem and a test for it - Fix resampling when a dataset was added via setitem and a test for it. [davidh-ssec] Includes removing python 3.3 from travis tests - Merge pull request #84 from eysteinn/composite-snowage-fix. [Martin Raspaud] Composite snowage fix - Expand the dynamic of the channels up to 255 before to combine them: (0,1.6) => (0,255) [Eysteinn] - Merge pull request #2 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #85 from pytroll/feature-fullres-abi-tc. [David Hoese] Feature fullres abi tc - Fix geocat tests. [davidh-ssec] - Fix bug in geocat reader and SCMI writer. [davidh-ssec] Caused incorrect H8 and GOES-16 geolocation - Fix reader metaclass with newer versions of six. [davidh-ssec] - Fix metadata in ABI true color. [davidh-ssec] - Fix ABI true color averaging. [davidh-ssec] - Fix DatasetID comparison in python 3 and add test for it. [davidh- ssec] - Fix super call in ABI true color 2km class. [davidh-ssec] - Add writers yaml files to setup.py. [davidh-ssec] - Create sharpened full resolution ABI true color. [davidh-ssec] - Merge pull request #81 from loreclem/develop. [Martin Raspaud] Develop - Added some doc. [lorenzo clementi] - Fixed missing import. [lorenzo clementi] - Bugfix (typo) [lorenzo clementi] - First working version of ninjo converter. [lorenzo clementi] - Improved generic reader, removed useles bitmap composite. [lorenzo clementi] - Bugfix in the generic image reader. [lorenzo clementi] - Draft generic image reader. [lorenzo clementi] - Merge pull request #80 from pytroll/solar-pathlength-correction. [Martin Raspaud] Solar pathlength correction and Rayleigh correction interface - Fix anti pattern: Not using get() to return a default value from a dict. [Adam.Dybbroe] - Introduce an alternative sun-zenith correction algorithm, and fix rayleigh/aerosol correction so atmosphere and aerosol type can be specified in the config files. [Adam.Dybbroe] - Merge branch 'develop' into solar-pathlength-correction. [Adam.Dybbroe] - Maia reader (#79) [roquetp] * not finalised version : problem with standard name * Fix maia reader for simple loading * working version with CM and CT * add Datasets and fix the problem with end_time. * Add a exemple for read MAIA files * Add maia reader * fix on maia name * add reference on the test case * autopep8 on the example polar_maia.py and add the reference of the data test case * maia-reader : clean and pep8 * add reference documentation v0.7.2 (2017-09-18) ------------------- Fix ~~~ - Bugfix: Get the solar zenith angle. [Adam.Dybbroe] Other ~~~~~ - Update changelog. 
[davidh-ssec] - Bump version: 0.7.1 → 0.7.2. [davidh-ssec] - Merge pull request #67 from pytroll/feature-scmi-writer. [David Hoese] Feature scmi writer - Fix SCMI lettered grid test to not create huge arrays. [davidh-ssec] - Fix SCMI test so it actually uses lettered grids. [davidh-ssec] - Add more SCMI writer tests and documentation. [davidh-ssec] - Fix geocat reader for better X/Y coordinate estimation. [davidh-ssec] - Add really basic SCMI writer test. [davidh-ssec] - Fix SCMI debug tile generation. [davidh-ssec] - Add debug tile creation to SCMI writer. [davidh-ssec] - Fix SCMI writer for lettered grids. [davidh-ssec] - Fix numbered tile counts for SCMI writer. [davidh-ssec] - Add initial SCMI writer. [davidh-ssec] WIP: Multiple tiles, lettered tiles, debug images - Separate EnhancementDecisionTree in to base DecisionTree and subclass. [davidh-ssec] - Add 'goesr' as possible platform in geocat reader. [davidh-ssec] - Add SCMI and geotiff writer extras to setup.py. [davidh-ssec] - Add GOES-16 filename to geocat config. [davidh-ssec] - Merge pull request #69 from pytroll/modis-viewing-geometry-and-atm- correction. [Martin Raspaud] Modis viewing geometry and atm correction - Modis true_color atm corrected with pyspectral. [Adam.Dybbroe] - Merge branch 'develop' into modis-viewing-geometry-and-atm-correction. [Adam.Dybbroe] - Merge pull request #73 from pytroll/cira-stretch-numpy-1-13-issue. [Martin Raspaud] Add unittest for cira_stretch and fix it for numpy >=1.13 - Bugfix unittest suite. [Adam.Dybbroe] - Fix cira_stretch to work despite broken numpy (numpy issue 9687) [Adam.Dybbroe] - Smaller unittest example, and fixed. Works for numpy < 1.13 only though. [Adam.Dybbroe] - Add unittest for cira_stretch and fix it for numpy >=1.13. [Adam.Dybbroe] - Merge pull request #75 from pytroll/feature_realistic_colors. [Martin Raspaud] Realistic colors composite for SEVIRI - Merge branch 'develop' into feature_realistic_colors. [Martin Raspaud] - Merge branch 'develop' into feature_realistic_colors. [Martin Raspaud] - Add RealisticColors compositor for SEVIRI. [Panu Lahtinen] - Use array shape instead of possibly non-existent lon array shape. [Panu Lahtinen] - Adjust mask size when number of channels is changed when enhancing. [Panu Lahtinen] - Merge pull request #71 from eysteinn/composite-snowage. [Martin Raspaud] added snow_age viirs composite & lookup table enhancement - Merge branch 'develop' into composite-snowage. [Martin Raspaud] - Ch out is explicit. [Eysteinn] - Allows any number of channels. [Eysteinn] - Allows any number of channels. [Eysteinn] - Fixed satpy/etc/enhancements/generic.yaml. [Eysteinn] - Added snow_age viirs composite & lookup table enhancement. [Eysteinn] - Merge pull request #72 from pytroll/feature_day-night_compositor. [Martin Raspaud] Add DayNightCompositor - Add DayNightCompositor and example composite and enhancement configs. [Panu Lahtinen] - Merge pull request #74 from eysteinn/composite-seviri. [Martin Raspaud] Composite seviri - .changed night_overview to ir_overview. [Eysteinn] - Added night_overview to seviri. [Eysteinn] - Added night_microphysics to visir. [Eysteinn] - Merge pull request #68 from pytroll/feature_palette_enhancement. [Panu Lahtinen] Merged. - Update with palettize() and clarify usage. [Panu Lahtinen] - Refactor using _merge_colormaps() instead of dupplicate code. [Panu Lahtinen] - Add palettize() [Panu Lahtinen] - Fix typo. [Panu Lahtinen] - Add user palette colorization to quickstart documentation. 
[Panu Lahtinen] - Add palettize enhancement and colormap creation from .npy files. [Panu Lahtinen] - Add sun-sat viewing angles and support for atm correction. [Adam.Dybbroe] - Bugfix atm correction. [Adam.Dybbroe] - Merge pull request #65 from pytroll/feature_bwcompositor. [Martin Raspaud] Feature bwcompositor - Undo line wrapping done by autopep8. [Panu Lahtinen] - Add single channel compositor. [Panu Lahtinen] - Merge pull request #66 from loreclem/master. [Martin Raspaud] Added test to check the 1.5 km georeferencing shift - Added test to check whether to apply the 1.5 km georeferencing correction or not. [lorenzo clementi] - Add ir atm correction, and new airmass composite using this correction. [Adam.Dybbroe] - Change writer configs from INI (.cfg) to YAML (#63) [David Hoese] * Change writer configs from INI (.cfg) to YAML * Add very simple writer tests and fix writer load from Scene - Merge pull request #59 from pytroll/feature-geocat-reader. [David Hoese] Add geocat reader - Add CLAVR-x reader to documentation. [davidh-ssec] - Add geocat reader to documentation. [davidh-ssec] - Fix a few styling issues in geocat reader. [davidh-ssec] - Add python-hdf4 and HDF4 C library to travis dependencies. [davidh- ssec] - Add HDF4 utils tests. [davidh-ssec] - Add geocat unit tests. [davidh-ssec] - Add geocat reader. [davidh-ssec] v0.7.1 (2017-08-29) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.7.0 → 0.7.1. [Martin Raspaud] - Fix style. [Martin Raspaud] - Fix hdf4 lib name in dependencies. [Martin Raspaud] - Rename optional dependencies for hdfeos to match reader name. [Martin Raspaud] - Rename mda with metadata in hdfeos_l1b reader. [Martin Raspaud] - Add overview composite for modis. [Martin Raspaud] - Do not guess end time when filtering a filename. [Martin Raspaud] - Add optional dependencies for viirs_compact. [Martin Raspaud] - Fix abi_l1b test again. [Martin Raspaud] - Fix abi_l1b tests. [Martin Raspaud] - Fix sweep axis parameter reading in py3 for abi_l1b. [Martin Raspaud] - Support py3 in abi_l1b. [Martin Raspaud] - Add optional dependencies for abi_l1b. [Martin Raspaud] - Merge pull request #58 from pytroll/metadata-filtering. [Martin Raspaud] Metadata filtering - Fix filehandler unit test to use filename_info as a dict. [Martin Raspaud] - Implement suggested style changes. [Martin Raspaud] See conversation in PR #58 - Finish fixing 0° Service to 0DEG. [Martin Raspaud] - Fix Meteosat numbers to remove leading 0. [Martin Raspaud] - Change HRIT base service to 0DEG. [Martin Raspaud] - Change HRIT MSG patterns to explicit `service` [Martin Raspaud] - Correct unit tests for metadata filtering compatibility. [Martin Raspaud] - Add metadata filtering of filehandlers. [Martin Raspaud] - Replace filter by list comprehension for py3 compatibility. [Martin Raspaud] - Check area compatibility before merging channels in RGBCompositor. [Martin Raspaud] - Add overview for ABI. [Martin Raspaud] - Add EUM file patterns for ABI. [Martin Raspaud] - Avoid crash when pattern matching on file crashes. [Martin Raspaud] - Fix clavrx reader when filenames don't have end_time. [davidh-ssec] - Add optional dependencies for sar_c. [Martin Raspaud] - Fix h5py py3 issues with byte arrays as strings. [Martin Raspaud] - Add optional dependency for the nc_nwcsaf_msg reader. [Martin Raspaud] - Fix hrit_msg reading for py3. [Martin Raspaud] - Add optional dependency for the hrit_msg reader. [Martin Raspaud] - Add platform_name and service to msg metadata. 
[Martin Raspaud] - Bugfix in MSG acquisition time metadata. [Martin Raspaud] - Fix xRIT end time to follow specifications. [Martin Raspaud] v0.7.0 (2017-08-15) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.6.2 → 0.7.0. [Martin Raspaud] - Fix support for OMPS EDRs from other NASA sources. [davidh-ssec] Fix #57 - Change 'ncc_zinke' composite name to 'hncc_dnb' [davidh-ssec] Includes changes to code to make sure that things we think are floats actually are floats. - Fix major bug that stopped certain composites from being loadable. [davidh-ssec] If a composite modified (added information) to the DatasetID of its returned Dataset then the wishlist was not properly modified. This resulted in the Dataset being unloaded and seen as "unneeded". There was a test for this, but it wasn't working as expected. - Update ABI scale factors to be 64-bit floats to improve X/Y calculations. [davidh-ssec] In other applications I have noticed that the in-file 32-bit factor and offset produce a noticeable drift in the per-pixel X/Y values. When converted to 64-bit to force 64-bit arithmetic the results are closer to the advertised pixel resolution of the instrument. - Add 'reader' name metadata to all reader datasets. [davidh-ssec] - Add flag_meanings to clavrx reader. [davidh-ssec] Includes addition of /dtype to hdf4/hdf5/netcdf file handlers - Fix area unit conversion. [Martin Raspaud] - Fix the path to the doc to test. [Martin Raspaud] - Fix some documentation. [Martin Raspaud] - Fix area hashing in resample caching. [davidh-ssec] - Add better error when provided enhancement config doesn't exist. [davidh-ssec] - Simple workaround for printing a dataset with no-name areas. [davidh- ssec] - Fix `get_config_path` to return user files before package provided. [davidh-ssec] - Fix bug in geotiff writer where gdal options were ignored. [davidh- ssec] - Merge pull request #53 from pytroll/feature-clavrx-reader. [David Hoese] Add CLAVR-x reader - Update setuptools before installing on travis. [davidh-ssec] - Fix enhancement configs in setup.py. [davidh-ssec] Includes fixing of hdf4 dependency to python-hdf4 - Add CLAVR-x reader. [davidh-ssec] - Merge pull request #54 from tparker-usgs/writerTypo. [David Hoese] Correct typo in writer - Correct typo. [Tom Parker] v0.6.2 (2017-05-22) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.6.1 → 0.6.2. [davidh-ssec] - Fix NUCAPS reader when used with multiple input granules. [davidh- ssec] Includes extra fix for the scene when missing datasets need to be printed/logged. - Work on projections for cf-writer. [Martin Raspaud] - Cosmetic fixes. [Martin Raspaud] - Improve cf write including grid mappings. [Martin Raspaud] - Bugfix eps_l1b. [Martin Raspaud] - Pass kwargs to dataset saving. [Martin Raspaud] - Add ninjotiff writer. [Martin Raspaud] - Avoid crashing when resampling datasets without area. [Martin Raspaud] - Add reducer8 compositor. [Martin Raspaud] - Merge pull request #51 from pytroll/common-nwcsaf-readers. [Martin Raspaud] Add reader for NWCSAF/PPS which can also be used by NWCSAF/MSG - Add support for PPS/CPP cloud phase and effective radius. [Adam.Dybbroe] - Harmonize composite names between PPS and MSG, and try handle the odd PPS palette in CTTH-height. [Adam.Dybbroe] - Added more PPS products - CPP parameters still missing. [Adam.Dybbroe] - Add modis support for pps reader. [Adam.Dybbroe] - Comment out get_shape method. [Adam.Dybbroe] - Add reader for NWCSAF/PPS which can also be used by NWCSAF/MSG. 
[Adam.Dybbroe] - Add initial enhancer tests. [davidh-ssec] v0.6.1 (2017-04-24) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.6.0 → 0.6.1. [Martin Raspaud] - Change branch for landscape badge. [Martin Raspaud] - Fix badge to point to develop. [Martin Raspaud] - Add a couple of badges to the readme. [Martin Raspaud] - Remove imageo subpackage and related tests. [davidh-ssec] - Add test for ReaderFinder. [davidh-ssec] Required fixing all reader tests that had improper patching of base file handlers. - Add NUCAPS reader tests. [davidh-ssec] - Fix OMPS EDR valid_min comparison. [davidh-ssec] - Add OMPS EDR tests. [davidh-ssec] - Add shape checking to AMSR2 L1B tests. [davidh-ssec] - Attempt to fix AMSR2 L1B reader tests. [davidh-ssec] - Add AMSR2 L1B tests. [davidh-ssec] - Fix loading of failed datasets. [davidh-ssec] Fix #42 - Fix viirs sdr loading when dataset's file type isn't loaded. [davidh- ssec] - Add a ColorizeCompositor vs PaletteCompositor. [Martin Raspaud] - Fix viirs sdr tests for python 3. [davidh-ssec] - Add ability for VIIRS SDRs to load geolocation files from N_GEO_Ref. [davidh-ssec] Also fixed tests and fixed dfilter not working in VIIRS SDRs when key was a DatasetID - Clean up styling for coordinates check. [davidh-ssec] Quantified code complained about duplicate if statements - Raise ValueError instead of IOError when standard_name is missing in coordinates. [Adam.Dybbroe] - Use previously unused cache dict to hold cached geolocation data. [Adam.Dybbroe] - Remove redundant import. [Adam.Dybbroe] - Raise an IOError when (lon,lat) coordinates doesn't have a standard_name. [Adam.Dybbroe] - Add warning when sensor is not supported by any readers. [davidh-ssec] Fix #32 v0.6.0 (2017-04-18) ------------------- Fix ~~~ - Bugfix: Masking data and apply vis-calibration. [Adam.Dybbroe] - Bugfix: Add wavelength to the DatasetID. [Adam.Dybbroe] - Bugfix: Add wavelength to the dataset info object, so pyspectral interface works. [Adam.Dybbroe] Other ~~~~~ - Update changelog. [Martin Raspaud] - Bump version: 0.5.0 → 0.6.0. [Martin Raspaud] - Fix pyresample link in README. [davidh-ssec] - Update documentation and readme to be more SatPy-y. [davidh-ssec] - Add ACSPO reader to documentation. [davidh-ssec] - Reduce redundant code in netcdf4 based tests. [davidh-ssec] - Add ACSPO reader tests. [davidh-ssec] - Force minimum version of netcdf4-python. [davidh-ssec] - Update pip on travis before installing dependencies. [davidh-ssec] - Install netcdf4 from source tarball on travis instead of from wheel. [davidh-ssec] netCDF4-python seems to be broken on travis when installed from a wheel. This tries installing it from a source tarball. - Replace netcdf4 with h5netcdf in netcdf4 file handler tests. [davidh- ssec] Travis has a library issue with netcdf4 so trying h5netcdf instead - Install cython via apt for travis tests. [davidh-ssec] - Add tests for NetCDF4 File Handler utility class. [davidh-ssec] - Add tests for HDF5 File Handler utility class. [davidh-ssec] - Update VIIRS L1B tests to work with python 3. [davidh-ssec] Includes installing netcdf4 apt packages on travis - Add netCDF4 library to travis tests. [davidh-ssec] - Add VIIRS L1B tests. [davidh-ssec] - Change YAML reader to only provide datasets that are requested. [davidh-ssec] Includes changes to mask any data slices when data can't be loaded from one or more file handlers. Raises an error if all file handlers fail. - Clean up style. [Martin Raspaud] - Add behave test for returned least modified dataset. 
[davidh-ssec] - Merge pull request #48 from pytroll/feature_bilinear. [David Hoese] Bilinear interpolation - Merge pull request #49 from pytroll/fix_ewa. [David Hoese] Fix EWA resampling - Remove data copy from EWA resampling. [davidh-ssec] - Send copy of the data to fornav() [Panu Lahtinen] - Merge branch 'fix_ewa' of https://github.com/pytroll/satpy into fix_ewa. [Panu Lahtinen] - Send copy of data to fornav() [Panu Lahtinen] - Fixes EWA resampling - Remove unused import. [Panu Lahtinen] - Discard masks from cache data. [Panu Lahtinen] - Start fixing EWA; single channels work, multichannels yield bad images. [Panu Lahtinen] - Add example using bilinear interpolation, caching and more CPUs. [Panu Lahtinen] - Handle datasets with multiple channels. [Panu Lahtinen] - Reorganize code. [Panu Lahtinen] - move caches to base class attribute - move cache reading to base class - move cache updating to base class - Add bilinear resampling, separate lonlat masking to a function. [Panu Lahtinen] - Merge pull request #50 from pytroll/feature-acspo-reader. [David Hoese] Add ACSPO SST Reader - Add more documentation methods in ACSPO reader. [davidh-ssec] - Fix ACSPO reader module docstring. [davidh-ssec] - Add ACSPO SST Reader. [davidh-ssec] - Cleanup code based on quantifiedcode. [davidh-ssec] - Add test to make sure least modified datasets are priorities in getitem. [davidh-ssec] - Change DatasetID sorting to be more pythonic. [davidh-ssec] - Fix incorrect usage of setdefault. [davidh-ssec] - Change DatasetIDs to be sortable and sort them in DatasetDict.keys() [davidh-ssec] - Make failing test more deterministic. [davidh-ssec] Planning to change how requested datasets are loaded/discovered so this test will need to get updated in the future anyway. - Fix DatasetDict.__getitem__ being slightly non-deterministic. [davidh- ssec] __getitem__ was depending on the output and order of .keys() which is not guaranteed to be the same every time. If more than one key was found to match the `item` then the first in a list based on .keys() was returned. The first element in this list was not always the same. - Fix Scene loading or computing datasets multiple times. [davidh-ssec] - Add filename filtering for start and end time. [davidh-ssec] - Fix Scene loading datasets multiple times. [davidh-ssec] Fix #45 - Fix setup.py's usage of find_packages. [davidh-ssec] - Fix deleting an item from the Scene if it wasn't in the wishlist. [davidh-ssec] If a user specified `unload=False` then there may be something in the Scene that isn't needed later. - Use setuptool's find_packages in setup.py. [davidh-ssec] - Use only h5py for compact viirs reading. [Martin Raspaud] - Remove hanging print statements. [Martin Raspaud] - Add night overview composite for viirs. [Martin Raspaud] - Add area def for MSG HRV. [Martin Raspaud] - Merge pull request #47 from pytroll/feature-yaml-enhancements. [Martin Raspaud] Switch enhancements to yaml format - Switch enhancements to yaml format. [Martin Raspaud] - Fix missed Projectable use in composites. [davidh-ssec] - Add support for segmented geostationary data. [Martin Raspaud] - Merge pull request #43 from pytroll/msg-native. [Martin Raspaud] Msg native - Possible fix for python 3.5. [Adam.Dybbroe] - Fix for python 3.5. [Adam.Dybbroe] - Change from relative to absolute import. [Adam.Dybbroe] - Merge branch 'develop' into msg-native. [Adam.Dybbroe] - Handle (nastily) cases where channel data are not available in the file. Add unittests. [Adam.Dybbroe] - Merge branch 'develop' into msg-native. 
[Adam.Dybbroe] - Add unittests for count to radiance calibration. [Adam.Dybbroe] - Use 10 to 16 bit conversion function that was copied from mipp. [Adam.Dybbroe] - Handle subset of SEVIRI channels. Full disk supported only. [Adam.Dybbroe] - Make file reading numpy 1.12 compatible. [Sauli Joro] - Remove dependency on mipp. [Adam.Dybbroe] - Merge branch 'develop' into msg-native. [Adam.Dybbroe] Conflicts: satpy/readers/__init__.py satpy/readers/hrit_msg.py - Fix IR and VIS calibration. [Adam.Dybbroe] - Pep8 and editorial (header) updates. [Adam.Dybbroe] - Adding the native msg header record definitions. [Adam.Dybbroe] - Semi-stable native reader version. Calibration unfinished. [Adam.Dybbroe] - Unfinished msg native reader. [Adam.Dybbroe] - Merge pull request #38 from bmu/develop. [Martin Raspaud] conda based install - Reformulated the documentation again. [bmu] - Corrected channel preferences of conda requirement file. [bmu] - Corrected file name in documentation. [bmu] - Renamed requirement file to reflect python and numpy version. [bmu] - Added installation section to the docs. [bmu] - Add vi swp files to gitignore. [bmu] - Added environment file for conda installations. [bmu] - Merge pull request #40 from m4sth0/develop. [Martin Raspaud] Add area slicing support for MTG-LI filehandler - Add workaround for area slicing issue. [m4sth0] Choosing a sub area for data import in a scene object like EuropeCanary results in a wrong area slice due to wrong area interpolation. If the lat lon values of a sub area are invalid (e.g. in space) the slicing gets incorrect. This commit bypasses this by calculating the slices directly without interpolation for two areas with the same projection (geos). - Add area slicing support for MTG-LI filehandler. [m4sth0] - Merge pull request #41 from meteoswiss-mdr/develop. [Martin Raspaud] Pytroll workshop --> new NWCSAF v2016 products - Pytroll workshop --> new NWCSAF v2016 products. [sam] - Change table of supported data types. [Adam.Dybbroe] - Add column "shortcomings" to table of supported readers, and add row for native reader. [Adam.Dybbroe] - Do not compute resampling mask for AreaDefinitions. [Martin Raspaud] - Add support for LRIT 8 bits. [Martin Raspaud] - Cleanup HRIT readers. [Martin Raspaud] - Add ABI composite module. [Martin Raspaud] - Update list of supported formats. [Martin Raspaud] - Remove unneeded code for electro reader. [Martin Raspaud] - Add HRIT JMA reader. [Martin Raspaud] - Merge pull request #35 from m4sth0/develop. [Martin Raspaud] Fix MTG-FCI and LI readers - Fix MTG-FCI and LI readers. [m4sth0] - Fix area extent for MSG segments. [Martin Raspaud] - Add very basic tests for the VIIRS SDR file reader. [davidh-ssec] - Test some utility functions. [Martin Raspaud] - Fix tutorial. [Martin Raspaud] v0.5.0 (2017-03-27) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.4.3 → 0.5.0. [Martin Raspaud] - Make sure calibration order is respected. [Martin Raspaud] - Fix angles interpolation in olci reader. [Martin Raspaud] - Fix some py3 tests. [Martin Raspaud] - Test BaseFileHandler. [Martin Raspaud] - Add some reader tests. [Martin Raspaud] - Work on ABI true color. [Martin Raspaud] - Add more VIIRS SDR tests. [davidh-ssec] - Add a missing docstring. [Martin Raspaud] - Refactor and test yaml_reader. [Martin Raspaud] - Add basic VIIRS SDR file handler tests. [davidh-ssec] - Add h5netcdf to travis. [Martin Raspaud] - Add the ABI reader tests to main test suite.
[Martin Raspaud] - Optimize and test ABI l1b calibration functions. [Martin Raspaud] - Add Zinke NCC algorithm to viirs DNB. [Martin Raspaud] - Fix lunar angles names in viirs sdr. [Martin Raspaud] - Add lunar angles support in compact viirs. [Martin Raspaud] v0.4.3 (2017-03-07) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.4.2 → 0.4.3. [Martin Raspaud] - Add more tests to yaml_reader. [Martin Raspaud] - Document what the Scene accepts better. [davidh-ssec] - Remove unused FileKey class. [davidh-ssec] - Add more tests for Scene object. [davidh-ssec] - Fix ABI L1B area again. [davidh-ssec] - Add Electro-L N2 HRIT reader. [Martin Raspaud] - Fix off by one error on calculating ABI L1B pixel resolution. [davidh-ssec] - Add sweep PROJ.4 parameter to ABI L1B reader. [davidh-ssec] - Fix geos bbox to rotate in the right direction. [Martin Raspaud] - Fix ABI L1B file patterns not working for mesos. [davidh-ssec] - Fix tests to handle reader_kwargs and explicit sensor keyword argument. [davidh-ssec] - Add reader_kwargs to Scene to pass to readers. [davidh-ssec] - Fix yaml reader start/end time with multiple file types. [davidh-ssec] - Allow `Scene.all_composite_ids` to return even if no sensor composite config. [davidh-ssec] v0.4.2 (2017-02-27) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.4.1 → 0.4.2. [Martin Raspaud] - Merge branch 'develop' [Martin Raspaud] - Fix area coverage test for ImportError. [Martin Raspaud] - Add two more tests for yaml_reader. [Martin Raspaud] - Add more datasets for NUCAPS reader. [davidh-ssec] - Add missing_datasets property to Scene. [davidh-ssec] Includes fix for trying to compute datasets after resampling that previously failed to load from readers - Make 'view' a variable in SLSTR reader. [Martin Raspaud] - Test available_datasets in yaml_reader. [Martin Raspaud] - Remove NotImplementedError in abstractmethods. [Martin Raspaud] - Test filtering yaml filehandlers by area. [Martin Raspaud] - Add yamlreader test. [Martin Raspaud] - Fix reader test of all_dataset_ids. [davidh-ssec] - Fix unit conversion for ABI L1B reader. [davidh-ssec] - Fix python3 tests. [Martin Raspaud] - Test all datasets ids and names. [Martin Raspaud] - Fix ABI Reader to work with non-CONUS images. [davidh-ssec] - Add unit conversion to ABI reader so generic composites work better. [davidh-ssec] - Fix ABI reader area definition and file type definitions. [davidh-ssec] - Change default start_time from file handler filename info. [davidh-ssec] - Add `get` method to hdf5 and netcdf file handlers. [davidh-ssec] - Fix interpolation of slstr angles. [Martin Raspaud] - Merge pull request #31 from mitkin/feature_caliop-reader. [Martin Raspaud] Add CALIOP v3 HDF4 reader - PEP8 fixes. [Mikhail Itkin] - Read end_time from file metadata. [Mikhail Itkin] - Functional CALIOP V3 HDF4 file handler. [Mikhail Itkin] - Merge branch 'develop' of https://github.com/pytroll/satpy into feature_caliop-reader. [Mikhail Itkin] - CALIOP reader WIP. [Mikhail Itkin] - Update to caliop reader. [Mikhail Itkin] - Add CALIOP reader (non functional yet) [Mikhail Itkin] - Work on slstr reader. [Martin Raspaud] - Fix small style error. [davidh-ssec] - Change swath definition name to be more unique. [davidh-ssec] - Fix style. [Martin Raspaud] - Create on-the-fly name for swath definitions. [Martin Raspaud] - Do some style cleanup. [Martin Raspaud] - Add simple tests for scene dunder-methods and others.
[davidh-ssec] Fix bugs that these tests encountered - Remove osx from travis testing environments. [davidh-ssec] - Fix amsr2 l1b reader coordinates. [davidh-ssec] - Update link to satpy's repository. [Mikhail Itkin] Used to be under `mraspaud`, now `pytroll` v0.4.1 (2017-02-21) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.4.0 → 0.4.1. [davidh-ssec] - Remove forgotten print statement in tests. [davidh-ssec] - Fix wavelength comparison when there are mixed types. [davidh-ssec] - Remove old files. [Martin Raspaud] - Merge pull request #30 from pytroll/feature-get-dataset-key-refactor. [David Hoese] Refactor get_dataset_key - Merge branch 'develop' into feature-get-dataset-key-refactor. [Martin Raspaud] - Rename ds id search function. [Martin Raspaud] - Added some test to get_dataset_key refactor. [Martin Raspaud] - Refactor get_dataset_key. [Martin Raspaud] - Use dfilter in node. [Martin Raspaud] - Refactor get_dataset_key wip. [Martin Raspaud] - Use wavelength instead of channel name for NIR refl computation. [Martin Raspaud] - Update contact info. [Martin Raspaud] v0.4.0 (2017-02-21) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.3.1 → 0.4.0. [davidh-ssec] - Fix composite loading when prereqs are delayed. [davidh-ssec] - Remove randomness altogether. [Martin Raspaud] - Reduce range of randomness for helper tests. [Martin Raspaud] - Make PSPRayleigh modifier fail if dataset shapes don't match. [Martin Raspaud] - Replace compositor name by id in log message. [Martin Raspaud] - Remove unnecessary print statement. [Martin Raspaud] - Remove plotting from helper_functions. [Martin Raspaud] - Add some randomness in helper_function tests. [Martin Raspaud] - Refactor and test helper functions for geostationary areas. [Martin Raspaud] - Add masking of space pixels in AHI hsd reader. [Martin Raspaud] - Add tests when datasets fail to load. [davidh-ssec] - Remove redundant container specification in certain reader configs. [davidh-ssec] Now that Areas are set by coordinates and Projectables are now Datasets there is no need to customize the container a dataset uses to define it as "metadata". - Fix composite loading when the compositor adds more information to the DatasetID. [davidh-ssec] - Add new composites for AHI. [Martin Raspaud] - Remove fast finish and py26 from travis config. [davidh-ssec] - Fix duplicate or incorrect imports from Projectable/DatasetID refactor. [davidh-ssec] - Remove Projectable class to use Dataset everywhere instead. [davidh- ssec] - Merge pull request #28 from pytroll/feature-remove-id. [David Hoese] Remove 'id' from the info attribute in datasets and composites - Remove to_trimmed_dict, add a kw to to_dict instead. [Martin Raspaud] - Add id attribute to Dataset. [Martin Raspaud] - Fix tests.utils to work with the id attribute. [Martin Raspaud] - Remove id from infodict, wip. [Martin Raspaud] - Fix style. [Martin Raspaud] - Use getattr instead of if-else construct in apply_modifier_info. [Martin Raspaud] - Use wavelength instead of channel name for NIR refl computation. [Martin Raspaud] - Fix modifier info getting applied. [davidh-ssec] Now the modifiers DatasetID gets updated along with any information that can be gathered from the source - Fix loading modified datasets that change resolution. [davidh-ssec] - Add more Scene loading tests for composites that use wavelengths instead of names. [davidh-ssec] - Fix rows_per_scan for VIIRS L1B reader and the sharpened RGB compositor. 
[davidh-ssec] - Fix scene loading when reader dataset failed to load. [davidh-ssec] - Add day microphysics composite to slstr. [Martin Raspaud] - Fix reading angles for SLSTR (S3) [Martin Raspaud] - Fix test by using DATASET_KEYS instead of DatasetID's as_dict. [Martin Raspaud] - Correct some metadata in viirs_sdr. [Martin Raspaud] - Refactor and test get_dataset_by* [Martin Raspaud] - Merge pull request #27 from davidh-ssec/develop. [David Hoese] Refactor Scene dependency tree - Add some docstrings to new deptree and compositor handling. [davidh- ssec] - Fix intermittent bug where requested dataset/comp wasn't "kept" after loading. [davidh-ssec] This would happen when a composite depended on a dataset that was also requested by the user. If the composite was processed first then the dependency wasn't reprocessed, but this was incorrectly not replacing the requested `name` in the wishlist with the new `DatasetID`. - Add tests for Scene loading. [davidh-ssec] Includes a few fixes for bugs that were discovered including choosing the best dataset from a DatasetDict when there are multiple matching Datasets. - Add very basic Scene loading tests. [davidh-ssec] - Fix behavior tests for python 3 and composite dependencies. [davidh- ssec] - Move dependency logic to DependencyTree class. [davidh-ssec] - Fix dependency tree when scene is resampled. [davidh-ssec] - Refactor compositor loading to better handle modified datasets/composites. [davidh-ssec] Includes assigning DatasetIDs to every compositor and renaming some missed references to wavelength_range which should be wavelength. - Fix DatasetID hashability in python 3. [davidh-ssec] In python 3 if __eq__ is defined then the object is automatically unhashable. I don't think we should run in to problems with a more flexible __eq__ than the hash function. - Fix loading composite by DatasetID. [davidh-ssec] Includes some clean up of dependency tree, including changes to Node. Also includes adding comparison methods to the DatasetID class - Fix `available_modifiers` [davidh-ssec] Required changes to how a deptree is created. Includes adding name attribute to Node class. - Refactor name and wavelength comparison functions to top of readers module. [davidh-ssec] So they can be used outside of DatasetDict - Added some tests for yaml_reader generic functions. [Martin Raspaud] - Add true_color_lowres to viirs (no pan sharpening) [Martin Raspaud] - Provide blue band to psp rayleigh correction. [Martin Raspaud] - Add MODIS composite config. [Martin Raspaud] - Add ABI composite config. [Martin Raspaud] - Cleanup style in yaml_reader. [Martin Raspaud] - Implement slicing for hrit. [Martin Raspaud] - Cleanup abi_l1b reader. [Martin Raspaud] - Allow get_dataset to raise KeyError to signal missing dataset in file. [Martin Raspaud] - Fix geostationary boundingbox. [Martin Raspaud] - Fill in correct wavelength for olci. [Martin Raspaud] - Add lon and lan info for hrpt. [Martin Raspaud] - Remove redundant file opening in hdfeos. [Martin Raspaud] - Add forgoten unit. [Martin Raspaud] - Fix wrong standard_name and add "overview" recipe. [Adam.Dybbroe] - Fix NIRReflectance modifier. [Martin Raspaud] - Update standard names and mda for hrit_msg. [Martin Raspaud] - Add another modis filepattern. [Nina.Hakansson] - Add python 3.6 to travis testing. [davidh-ssec] - Update travis config to finish as soon as required environments finish. [davidh-ssec] - Fix h5py reading of byte strings on python 3. 
[davidh-ssec] Was handling scalar arrays of str objects, but in python 3 they are bytes objects and weren't detected in the previous condition. - Cleanup test_yaml_reader.py. [Martin Raspaud] - Add tests for file selection. [Martin Raspaud] - Document how to save custom composites. [Martin Raspaud] - Fix VIIRS L1B reader for reflectances on v1.1+ level 1 processing software. [davidh-ssec] - Fix bug in FileYAMLReader when filenames are provided. [davidh-ssec] - Add a reader for Sentinel-2 MSI L1C data. [Martin Raspaud] - Remove unnecessary arguments in sar-c reader. [Martin Raspaud] v0.3.1 (2017-01-16) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.3.0 → 0.3.1. [Martin Raspaud] - Cleanup SAR-C. [Martin Raspaud] - Add annotations loading for sar-c. [Martin Raspaud] - Merge pull request #22 from mitkin/feature-sar-geolocation. [Martin Raspaud] Feature SAFE (Sentinel 1) SAR geolocation - Refactor coordinates computation. [Mikhail Itkin] Refactor changes for pull request #22 - Merge branch 'develop' of https://github.com/mitkin/satpy into feature-sar-geolocation. [Mikhail Itkin] - Make Sentinel 1 (SAFE) reader able to read coordinates. [Mikhail Itkin] Add latitude and longitude dictionaries to the `sar_c.yaml` reader and make the `safe_sar_c.py` reader compute coordinate arrays from a collection of GCPs provided in the measurement files. NB: each polarization has it's set of longitudes and latitudes. - Restore reducers to their original values. [Martin Raspaud] - Add alternatives for true color on ahi. [Martin Raspaud] Thanks balt - Add name to the dataset attributes when writing nc files. [Martin Raspaud] - Improve documentation. [Martin Raspaud] - Add proper enhancements for nwcsaf images. [Martin Raspaud] - Refactor hrit msg area def computation. [Martin Raspaud] - Perform som PEP8 cleanup. [Martin Raspaud] - Fix nwcsaf reader and its area definition. [Martin Raspaud] - Merge pull request #21 from mitkin/develop. [David Hoese] Mock pyresample.ewa - Mock pyresample.ewa. [Mikhail Itkin] Mock pyresample.ewa to prevent sphinx from importing the module. - Add NWCSAF MSG nc reader and composites. [Martin Raspaud] - Add gamma to the sarice composite. [Martin Raspaud] - Cleanup the sar composite. [Martin Raspaud] - Add the sar-ice composite. [Martin Raspaud] - Clean up the safe sar-c reader. [Martin Raspaud] - Finalize MSG HRIT calibration. [Martin Raspaud] - Fix abi reader copyright. [Martin Raspaud] - Refactor yaml_reader's create_filehandlers. [Martin Raspaud] - Rename function. [Martin Raspaud] - Add a composite file for slstr. [Martin Raspaud] - Add a noaa GAC/LAC reader using PyGAC. [Martin Raspaud] - Implement a mipp-free HRIT reader. [Martin Raspaud] WIP, supports only MSG, no calibration yet. - Concatenate area_def through making new AreaDefinition. [Martin Raspaud] This makes the concatenation independent of the AreaDefinition implementation. - Allow stacking area_def from bottom-up. [Martin Raspaud] - Fix yaml_reader testing. [Martin Raspaud] - Add support for filetype requirements. [Martin Raspaud] - Remove print statement in slstr reader. [Martin Raspaud] - Remove deprecated helper functions. [Martin Raspaud] - Refactor select_files, yaml_reader. [Martin Raspaud] - Editorials. [Adam.Dybbroe] - Add coastline overlay capability. [Martin Raspaud] - Move the Node class to its own module. [Martin Raspaud] - Initialize angles in epsl1b reader. [Martin Raspaud] - Add angles reading to eps reader. 
[Martin Raspaud] v0.3.0 (2016-12-13) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.2.1 → 0.3.0. [Martin Raspaud] - Fix NUCAPS reader to work with latlon datasets. [davidh-ssec] This required changing yaml_reader to work with 1D arrays since NUCAPS is all 1D (both swath data and metadata). - Refactor yaml_reader's load method. [Martin Raspaud] - Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud] - Fix VIIRS L1B reader to work with xslice/yslice and fix geolocation dataset names. [davidh-ssec] - Fix netcdf wrapper to work better with older and newer versions of netcdf4-python. [davidh-ssec] - Make ahi reader use correct default slicing. [Martin Raspaud] - Bugfix sliced reading. [Martin Raspaud] - Put slice(None) as default for reading. [Martin Raspaud] - Allow readers not supporting slices. [Martin Raspaud] - Refactor scene's init. [Martin Raspaud] - Convert nucaps to coordinates. [Martin Raspaud] - Adapt viirs_l1b to coordinates. [Martin Raspaud] - Convert omps reader to coordinates. [Martin Raspaud] - Reinstate viirs_sdr.yaml for coordinates, add standard_names. [Martin Raspaud] - Adapt compact viirs reader to coordinates. [Martin Raspaud] - Add first version of S1 Sar-c reader. [Martin Raspaud] - Adapt olci reader to coordinates. [Martin Raspaud] - Add S3 slstr reader. [Martin Raspaud] - Add standard_names to hdfeos navigation. [Martin Raspaud] - Fix epsl1b reader for lon/lat standard_name. [Martin Raspaud] - Adapt amsr2 reader for coordinates. [Martin Raspaud] - Fix aapp1b reader. [Martin Raspaud] - Use standard name for lon and lat identification. [Martin Raspaud] - Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud] Conflicts: satpy/readers/ahi_hsd.py - Area loading for ahi_hsd. [Martin Raspaud] - Fix python3 syntax incompatibility. [Martin Raspaud] - Implement area-based loading. [Martin Raspaud] - Add get_bounding_box for area-based file selection. [Martin Raspaud] - Fix ahi area extent. [Martin Raspaud] - Merge remote-tracking branch 'origin/feature-lonlat-datasets' into feature-lonlat-datasets. [Martin Raspaud] - Convert VIIRS SDR reader to coordinates. [davidh-ssec] - Fix viirs_sdr i bands to work with coordinates. [davidh-ssec] - Support different path separators in patterns. [Martin Raspaud] - Move area def loading to its own function. [Martin Raspaud] - Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud] Conflicts: satpy/readers/yaml_reader.py - Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud] Conflicts: satpy/readers/yaml_reader.py - Pass down the calibration, polarization and resolution from main load. [Martin Raspaud] - Fix typo in sunzenith correction description. Default is 88 deg, not 80. [Adam.Dybbroe] - Fix sun zenith key for caching. [Martin Raspaud] - Move helper functions to readers directory. [Martin Raspaud] - Adapt hrpt reader to coordinates. [Martin Raspaud] - Fix resample to work when the area has no name. [Martin Raspaud] - Adapt aapp_l1b and hdfeos to coordinates. [Martin Raspaud] - Change remove arguments from get_area_def signature. [Martin Raspaud] - Adapt eps_l1b to 'coordinates' [Martin Raspaud] - Navigation is now handled thru 'coordinates' [Martin Raspaud] Here we make longitude and latitudes usual datasets, and the keyword called 'coordinates' in the config specifies the coordinates to use for the dataset at hand. v0.2.1 (2016-12-08) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.2.0 → 0.2.1. 
[Martin Raspaud] - Move ghrsst_osisaf.yaml to new location. [Martin Raspaud] - Remove old mpop legacy files. [Martin Raspaud] - Move etc to satpy, use package_data for default config files. [Martin Raspaud] - Merge pull request #19 from adybbroe/osisaf_sst_reader. [Martin Raspaud] Add OSISAF SST GHRSST reader - Add OSISAF SST GHRSST reader. [Adam.Dybbroe] - Replace memmap with fromfile in ahi hsd reading. [Martin Raspaud] - Merge branch 'develop' of github.com:pytroll/satpy into develop. [Adam.Dybbroe] - Merge pull request #18 from northaholic/develop. [Martin Raspaud] improve FCI reader readability. fix FCI reader config for WV channels. - Improve FCI reader readability. fix FCI reader config for WV channels. [Sauli Joro] - Merge pull request #17 from m4sth0/develop. [Martin Raspaud] Add MTG LI reader - Add MTG-LI L2 reader for preliminary test data. [m4sth0] - Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0] - Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0] - Solve compatibility problem with older netCDF4 versions. [Adam.Dybbroe] - Fix style in abi reader. [Martin Raspaud] - Add ABI reader + YAML. [Guido Della Bruna] - Merge pull request #15 from m4sth0/develop. [Martin Raspaud] Develop - Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0] - Fixed FCI channel calibration method. [m4sth0] - Fix VIIRS L1B moon illumination fraction for L1B v2.0. [davidh-ssec] In NASA Level 1 software version <2.0 the fraction was a global attribute, now in v2.0 it is a per-pixel swath variable - Fix DNB SZA and LZA naming to match viirs composite configs. [davidh- ssec] - Fix start_time/end_time creation in Scene when no readers found. [davidh-ssec] - Merge pull request #14 from m4sth0/develop. [Martin Raspaud] Add calibration functions for FCI - Add calibration functions for FCI. [m4sth0] - Bugfix. [Adam.Dybbroe] - Bugfix. [Adam.Dybbroe] - Editorial pep8/pylint. [Adam.Dybbroe] - Merge pull request #13 from m4sth0/develop. [Martin Raspaud] Add MTG-FCI Level 1C netCDF reader - Add MTG-FCI Level 1C netCDF reader The test dataset from EUMETSAT for the FCI Level 1C Format Familiarisation is used to implement the reader in satpy. Limitations due to missing meta data for satellite georeferencing and calibration. [m4sth0] - Pass down the calibration, polarization and resolution from main load. [Martin Raspaud] - Fix typo in sunzenith correction description. Default is 88 deg, not 80. [Adam.Dybbroe] - Move helper functions to readers directory. [Martin Raspaud] - Fix Scene sensor metadata when it is a string instead of a list. [davidh-ssec] - Fix start_time/end_time properties on Scene object after resampling. [davidh-ssec] These properties were dependent on scn.readers which doesn't exist after resampling creates a new "copy" of the original Scene. Now these values are part of the metadata in .info and set on init. - Replace errors with warnings when loading dependencies. [davidh-ssec] v0.2.0 (2016-11-21) ------------------- Fix ~~~ - Bugfix: converted MSG products should be saveable. [Martin Raspaud] - Bugfix: satellite name in msg_hdf now supports missing number. [Martin Raspaud] - Bugfix: misspelling. [Martin Raspaud] - Bugfix: mipp_xrit: do not crash on unknown channels, just warn and skip. [Martin Raspaud] - Bugfix: changed reference from composites.cfg to composites/generic.cfg. [Martin Raspaud] - Bugfix: works now for file auto discovery. 
[Martin Raspaud] - Bugfix: get_filename wants a reader_instance and cleanup. [Martin Raspaud] - Bugfix: setup.py includes now eps xml format description. [Martin Raspaud] - Close all h5files in viirs_sdr, not only the last one. [Martin.Raspaud] - Bugfix: close h5 files when done. [Martin Raspaud] Prior to h5py 3.0, the h5 files open with h5py are not closed upon deletion, so we have to do it ourselves... - Bugfix: area.id doesn't exist, use area.area_id. [Martin Raspaud] - Bugfix: return when each file has been loaded independently. [Martin Raspaud] - Bugfix: Do not crash on multiple non-nwc files. [Martin Raspaud] - Bugfix: check start and end times from loaded channels only. [Martin Raspaud] - Bugfix: viirs start and end times not relying on non-existant channels anymore. [Martin Raspaud] - Bugfix: type() doesn't support unicode, cast to str. [Martin Raspaud] - Bugfix: allow more than one "-" in section names. [Martin Raspaud] - Bugfix: read aqua/terra orbit number from file only if not already defined. [Martin Raspaud] - Bugfix: fixed unittest case for wavelengths as lists. [Martin Raspaud] - Bugfix: remove deprecated mviri testcases. [Martin Raspaud] - Bugfix: backward compatibility with netcdf files. [Martin Raspaud] - Bugfix: removed the old mviri compositer. [Martin Raspaud] - Bugfix: When assembling, keep track of object, not just lon/lats. [Martin Raspaud] - Bugfix: assembling scenes would unmask some lon/lats... [Martin Raspaud] - Bugfix: handling of channels with different resolutions in assemble_segments. [Martin Raspaud] - Bugfix: Runner crashed if called with an area not in product list. [Martin Raspaud] - Bugfix: the nwcsaf_pps reader was crashing if no file was found... [Martin Raspaud] - Bugfix: pynav is not working in some cases, replace with pyorbital. [Martin Raspaud] - Bugfix: can now add overlay in monochromatic images. [Martin Raspaud] - Bugfix: swath scene projection takes forever from the second time. [Martin Raspaud] The swath scene, when projected more than once would recompute the nearest neighbours for every channel. - Bugfix: importing geotiepoints. [Martin Raspaud] - Bugfix: hdfeos was not eumetcast compliant :( [Martin Raspaud] - Bugfix: Do not raise exception on loading failure (nwcsaf_pps) [Martin Raspaud] - Bugfix: fixed misc bugs. [Martin Raspaud] - Bugfix: comparing directories with samefile is better than ==. [Martin Raspaud] - Bugfix: updating old eps_l1b interface. [Martin Raspaud] - Bugfix: Fixed typo in gatherer. [Martin Raspaud] - Bugfix: taking satscene.area into consideration for get_lonlat. [Martin Raspaud] - Bugfix: mipp required version to 0.6.0. [Martin Raspaud] - Bugfix: updating unittest and setup for new mipp release. [Martin Raspaud] - Bugfix: for eps l1b, get_lonlat did not return coherent values since the introduction of pyresample. [Martin Raspaud] - Bugfix: mipp to mipp_xrit namechange. [Martin Raspaud] - Bugfix: better detection of needed channels in aapp1b. [Martin Raspaud] - Bugfix: support for other platforms. [Martin Raspaud] - Bugfix: Support python 2.4 in mipp plugin. [Martin Raspaud] - Bugfix: masked arrays should be conserved by scene.__setitem__ [Martin Raspaud] - Bugfix: Don't make area and time_slot static in compositer. [Martin Raspaud] - Bugfix: reinit channels_to_load and messages for no loading. [Martin Raspaud] - When the loading process is interrupted, the channels_to_load attribute was not reinitialized. - Added a message when loading for a given level did not load anything. 
- Bugfix: Give an informative message when area is missing for msg's hdf reader. [Martin Raspaud] - Bugfix: update satpos file retrieval for hrpt and eps1a. [Martin Raspaud] - Bugfix: fixed unittests for new plugin system. [Martin Raspaud] - Bugfix: Do not load plugins automatically... [Martin Raspaud] - Bugfix: satellite vs satname again. [Martin Raspaud] - Bugfix: don't crash if msg hdf can't be loaded. [Martin Raspaud] - Bugfix: project now chooses mode automatically by default. [Martin Raspaud] - Bugfix: eps_avhrr adapted to new plugin format. [Martin Raspaud] - Bugfix: loading in msg_hdf adapted to new plugin system. [Martin Raspaud] - Bugfix: loading plugins should fail on any exception. [Martin Raspaud] - Bugfix: stupid syntax error. [Martin Raspaud] - Bugfix: mistook satname for satellite. [Martin Raspaud] - Bugfix: move to jenkins. [Martin Raspaud] - Bugfix: affecting area to channel_image. [Martin Raspaud] - Bugfix: Better handling of alpha channel. [Martin Raspaud] - Bugfix: filewatcher would wait a long time if no new file has come. [Martin Raspaud] - Bugfix: netcdf saving didn't record lat and lon correctly. [Martin Raspaud] - Bugfix: netcdf saving didn't work if only one value was available. [Martin Raspaud] - Bugfix: test_mipp had invalid proj parameters. [Martin Raspaud] - Bugfix: satellite vs satname again. [Martin Raspaud] - Bugfix: project now chooses mode automatically by default. [Martin Raspaud] - Bugfix: move to jenkins. [Martin Raspaud] - Bugfix: fixed unit test for projector reflecting the new mode handling. [Martin Raspaud] - Bugfix: fixed None mode problem in projector. [Martin Raspaud] - Bugfix: The default projecting mode now takes into account the types of the in and out areas. [Martin Raspaud] - Bugfix: forgot the argument to wait in filewatcher. [Martin Raspaud] - Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud] - Bugfix: 0 reflectances were masked in aapp1b loader. [Martin Raspaud] - Bugfix: corrected parallax values as no_data in msg products reading. [Martin Raspaud] - Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud] - Bugfix: Compatibility with nordrad was broken. [Martin Raspaud] - Bugfix: forgot the argument to wait in filewatcher. [Martin Raspaud] - Bugfix: forgot strptime = datetime.strptime when python > 2.5. [Martin Raspaud] - Bugfix: corrected parallax values as no_data in msg products reading. [Martin Raspaud] - Bugfix: individual channel areas are preserved when assembled together. [Martin Raspaud] - Bugfix: cleanup tmp directory when conversion to lvl 1b is done. [Martin Raspaud] - Bugfix: remove hardcoded paths in hrpt and eps lvl 1a. [Martin Raspaud] - Bugfix: use mpop's main config path. [Martin Raspaud] - Bugfix: added python 2.4 compatibility. [Martin Raspaud] - Bugfix: allow all masked array as channel data. [Martin Raspaud] - Better support for channel-bound areas. [Martin Raspaud] - Bugfix: 0 reflectances were masked in aapp1b loader. [Martin Raspaud] - Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud] - Bugfix: error checking on area_extent for loading. [Martin Raspaud] - Bugfix: non loaded channels should not induce computation of projection. [Martin Raspaud] - Bugfix: thin modis didn't like area extent and was locked in 2010... [Martin Raspaud] - Bugfix: Compatibility with nordrad was broken.
[Martin Raspaud] - Bugfix: fixed matching in git command for version numbering. [Martin Raspaud] - Bugfix: Negative temperatures (in K) should not be valid data when reading aapp1b files. [Martin Raspaud] - Bugfix: remove hudson from tags when getting version. [Martin Raspaud] - Bugfix: fixed hdf inconsistencies with the old pyhl reading of msg ctype and ctth files. [Martin Raspaud] - Bugfix: Updated code and tests to validate unittests. [Martin Raspaud] - Bugfix: data reloaded even if the load_again flag was False. [Martin Raspaud] - Bugfix: updated tests for disappearance of avhrr.py. [Martin Raspaud] - Bugfix: access to CompositerClass would fail if using the old interface. [Martin Raspaud] - Bugfix: typesize for msg's ctth didn't please pps... [Martin Raspaud] - Bugfix: fixed data format (uint8) in msg_hdf. [Martin Raspaud] - Bugfix: wrong and forgotten instantiations. [Martin Raspaud] - Bugfix: crashing on missing channels in mipp loading. [Martin Raspaud] - Bugfix: forgot to pass along area_extent in mipp loader. [Martin Raspaud] - Bugfix: fixing integration test (duck typing). [Martin Raspaud] - Bugfix: pyresample.geometry is loaded lazily for area building. [Martin Raspaud] - Bugfix: Updated unit tests. [Martin Raspaud] - Bugfix: Last change introduced empty channel list for meteosat 09. [Martin Raspaud] - Bugfix: Last change introduced empty channel list for meteosat 09. [Martin Raspaud] - Bugfix: update unittests for new internal implementation. [Martin Raspaud] - Bugfix: compression argument was wrong in satelliteinstrumentscene.save. [Martin Raspaud] - Bugfix: adapted mpop to new equality operation in pyresample. [Martin Raspaud] - Bugfix: More robust config reading in projector and test_projector. [Martin Raspaud] - Bugfix: updated the msg_hrit (nwclib based) reader. [Martin Raspaud] - Bugfix: swath processing was broken, now fixed. [Martin Raspaud] - Bugfix: corrected the smaller msg globe area. [Martin Raspaud] - Bugfix: Erroneous assumption on the position of the 0,0 lon lat in the seviri frame led to many wrong things. [Martin Raspaud] - Bugfix: introduced bugs with last changes. [Martin Raspaud] - Bugfix: new area extent for EuropeCanary. [Martin Raspaud] - Bugfix: Updated setup.py to new structure. [Martin Raspaud] - Bugfix: updated integration test to new structure. [Martin Raspaud] - Bugfix: more verbose crashing when building extensions. [Martin Raspaud] - Bugfix: corrected EuropeCanary region. [Martin Raspaud] - Bugfix: made missing areas message in projector more informative (includes missing area name). [Martin Raspaud] - Bugfix: Added missing import in test_pp_core. [Martin Raspaud] - Bugfix: fixing missing import in test_scene. [Martin Raspaud] - Bugfix: geotiff images were all saved with the wgs84 ellipsoid even when another was specified... [Martin Raspaud] - Bugfix: Corrected the formulas for area_extent computation in geos view. [Martin Raspaud] - Bugfix: satellite number in cf proxy must be an int. Added also instrument_name. [Martin Raspaud] - Bugfix: Erroneous on-the-fly area building. [Martin Raspaud] - Bugfix: geo_image: gdal_options and tags were [] and {} by default, which is dangerous. [Martin Raspaud] - Bugfix: Support for new namespace for osr. [Martin Raspaud] - Bugfix: remove double test in test_channel. [Martin Raspaud] - Bugfix: showing channels couldn't handle masked arrays. [Martin Raspaud] - Bugfix: Scene tests were wrong in project. [Martin Raspaud] - Bugfix: when loading only CTTH or CloudType, the region name was not defined.
[Martin Raspaud] - Bugfix: in test_channel, Channel constructor needs an argument. [Martin Raspaud] - Bugfix: in test_cmp, tested GenericChannel instead of Channel. [Martin Raspaud] - Bugfix: Test case for channel initialization expected the wrong error when wavelength argument was of the wrong size. [Martin Raspaud] - Bugfix: Added length check for "wavelength" channel init argument. [Martin Raspaud] - Bugfix: test case for channel resolution did not follow previous patch allowing real resolutions. [Martin Raspaud] - Bugfix: thin modis lon/lat are now masked arrays. [Martin Raspaud] - Bugfix: in channel constructor, wavelength triplet was not correctly checked for type. [Martin Raspaud] Just the min wavelength was checked three times.
Other
~~~~~
- Update changelog. [Martin Raspaud] - Bump version: 0.1.0 → 0.2.0. [Martin Raspaud] - Fix version number. [Martin Raspaud] - Do not fill lon and lat masks with random values. [Martin Raspaud] - Fix AHI reading for new rayleigh correction. [Martin Raspaud] - Add some modifiers for AHI. [Martin Raspaud] - Adjust to requesting rayleigh correction by wavelength. [Martin Raspaud] - Add rayleigh modifier to visir. [Martin Raspaud] - Add angles reading to nc_olci. [Martin Raspaud] - Add pyspectral's generic rayleigh correction. [Martin Raspaud] - Fix cosmetics in scene.py. [Martin Raspaud] - Remove memmap from eps_l1b, use fromfile instead. [Martin Raspaud] This was triggering a `Too many open files` error since the memmap was called for every scanline. - Fix loading for datasets with no navigation. [Martin Raspaud] - Read start and end time from filename for eps_l1b. [Martin Raspaud] This avoids opening every file just for time checks. - Rename file handler's get_area to get_lonlats. [davidh-ssec] There is now a get_area_def and get_lonlats method on individual file handlers - Fix start/end/area parameters in FileYAMLReader. [davidh-ssec] - Move start_time, end_time, area parameters to reader init instead of load. [davidh-ssec] Scenes do not change start_time, end_time, area after init so neither should readers. Same treatment is probably needed for 'sensors'. - Fix avhrr reading. [Martin Raspaud] - Add amsr2 composite config file. [Martin Raspaud] - Adjust OLCI reader for reflectance calibration. [Martin Raspaud] - Delete old reader .cfg config files that are no longer used. [davidh-ssec] - Add forgotten OMPS yaml file. [davidh-ssec] - Convert OMPS reader from .cfg/INI to YAML. [davidh-ssec] - Provide better warning message when specified reader can't be found. [davidh-ssec] - Clean up class declarations in viirs l1b yaml. [davidh-ssec] - Fix VIIRS L1B inplace loading. [davidh-ssec] - Remove duplicate units definition in nucaps reader. [davidh-ssec] - Add standard_name and units to nucaps reader. [davidh-ssec] - Convert nucaps reader to yaml. [davidh-ssec] - Remove `dskey` from reader dataset ID dictionary. [davidh-ssec] The section name for each dataset was not used except to uniquely identify one dataset 'variation' from another similar dataset. For example you could technically have two sections for each calibration of a single dataset. YAML would require a different section name for each of these, but it is not used inside of satpy's readers because the `name` and DatasetID are used for that purpose.
- Rename 'navigation' section in reader configs to 'navigations' [davidh-ssec] More consistent and grammatically correct with file_types and datasets - Rename 'corrector' and 'correction' modifiers to 'corrected' [davidh- ssec] Modifier names are applied to DatasetIDs so it was decided that 'corrected' may sound better in the majority of cases than 'corrector'. - Add .info dictionary to SwathDefinition created by YAML Reader. [davidh-ssec] - Fix standard_name of natural_color composite for VIIRS. [davidh-ssec] - Add ratio sharpened natural color for VIIRS. [davidh-ssec] - Rename VIIRSSharpTrueColor to RatioSharpenedRGB. [davidh-ssec] This includes making the ratio sharpened true color the default for VIIRS under the name 'true_color' - Fix tuple expansion in sunz corrector. [davidh-ssec] - Rename I and DNB angle datasets to reflect M band naming. [davidh- ssec] - Allow including directories in file patterns. [Martin Raspaud] - Add navigation to olci reader. [Martin Raspaud] - Add support for OLCI format reading. [Martin Raspaud] - Cleanup SunZenithCorrector. [Martin Raspaud] - Remove some TODOs. [Martin Raspaud] - Fix some seviri composites. [Martin Raspaud] - Add mipp config file for MSG3. [Martin Raspaud] This is needed by mipp when the mipp_hrit reader is used. - Remove `if True` from viirs sharp true color. [davidh-ssec] - Fix small bug in scene when dataset isn't found in a reader. [davidh- ssec] - Update VIIRS sharpened true color to be more flexible when upsampling. [davidh-ssec] - Refactor composite config loading to allow interdependent modifiers. [Martin Raspaud] - Add configuration files for HRIT H8 loading. [Martin Raspaud] - Pass platform_name to mipp for prologue-less hrit formats. [Martin Raspaud] - Provide satellite position information on load (HSD) [Martin Raspaud] - Put AHI HSD reflectances in % [Martin Raspaud] They were between 0 and 1 by default - Fix AHI HSD nav dtype. [Martin Raspaud] lon ssp and lat ssp where swaped - Adjust correct standard names for seviri calibration. [Martin Raspaud] - Fix Seviri CO2 correction buggy yaml def. [Martin Raspaud] - Fix sunz corrector with different resolutions. [davidh-ssec] Includes fix to make sure composites from user-land will overwrite builtin composites. - Update VIIRS L1B LUT variable path construction to be more flexible. [davidh-ssec] - Add recursive dict updating to yaml reader configs. [davidh-ssec] Before this only the top level values would be updated as a whole which wasn't really the intended function of having multiple config files. - Fix coords2area_def with rounding of x and y sizes. [Martin Raspaud] - Fix cos zen normalisation (do not use datetime64) [Martin Raspaud] - Fix start and end time format to use datetime.datetime. [Martin Raspaud] - Add IMAPP file patterns to HDFEOS L1B reader. [davidh-ssec] - Fix hdfeos_l1b due to missing get_area_def method. [davidh-ssec] The HDFEOS file handlers weren't inheriting the proper base classes - Add sunz_corrector modifier to viirs_sdr reader. [davidh-ssec] - Fix available_dataset_names when multiple file types are involved. [davidh-ssec] Also includes a clean up of the available_dataset_names by not providing duplicates (from multiple calibrations and resolutions) - Allow multiple file types in yaml reader. [davidh-ssec] - Add VIIRS SDR M-band angles and DNB angles. [davidh-ssec] - Add VIIRS SDR reader back in [WIP] [davidh-ssec] I've added all the M and I bands, but need to add DNB and the various angle measurements that we use a lot. 
Also need to add the functionality to load/find the geolocation files from the content in the data files. - Add reader_name and composites keywords to all/available_dataset_names methods. [davidh-ssec] - Fix available_dataset_ids and all_dataset_ids methods. [davidh-ssec] There are not `(all/available)_dataset_(ids/names)` methods on the Scene object. Includes a fix for available composites. - Fix multiple load calls in Scene. [davidh-ssec] This isn't technically a supported feature, but it was a simple fix to get it to work for my case. - Fix compositor loading when optional_prerequisites are more than a name. [davidh-ssec] - Update coord2area_def to be in sync with the mpop version. [Martin Raspaud] - Fix seviri.yaml for new prerequisite syntax. [Martin Raspaud] - Fix EPSG info in geotiffs. [Martin Raspaud] - Adjust crefl for python 3 compatibility. [Martin Raspaud] - Merge branch 'new_prereq_syntax' into feature-yaml. [Martin Raspaud] Conflicts: etc/composites/viirs.yaml etc/composites/visir.yaml satpy/composites/__init__.py satpy/scene.py - Add support for new prerequisite syntax. [Martin Raspaud] - Got VIIRS L1B True color working. [davidh-ssec] Still need work on sharpened true color when I01 is used for ratio sharpening. - Remove unneeded quotes for python names in yaml files. [Martin Raspaud] - Merge branch 'feature-ahi-no-navigation' into feature-yaml. [Martin Raspaud] Conflicts: etc/composites/viirs.yaml satpy/readers/yaml_reader.py - Add viirs composites. [Martin Raspaud] - Fix the area_def concatenation. [Martin Raspaud] - Mask nan in ir calibration for ahi hsd. [Martin Raspaud] - Fix out of place loading, by not using a shuttle. [Martin Raspaud] - Make get_area_def a default method of file_handlers. [Martin Raspaud] - Allow file handler to provide area defs instead of swath. [Martin Raspaud] This is enabled by implementing the `get_area_def` method in the file handler. - Optimize AHI reading using inplace loading. [Martin Raspaud] Navigation is switched off for now. - Allow area loading for the data file handlers. [Martin Raspaud] - Use a named tuple to pass both data, mask and info dict for inplace loading. [Martin Raspaud] - Fix AreaID name to AreaID. [Martin Raspaud] - Fix AreaID name to AreaID. [Martin Raspaud] - Add moon illumination fraction and DNB enhancements for VIIRS. [davidh-ssec] MIF needed some edits to how the reader works since it returns a Dataset (no associated navigation) - Add other basic datasets to VIIRS L1B. [davidh-ssec] I only had I01 and I04 for testing, not has all I, M, and DNB datasets. - Add enhancements configuration directory to the setup.py data_files. [davidh-ssec] - Complete AHI HSD reader. [Martin Raspaud] - Fix missing dependency and python3 compatibility in ahi_hsd. [Martin Raspaud] - Add skeleton for Himawari AHI reading. [Martin Raspaud] - Add a NIR reflectance modifier using pyspectral. [Martin Raspaud] - Add some metadata to projectables in viirs compact. [Martin Raspaud] - Fix optional prerequisites loading. [Martin Raspaud] - Raise an IncompatibleArea exception on RGBCompositor. [Martin Raspaud] - Look for local files even if base_dir and filenames are missing. [Martin Raspaud] - Allow empty scene creation when neither filenames nor base_dir is provided. [Martin Raspaud] - Handle incompatible areas when reading composites. [Martin Raspaud] - Remove dead code. [Martin Raspaud] - Add debug information in viirs compact. [Martin Raspaud] - Get dataset key from calibration in correct order. 
[Martin Raspaud] - Raise exception when no files are found. [Martin Raspaud] - Add DNB to viirs compact. [Martin Raspaud] - Remove old mpop legacy files. [Martin Raspaud] - Make viirs_compact python 3 compatible. [Martin Raspaud] - Move xmlformat.py to the readers directory, and remove a print statement. [Martin Raspaud] - Fix EPSG projection definition saving to geotiff. [Martin Raspaud] - Remove python 3 incompatible syntax (Tuple Parameter Unpacking) [Martin Raspaud] - Fix crefl further to lower memory consumption. [Martin Raspaud] - Avoid raising an error when no files are found. [Martin Raspaud] Instead, a warning is logged. - Remove unused code from readers/__init__.py. [Martin Raspaud] - Cleanup style. [Martin Raspaud] - Fix unittests. [Martin Raspaud] - Deactivate viirssdr testing while migrating to yaml. [Martin Raspaud] - Refactor parts of compact viirs reader. [Martin Raspaud] - Optimize memory for crefl computation. [Martin Raspaud] - Allow sunz corrector to be provided the sunz angles. [Martin Raspaud] - Make chained modifiers work. [Martin Raspaud] - Cleanup style. [Martin Raspaud] - Add a crefl modifier for viirs. [Martin Raspaud] - Add loading of sun-satellite/sensor viewing angles to aapp-l1b reader. [Adam.Dybbroe] - Add sensor/solar angles loading to compact viirs reader. [Martin Raspaud] - Allow modifier or composites sections to be missing from config. [Martin Raspaud] - Fix some composites. [Martin Raspaud] - Port VIIRS Compact M-bands to yaml. [Martin Raspaud] - Add modifiers feature. [Martin Raspaud] Now modifiers can be added to the prerequisites as dictionaries. - Add standard_names to channels in mipp_xrit. [Martin Raspaud] - Add a NC4/CF writer. [Martin Raspaud] - Use YAML instead of CFG for composites. [Martin Raspaud] - Rename wavelength_range to wavelength in reader configs. [davidh-ssec] Also rewrote other yaml configs to use new dict identifiers - Add YAML based VIIRS L1B reader (I01 and I04 only) [davidh-ssec] - Allow dict identifiers in reader's datasets config. [davidh-ssec] Some metadata (standard_name, units, etc) are dependent on the calibration, resolution, or other identifying piece of info. Now these make it easier to fully identify a dataset and the multiple ways it may exist. This commit also includes small fixes for how `get_shape` is called and fixes for the netcdf4 handler to match past changes. - Fix numpy warnings when assigning to masked arrays. [davidh-ssec] - Add pyyaml to setup.py requires. [davidh-ssec] - Make base file handler an abstract base class. [davidh-ssec] Also changed start_time and end_time to properties of the file handlers - Make AbstractYAMLReader an actual ABCMeta abstract class. [davidh-ssec] - Fix ReaderFinder when all provided filenames have been found. [davidh-ssec] Also fixed mipp_xrit reader which was providing the set of files that matched rather than the set of files that didn't match. Added start and end time to the xrit reader too. - Rename YAMLBasedReader to FileYAMLReader. [davidh-ssec] As in it is a YAML Based Reader that accepts files where a dataset is not separated among multiple files. - Merge remote-tracking branch 'origin/feature-yaml' into feature-yaml. [davidh-ssec] - Port EPS l1b reader to yaml. [Martin Raspaud] - Combine areas also in combine_info. [Martin Raspaud] - Port mipp xrit reader to yaml. [Martin Raspaud] - Split YAMLBasedReader to accommodate derivatives. [Martin Raspaud] Some file formats split a dataset on multiple files, a situation which is not covered by the YAMLBasedReader.
Some parts of the class being still valid in this situation, we split the class to avoid code duplication, using subclassing instead. - Add hrpt reader. [Martin Raspaud] - Change AMSR2 L1B reader config to be 2 spaces instead of 4. [davidh- ssec] - Remove uncommented blank likes from scene header. [Martin Raspaud] - Allow filenames to be an empty set and still look for files. [Martin Raspaud] - Reorganize imports in mipp reader. [Martin Raspaud] - Beautify resample.py. [Martin Raspaud] - Use uncertainty flags to mask erroneous data. [Martin Raspaud] - Optimize the loading by caching 3b flag. [Martin Raspaud] - Stack the projectable keeping the mask. [Martin Raspaud] - Avoid datasets from being requested multiple times. [Martin Raspaud] - Fix aapp1b to work again. [Martin Raspaud] - Use area ids to carry navigation needs. [Martin Raspaud] - Get the hdfeos_l1b reader to work again. [Martin Raspaud] - Add yaml files to setup.py included data files. [davidh-ssec] - Move start/end/area filtering to reader init. [davidh-ssec] This includes moving file handler opening to the `select_files` method. - Add combine_info method to base file handlers. [davidh-ssec] I needed a way to let file handlers (written by reader developers) to have control over how extra metadata is combined among all of the "joined" datasets of a swath. This should probably be a classmethod, but I worry that may complicate customization and there is always a chance that instance variables may control this behavior. - Add more AMSR2 metadata to loaded datasets. [davidh-ssec] - Change exception to warning when navigation information can't be loaded. [davidh-ssec] - Move reader check to earlier in the file selection process. [davidh- ssec] The code was looking through each reader config file, instantiating each one, then running the `select_files` method only to return right away when the instantiated reader's name didn't equal the user's requested reader. This was a lot of wasted processing and will get worse with every new reader that's added. - Rename amsr2 reader to amsr2_l1b. [davidh-ssec] - Add AMSR2 36.5 channel. [davidh-ssec] - Fix reader finder so it returns when not asked for anything. [davidh- ssec] Resampling in the Scene object requires making an empty Scene. There was an exception being raised because the reader finder was trying to search for files in path `None`. - Add initial AMSR2 L1B reader (yaml) [davidh-ssec] - Make lons/lats for SwathDefinition in to masked arrays. [davidh-ssec] - Rewrite the yaml based reader loading methods. [davidh-ssec] Lightly tested. - Rename utility file handlers and moved base file handlers to new module. [davidh-ssec] The base file handlers being in yaml_reader could potentially cause a circular dependency. The YAML Reader loads a file handler which subclasses one of the base handlers which are in the same module as the yaml reader. - Fix filename_info name in file handler. [davidh-ssec] Oops - Pass filename info to each file handler. [davidh-ssec] There is a lot of information collected while parsing filenames that wasn't being passed to file handlers, now it is. This commit also includes renaming the generic file handler's (hdf5, netcdf) data cache to `file_content` because `metadata` was too generic IMO. - Finish merge of develop to yaml branch. [davidh-ssec] Starting merging develop and a few things didn't make it all the way over cleanly - Remove redundant log message. [davidh-ssec] - Fix reader keyword argument name change. 
[davidh-ssec] Also raise an exception if no readers are created - Merge branch 'develop' into feature-yaml-amsr2. [davidh-ssec] # Conflicts: # etc/readers/aapp_l1b.yaml # satpy/readers/__init__.py # satpy/readers/aapp_l1b.py # satpy/scene.py - Add OMPS so2_trm dataset. [davidh-ssec] - Rename "scaling_factors" to "factor" in reader configuration. [davidh- ssec] - Merge branch 'feature-omps-reader' into develop. [davidh-ssec] - Add simple OMPS EDR Reader. [davidh-ssec] - Clean up various reader methods. [davidh-ssec] In preparation for OMPS reader - Move HDF5 file wrapper to new hdf5_utils.py. [davidh-ssec] - Add the multiscene module to combine satellite datasets. [Martin Raspaud] The multiscene class adds the possibility to blend different datasets together, given a blend function. - Add a test yaml-based reader for aapp1b. [Martin Raspaud] - Fix manually added datasets not being resampled. [davidh-ssec] - Merge pull request #8 from davidh-ssec/feature-ewa-resampling. [David Hoese] Feature ewa resampling - Update EWA resampler to use new wrapper functions from pyresample. [davidh-ssec] - Move resample import in resample tests. [davidh-ssec] The resample module import now happens inside the test so only the resample tests fail instead of halting all unittests. - Fix resample test from moved resample import. [davidh-ssec] The 'resample' method imported at the top of projectable.py was moved to inside the resample method to avoid circular imports. The resample tests were still patching the global import. Now they modify the original function. I also imported unittest2 in a few modules to be more consistent. - Fix bug in EWA output array shape. [davidh-ssec] - Add initial EWA resampler. [davidh-ssec] - Move resample imports in Projectable to avoid circular imports. [davidh-ssec] - Rename `reader_name` scene keyword to `reader` [davidh-ssec] Also make it possible to pass an instance of a reader or reader-like class. Renaming is similar to how `save_datasets` takes a `writer` keyword. - Fix loading aggregated viirs sdr metadata. [davidh-ssec] Aggregated VIIRS SDR files have multiple `Gran_0` groups with certain attributes and data, like G-Ring information. Loading these in a simple way is a little more complex than the normal variable load and required adding a new metadata join method. - Refix reader_info reference in yaml base reader. [davidh-ssec] This fix got reverted in the last commit for some reason - Add support for modis l1b data. [Martin Raspaud] - Edit the wishlist only when needed. [Martin Raspaud] - Add MODIS l1b reader, no geolocation for now. [Martin Raspaud] - Assign right files to the reader. [Martin Raspaud] No matching of file was done, resulting in assigning all found files to all readers. - Fix reader_info reference in yaml base reader. [davidh-ssec] - Keep channels in the wishlist when necessary. [Martin Raspaud] Due to the creation of a DatasetID for each dataset key, the wishlist wasn't matching the actual ids of the datasets. - Adapt reading to yaml reader way. [Martin Raspaud] Since there is more delegating of tasks to the reader, the reading has to be adapted. - Cleanup using pep8. [Martin Raspaud] - Allow yaml files as config files. [Martin Raspaud] - Add the dependency tree based reading. [Martin Raspaud] - Update the yamlbased aapp reader. [Martin Raspaud] - Move the hdfeos reader to the readers directory. [Martin Raspaud] - Add the multiscene module to combine satellite datasets. 
[Martin Raspaud] The multiscene class adds the possibility to blend different datasets together, given a blend function. - Add a test yaml-based reader for aapp1b. [Martin Raspaud] - Fix netcdf dimension use to work with older versions of netcdf-python library. [davidh-ssec] - Add 'iter_by_area' method for easier grouping of datasets in special resampling cases. [davidh-ssec] - Fix bug when resampling is done for specific datasets. [davidh-ssec] This fix addresses the case when resampling is done for a specific set of datasets. The compute method will attempt to create datasets that don't exist after resampling. Since we didn't resample all datasets it will always fail. This commit only copies the datasets that were specified in resampling. It is up to the user to care for the wishlist if not using the default (resample all datasets). - Add dimensions to collected metadata for netcdf file wrapper. [davidh- ssec] I needed to use VIIRS L1B like I do VIIRS SDR for some GTM work and needed to copy over some of the metadata. One piece was only available as a global dimension of the NC file so I made it possible to ask for dimensions similar to how you can for attributes. - Fix crefl searching for coefficients by dataset name. [davidh-ssec] - Fix combining info when metadata is a numpy array. [davidh-ssec] - Fix incorrect NUCAPS quality flag masking data. [davidh-ssec] - Add .gitignore with python and C patterns. [davidh-ssec] - Add 'load_tests' for easier test selection. [davidh-ssec] PyCharm and possibly other IDEs don't really play well with unittest TestSuites, but work as expected when `load_tests` is used. - Fix resample hashing when area has no mask. [davidh-ssec] - Add test for scene iter and fix it again. [davidh-ssec] - Fix itervalues usage in scene for python 3. [davidh-ssec] - Allow other array parameters to be passed to MaskedArray through Dataset. [davidh-ssec] - Fix viirs l1b reader to handle newest change in format (no reflectance units) [davidh-ssec] - Fix bug in crefl compositor not respecting input data type. [davidh- ssec] - Fix NUCAPS H2O_MR Dataset to get proper field from file. [davidh-ssec] - Add environment variable SATPY_ANCPATH for crefl composites. [davidh- ssec] - Fix config files being loaded in the correct (reverse) order. [davidh- ssec] INI config files loaded from ConfigParser should be loaded in the correct order so that users' custom configs overwrite the builtin configs. For that to happen the builtin configs must be loaded first. The `config_search_paths` function had this backwards, but the compositor loading function was already reversing them. This commit puts the reverse in the config function. - Update setup.py to always require pillow and not import PIL. [davidh- ssec] It seems that in older versions of setuptools (or maybe even easy_install) that importing certain libraries in setup.py causes an infinite loop and eats up memory until it gets killed by the kernel. - Change NUCAPS H2O to H2O_MR to match name in file. [davidh-ssec] - Add quality flag filtering to nucaps reader. [davidh-ssec] - Change default units for NUCAPS H2O to g/kg. [davidh-ssec] - Add filtering by surface pressure to NUCAPS reader. [davidh-ssec] - Fix composite prereqs not being removed after use. [davidh-ssec] - Update metadata combining in viirs crefl composite. [davidh-ssec] - Perform the sharpening on unresampled data if possible. [Martin Raspaud] - Set the default zero height to the right shape in crefl. [Martin Raspaud] - Fix bug in viirs composites when combining infos. 
[davidh-ssec] - Add the cloudtop composite for viirs. [Martin Raspaud] - Merge pull request #7 from davidh-ssec/feature-crefl-composites. [David Hoese] Feature crefl composites - Remove ValueError from combine_info for one argument. [davidh-ssec] - Add info dictionary to Areas created in the base reader. [davidh-ssec] - Modify `combine_info` to work on multiple datasets. [davidh-ssec] Also updated a few VIIRS composites as test usages - Add angle datasets to viirs l1b for crefl true color to work. [davidh- ssec] - Cleanup crefl code a bit. [davidh-ssec] - Add sunz correction to CREFL compositor. [davidh-ssec] First attempt at adding modifiers to composites, but this method of doing it probably won't be used in the future. For now we'll keep it. - Fix bug in Scene where composite prereqs aren't removed after resampling. [davidh-ssec] - Rename VIIRS SDR solar and sensor angle datasets. [davidh-ssec] - Update crefl true color to pan sharpen with I01 if available. [davidh- ssec] - Fix crefl utils to use resolution and sensor name to find coefficients. [davidh-ssec] - Fix Dataset `mask` keyword being passed to MaskedArray. [davidh-ssec] - Remove filling masked values in crefl utils. [davidh-ssec] - Fix crefl composite when given percentage reflectances. [davidh-ssec] - Add basic crefl compositor. [davidh-ssec] - Clean up crefl utils and rename main function to run_crefl. [davidh- ssec] - Fix crefl utils bug and other code clean up. [davidh-ssec] - Add M band solar angles and sensor/satellite angles. [davidh-ssec] - Add `datasets` keyword to save_datasets to more easily filter by name. [davidh-ssec] - Make crefl utils more pythonic. [davidh-ssec] - Add original python crefl code from Ralph Kuehn. [davidh-ssec] - Fix the viirs truecolor composite to keep mask info. [Martin Raspaud] - Allow composites to depend on other composites. [Martin Raspaud] In the case of true color with crefl corrected channels for example, the true color needs to depend on 3 corrected channels, which in turn can now be composites. - Add Scene import to __init__ for convience. [davidh-ssec] - Add composites to 'available_datasets' [davidh-ssec] Additionally have Scene try to determine what sensors are involved if they weren't specified by the user. - Add proper "available_datasets" checks in config based readers. [davidh-ssec] - Move config utility functions to separate `config.py` module. [davidh- ssec] - Fix the 'default' keyword not being used checking config dir environment variable. [davidh-ssec] - Add H2O dataset to NUCAPS reader. [davidh-ssec] - Merge pull request #6 from davidh-ssec/feature-nucaps-reader. [David Hoese] Add NUCAPS retrieval reader - Cleanup code according to quantifiedcode. [davidh-ssec] Removed instances of checking length for 0, not using .format for strings, and various other code cleanups in the readers. - Add documentation to various reader functions including NUCAPS reader. [davidh-ssec] - Fix bug when filtering NUCAPS datasets by pressure level. [davidh- ssec] - Add initial NUCAPS retrieval reader. [davidh-ssec] - Move netcdf file handler class to separate module from VIIRS L1B reader. [davidh-ssec] Also prepare generic reader for handling other dimensions besides 2D. - Document the __init__.py files also. [Martin Raspaud] - Mock scipy and osgeo to fix doc generation problems. [Martin Raspaud] - Mock more imports for doc building. [Martin Raspaud] - Remove deprecated doc files. [Martin Raspaud] - Mock trollsift.parser for documentation building. 
[Martin Raspaud] - Update the doc conf.py file no mock trollsift. [Martin Raspaud] - Add satpy api documentation. [Martin Raspaud] - Post travis notifications to #satpy. [Martin Raspaud] - Fix a few deprecation warnings. [Martin Raspaud] - Document a few Dataset methods. [Martin Raspaud] - Fix div test skip in py3. [Martin Raspaud] - Skip the Dataset __div__ test in python 3. [Martin Raspaud] - Implement numeric type methods for Dataset. [Martin Raspaud] In order to merge or keep metadata for Dataset during arithmetic operations we need to implement the numeric type methods. - Cleanup unused arguments in base reader. [davidh-ssec] Also makes _load_navigation by renaming it to load_navigation to resolve some quantifiedcode code checks. - Add documentation to setup.py data file function. [davidh-ssec] - Fix call to netcdf4's set_auto_maskandscale in viirs l1b reader. [davidh-ssec] - Fix setup.py to find all reader, writer, composite configs. [davidh- ssec] - Merge pull request #5 from davidh-ssec/feature-viirs-l1b. [David Hoese] Add beta VIIRS L1B reader - Add LZA and SZA to VIIRS L1B config for DNB composites. [davidh-ssec] To make certain DNB composites available I added DNB solar and lunar zenith angle as well as moon illumination fraction. This also required detecting units in the ERF DNB composite since it assumes a 0-1 range for the input DNB data. - Remove debug_on from scene.py. [davidh-ssec] - Fix reader not setting units. [davidh-ssec] The default for FileKey objects was None for "units". This means that `setdefault` would never work properly. - Fix config parser error in python 3. [davidh-ssec] I tried to make typing easier by using interpolation (substitution) in the VIIRS L1B reader config, but changing from RawConfigParser to ConfigParser breaks things in python 3. I changed it back in this commit and did the config the "long way" with some find and replace. - Add DNB and I bands to VIIRS L1B reader. [davidh-ssec] - Fix brightness temperature M bands for VIIRS L1B. [davidh-ssec] - Add M bands to VIIRS L1B reader. [davidh-ssec] - Fix VIIRS L1B masking with valid_max. [davidh-ssec] - Add initial VIIRS L1B reader. [davidh-ssec] Currently only supports M01. - Revert test_viirs_sdr to np 1.7.1 compatibility. [Martin Raspaud] - Fix gring test in viirs_sdr. [davidh-ssec] - Add gring_lat and gring_lon as viirs_sdr metadata. [davidh-ssec] Also added join_method `append_granule` as a way to keep each granule's data separate. - Fix composite kd3 resampling. [Martin Raspaud] 3d array masks were not precomputed correctly, so we now make a workaround. A better solution is yet to be found. - Fix kd3 precomputation for AreaDefinitions. [Martin Raspaud] The lons and lats attributes aren't defined by default in AreaDefs, so we now make sure to call the get_lonlats method. - Set default format for dataset saving to geotiff. [Martin Raspaud] - Move `save_datasets` logic from Scene to base Writer. [davidh-ssec] - Fix bug in resample when geolocation is 2D. [davidh-ssec] The builtin 'any' function works for 1D numpy arrays, but raises an exception when 2D numpy arrays are provided which is the usual case for sat imagery. - Allow geotiff creation with no 'area' [davidh-ssec] Geotiff creation used to depend on projection information from the `img.info['area']` object, but it is perfectly legal to make a TIFF image with GDAL by not providing this projection information. This used to raise an exception, now it just warns. - Merge pull request #1 from pytroll/autofix/wrapped2_to3_fix. 
[Martin Raspaud] Fix "Consider dict comprehensions instead of using 'dict()'" issue - Use dict comprehension instead of dict([...]) [Cody] - Merge pull request #2 from pytroll/autofix/wrapped2_to3_fix-0. [Martin Raspaud] Fix "Explicitly number replacement fields in a format string" issue - Explicitely numbered replacement fields. [Cody] - Merge pull request #3 from pytroll/autofix/wrapped2_to3_fix-1. [Martin Raspaud] Fix "Use `is` or `is not` to compare with `None`" issue - Use `is` operator for comparing with `None` (Pep8) [Cody] - Merge pull request #4 from pytroll/autofix/wrapped2_to3_fix-2. [Martin Raspaud] Fix "Consider an iterator instead of materializing the list" issue - Use generator expression with any/all. [Cody] - Fix resample test for python 3. [Martin Raspaud] the dict `keys` method return views in py3. We now convert to list for consistency. - Add a test case for resample caching. [Martin Raspaud] - Revert resample cache changes. [Martin Raspaud] They didn't seem necessary in the way resampling is called. - Rename to satpy. [Martin Raspaud] - Remove the world_map.ascii file. [Martin Raspaud] - Allow compressed files to be checked by hrit reader. [Martin Raspaud] - Add number of scans metadata to viirs sdr config. [davidh-ssec] Also fixed rows_per_scan being a string instead of an integer when loaded from a navigation section. - Fix bug that removed most recent cached kdtree. [davidh-ssec] Nearest neighbor resampling cached multiple kdtree results and cleans up the cache when there are more than CACHE_SIZE items stored. It was incorrectly cleaning out the most recent key instead of the oldest key. - Fix bug when nearest neighbor source geo definition needs to be copied. [davidh-ssec] - Fix bug when specifying what datasets to resample. [davidh-ssec] - Move geolocation mask blending to resampling step. [davidh-ssec] The mask for geolocation (longitude/latitude) was being OR'd with the mask from the first dataset being loaded in the reader. This was ignoring the possibility that other loaded datasets will have different masks since AreaDefinitions are cached. This blending of the masks was moved to nearest neighbor resampling since it ignored other datasets' masks in the reader and is technically a limitation of the nearest neighbor resampling because the geolocation must be masked with the dataset mask for proper output. May still need work to optimize the resampling. - Add spacecraft_position and midtime metadata to viirs_sdr reader. [davidh-ssec] - Update changelog. [Martin Raspaud] - Bump version: 1.1.0 → 2.0.0-alpha.1. [Martin Raspaud] - Add config files for release utilities. [Martin Raspaud] We add the .bumpversion.cfg and .gitchangelog.rc for easy version bumping and changelog updates. - Remove v from version string. [Martin Raspaud] - Add str and repr methods for composites. [Martin Raspaud] This add simple repl and str methods for compositors. - Restructure the documentation for mpop2. [Martin Raspaud] This is an attempt to reorganize the documentation to prepare for mpop2. Old stuff has been take away, and a fresh quickstart and api are now provided. - Improve the ReaderFinder ImportError message to include original error. [Martin Raspaud] To make the ImportError more useful in ReaderFinder, the original error string is now provided. - Fix save_dataset to allow both empty filename and writer. [Martin Raspaud] When saving a dataset without a filename and writer, save_dataset would crash. Instead, we are now putting writer to "simple_image" in that case. 
- Rename projectable when assigning it through setitem. [Martin Raspaud] When a new dataset is added to a scene, its name should match the string key provided by the user. - Remove references to deprecated mpop.projector. [Martin Raspaud] - Allow resample to receive strings as area identifiers. [Martin Raspaud] In resample, the interactive user would most likely use pre-defined areas from a custom area file. In this case, it's much easier to refer to the area by name, than to get the area definition object from the file. This patch allows the `resample` projectable method to work with string ids also. - Add a dataset to wishlist when added with setitem. [Martin Raspaud] When adding a dataset to a scene via the datasetdict.__setitem__ method, it is likely that the user cares about this dataset. As such, it should be added to the wishlist in order not to get removed accidentally. - Move composite loading out of Scene to mpop.composites. [Martin Raspaud] The loading of compositors was a part of the Scene object. However, it does not belong there, so we decided to move it out of Scene. The next logical place to have it is the mpop.composites module. As a counterpart, we now provide the `available_composites` method to the Scene to be able to figure out what we can generate. - Fix the travis file to allow python 2.6 to fail. [Martin Raspaud] - Allow travis to fail on python 2.6. [Martin Raspaud] - Install importlib for travis tests on python 2.6. [Martin Raspaud] - Add `behave` to the pip installations in travis. [Martin Raspaud] - Add behaviour testing to travis and coveralls. [Martin Raspaud] - Add behaviour tests for showing and saving datasets. [Martin Raspaud] Three scenarios were added, testing showing a dataset, saving a dataset, and bulk saving datasets (`save_datasets`). - Fix loading behaviour tests. [Martin Raspaud] A little cleanup, and using builtin functions for getting the dataset_names - Fix DatasetDict's setitem to allow empty md in value. [Martin Raspaud] Sometimes a dataset/projectable doesn't have any info attached to it, e.g. because the dataset is synthetic. In these cases, setitem would crash. This is now fixed, and if a string is provided as a key in setitem it is used as a name if no better name is already there. - Simplify dataset saving to disk. [Martin Raspaud] Saving datasets can now be done one by one. If a writer is not provided, it is guessed from the filename extension. - Add a show method to the Scene class. [Martin Raspaud] That allows the user to interactively visualize the data - Add a default areas.def file. [Martin Raspaud] - Fix the manifest file to include the config files. [Martin Raspaud] - Add missing config files to setup.py. [Martin Raspaud] - Fix setup.py to add cfg files. [Martin Raspaud] This is in order to make mpop work out of the box after a pip install. - Add a behaviour test to find out the available dataset. [Martin Raspaud] - Prevent crashing when a load requirement is not available. [Martin Raspaud] When requiring a band which isn't available, mpop would crash. This is now fixed and replaced by a warning in the log. - Use behave to do higher level tests. [Martin Raspaud] Two small scenarios for testing the loading of the data are implemented now. - Fix import error in scene. [davidh-ssec] A small refactor was done and then undone to move DatasetDict and DatasetID. This little import change wasn't properly cleaned up. - Fix scene to work with "2 part" compositors and added pan sharpened true color composite as an example.
[davidh-ssec] - Added log message to pillow writer to say what filename it was saving to. [davidh-ssec] - Handle optional dependencies for composites (not tested) [davidh-ssec] - Activate the remaining viirs_sdr reader test cases. [Martin Raspaud] - Remove the overview_sun TODO item. [Martin Raspaud] - Fix the multiple load issue for composites. [Martin Raspaud] The composite loading would crash when several composites would be loaded one after the other. This was because composite config files where loaded partially but were considered loaded entirely. In order to fix this problem and make things simpler, we removed the composite config mechanism entirely, so that the composites are reloaded everytime. That allows both config changing on the fly, but also more resilience for multiple sensor cases, like one sensor is loaded after another, and the composites wouldn't get updated. - Fix the name issue in sensor-specific composite requests. [Martin Raspaud] The read_composite_config was requiring wrongly that the provided names should be empty or None, making it not read the sensor config file at all. In turn that meant that generic composites were used instead of sensor- specific ones. - Got metadata requests working for composites. [davidh-ssec] - Use DatasetID in composite requirements instead of names and wavelengths only. [davidh-ssec] - Adds ERF DNB composite and updates compositor base to allow for metadata and optional requirements although they are not completely used yet. [davidh-ssec] - Added adaptive DNB product. [davidh-ssec] - Fixed bug in scene when getting writer instance in save_images. [davidh-ssec] - Fix the dataset str function to allow missing name and sensor keys. [Martin Raspaud] - Add quickstart seviri to the documentation. [Martin Raspaud] - Update the documentation. [Martin Raspaud] - Add a get_writer function to the scene object. [Martin Raspaud] - Updating dataset displaying. [Martin Raspaud] - Add a fixme comment. [Martin Raspaud] - Added histogram_dnb composite as a stepping stone for getting more complex composites added (ex. adaptive_dnb) [davidh-ssec] - Can now retrieve channel with incomplete DatasetID instance. [Martin Raspaud] - First try at loading metadata. [davidh-ssec] - Added python 3.5 to travis tests and removed 3.x as allowed failures. [davidh-ssec] - Added basic test for DatasetDict. [davidh-ssec] - Refactored some file reader methods to properties to be more pythonic. [davidh-ssec] - Viirs test case now works with python3 hopefully. [Martin Raspaud] - Fixed file units for eps l1b reflectances. [davidh-ssec] - Corrected frame indicator for eps l1b band 3a. [davidh-ssec] - Updated eps l1b config with temporary calibration information. [davidh-ssec] - First attempt at rewriting eps l1b reader to be more configurable (overkill?) [davidh-ssec] - Renamed Scene projectables to datasets. [davidh-ssec] - Updated eps l1b file reader to match base class. [davidh-ssec] - Made generic single file reader abstract base class and cleaned up viirs sdr tests. [davidh-ssec] - Added a fixme comment. [Martin Raspaud] - Enable python 3 and osx builds in travis. [Martin Raspaud] - Config treatment for enhancements. [davidh-ssec] - Update config handling for finding composites. [davidh-ssec] - Small fix for dumb environment variable clear on tests. [davidh-ssec] - First attempt at getting readers and writers using PPP_CONFIG_DIR as a supplement to builtin configs. [davidh-ssec] - Fixed scene tests so they pass. 
[davidh-ssec] - Added base_dir for finding input files and a separate base_dir kwargs on save_images. [davidh-ssec] - Makes wishlist a set and should fix problems with multiple loads. [davidh-ssec] - Fixed calibration and other DatasetID access in reader, hopefully. [davidh-ssec] - Fix the xrit reader. [Martin Raspaud] - Cleanup to prepare for handling calibration better. [davidh-ssec] - Updated filtering based on resolution, calibration, and polarization. [davidh-ssec] - Updated how readers create dataset info and dataset ids. [davidh-ssec] - Added calibration to DatasetID (not used yet) and added helper method on DatasetDict for filtering retrieved items and keys. [davidh-ssec] - Renamed BandID to DatasetID. [davidh-ssec] - Better handling of loading composite dependencies...i think. [davidh- ssec] - Got EPS L1B reader working again with readers being given BandID objects. [davidh-ssec] - Fixed small bug with extra empty string being listed as reader file pattern. [davidh-ssec] - Made DatasetDict accept non-BandID keys during setitem. [davidh-ssec] - Fixed default file reader for the eps l1b reader. [davidh-ssec] - A little more cleanup of unused code in viirs sdr. [davidh-ssec] - More work on viirs sdr using base reader class. [davidh-ssec] - Started using ConfigBasedReader as base class for VIIRS SDR reader. [davidh-ssec] - Fixed failing scene tests. [davidh-ssec] - Got viirs sdr reader working with namedtuple dataset keys. [davidh- ssec] - Continue on python3 compatibility. [Martin Raspaud] - Cleanup. [Martin Raspaud] - WIP: Start python 3 support. [Martin Raspaud] - Smoother transition in the sun zenith correct imagery. [Martin Raspaud] - Move reader discovery out of the scene and into mpop.readers. [Martin Raspaud] The class ReaderFinder was created for this purpose. - Cleanup. [Martin Raspaud] - Fix overview and natural composites. [Martin Raspaud] - Make read and load argument lists consistent. [Martin Raspaud] - Fix the M01 dataset definition in viirs_sdr.cfg. [Martin Raspaud] - Fix some viirs composites. [Martin Raspaud] - Fix viirs_sdr loading using start and end times. [Martin Raspaud] - Introduce BandIDs to allow for more complex referencing of datasets. [Martin Raspaud] - Add the BandID namedtuple (name, wl, resolution, polarization) - Fix querying for compatibility with BandIDs - Fix existing readers for BandIDs Example usage from the user side: scn.load([BandID(wavelength=0.67, resolution=742), BandID(wavelength=0.67, resolution=371), "natural", "true_color"]) BandIDs are now used internally as key for the scene's projectables dict. - Add file keys to metop's getitem. [Martin Raspaud] - Rename metop calibration functions. [Martin Raspaud] - Add file keys for start and end times for metop. [Martin Raspaud] - Merge the old eps l1b reader with the new one. [Martin Raspaud] - More work on EPS l1b reader. [Martin Raspaud] - Initial commit for the metop eps l1b reader. [Martin Raspaud] - New attempt at calibration keyword in viirs sdr reader. [davidh-ssec] - Renamed 'channel' to 'dataset' [davidh-ssec] - Added more tests for VIIRS SDR readers before making calibration or file discovery changes. [davidh-ssec] - Use "super" in the readers. [Martin Raspaud] - Hopefully fixed py2.6 incompatibility in string formatting. [davidh- ssec] - Added viirs sdr tests for MultiFileReader and HDF5MetaData. [davidh- ssec] - More viirs sdr file reader tests. [davidh-ssec] - Simple proof of concept for calibration level in viirs sdr reader. 
[davidh-ssec] - Fixed getting end orbit from last file reader in viirs sdr reader. [davidh-ssec] - Use unittest2 in viirs sdr tests so we can use new features. [davidh- ssec] - Added unittest2 to py26 travis build to hopefully fix h5py importerror. [davidh-ssec] - Added h5py and hdf5 library to travis. [davidh-ssec] - Started adding basic VIIRS SDR reader tests. [davidh-ssec] - Changed scene to accept sequence instead of *args. [davidh-ssec] - Merge branch 'feature-simplify-newreader' into feature-simplify. [davidh-ssec] - Added simple method for finding geolocation files based on header values. [davidh-ssec] - Added rows per scan to viirs sdr metadata. [davidh-ssec] - Got units and file units working for VIIRS SDR reader. [davidh-ssec] - Cleaner code for viirs sdr scaling factor check and made sure to OR any previous masks. [davidh-ssec] - Better memory usage in new style viirs sdr reader. [davidh-ssec] - First step in proof of concept with new reader design. Mostly working VIIRS SDR frontend. [davidh-ssec] - Fixed get_area_file in the resample.py module. [davidh-ssec] - Allowed sensor to be specified in the reader section. [davidh-ssec] - Added method to base plugin to determine type of a section. [davidh- ssec] - Make sunzenithnormalize a modern class. [Martin Raspaud] - Add sunz correction feature. [Martin Raspaud] - Avoid an infinite loop. [Martin Raspaud] - Add travis notifications to slack. [Martin Raspaud] - Remove unneeded code for composites. [Martin Raspaud] - Add a few composites. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Allow json in enhancement config files. [Martin Raspaud] - Switch on test for writers. [Martin Raspaud] - Move tests for image stuff to corresponding test file. [Martin Raspaud] - Move image stuff out of projectable into writers/__init__.py. [Martin Raspaud] - Forgot to change reader/writer base class imports. [davidh-ssec] - Moved reader and writer base classes to subpackages. [davidh-ssec] - Reworked configuration reading in plugins for less redundancy. [davidh-ssec] - Small fixes to make VIIRS SDR reader work with new resampling. [davidh-ssec] - Fix the wishlist names and removing uneeded info when building RGB composites. [Martin Raspaud] - Dataset is now a subclass of np.ma.MaskedArray. [Martin Raspaud] - Move determine_mode to projectable. [Martin Raspaud] - Add helper function to read config files and get the area def file. [Martin Raspaud] - Rename precompute kwarg to cache_dir. [Martin Raspaud] - Convenience enhancements for resample. [Martin Raspaud] - we can now provide "nearest" or "kdtree" instead of a resampler class. - The precompute/dump kwarg is now a directory where to save the proj info, defaulting to '.' if precompute=True. - Switch to containers in travis. [Martin Raspaud] - Fix repo in .travis. [Martin Raspaud] - Add OrderedDict for python < 2.7. [Martin Raspaud] - Resample is now feature complete. [Martin Raspaud] - Dump kd_tree info to disk when asked - Cache the kd_tree info for later use, but cache is cleaned up. - OO architecture allowing other resampling methods to be implemented. - resampling is divided between pre- and actual computation. - hashing of areas is implemented, resampler-specific. - Fixed bad patch on new scene test. [davidh-ssec] - First try at more scene tests. [davidh-ssec] - Move image generation methods to Dataset and move enh. application to enhancer. [Martin Raspaud] - Sensor is now either None, a string, or a non-empty set. [Martin Raspaud] - Forgot to actually use default writer config filename. 
[davidh-ssec] - Fixed simple scene test for checking ppp_config_dir. [davidh-ssec] - Slightly better handling of default writer configs and writer arguments. [davidh-ssec] - Add a writer for png images, and move enhancer to mpop.writers. [Martin Raspaud] - Detached the enhancements handling into an Enhancer class. [Martin Raspaud] - Pass ppp_config_dir to writer, still needs work. [davidh-ssec] - First attempt at configured writers and all the stuff that goes along with it. Renamed 'format' in configs to more logical name. [davidh- ssec] - Remove the add_product method. [Martin Raspaud] - Cleanup scene unittest. [Martin Raspaud] - Finish testing scene.get_filenames. [Martin Raspaud] - Testing scene.get_filenames. [Martin Raspaud] - Updated tests to test new string messages. 100%! [davidh-ssec] - Merge branch 'pre-master' into feature-simplify. [Martin Raspaud] Conflicts: mpop/satellites/__init__.py mpop/satin/helper_functions.py mpop/satin/mipp_xrit.py - Add algorithm version in output cloud products. [Martin Raspaud] - Minor PEP8 tweaks. [Panu Lahtinen] - Script to generate external calibration files for AVHRR instruments. [Panu Lahtinen] - Support for external calibration coefficients for AVHRR. [Panu Lahtinen] - Removed obsolete "satname" and "number" from satellite configs, updated documentation. [Panu Lahtinen] - Renamed satellite configs to conform to OSCAR naming scheme. [Panu Lahtinen] - Add luts to the pps products from msg format. [Martin Raspaud] - Add metadata to nwcsaf products. [Martin Raspaud] - Add \0 to palette strings. [Martin Raspaud] - Fix pps format output for msg products. [Martin Raspaud] - Remove phase palette from msg products to avoid confusion. [Martin Raspaud] - Bugfix, np.string -> np.string_ [Martin Raspaud] - Change variable length strings in h5 products to fixed. [Martin Raspaud] - Fix some cloud product conversions. [Martin Raspaud] - Fix MSG format to PPS format conversion. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - Merge pull request #16 from pnuu/simplified_platforms. [Martin Raspaud] Simplified platform names for reading custom composites - Simplified platform names for reading custom composites. [Panu Lahtinen] - Change: accept arbitrary kwargs for saving msg hdf products. [Martin Raspaud] - Revert concatenation to it's original place, in order to keep the tests working. [Martin Raspaud] - Fix whole globe area_extent for loading. [Martin Raspaud] - Fix rpm building. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Change printing of projectables and cleanup. [Martin Raspaud] - Start testing mpop.scene. [Martin Raspaud] - Fixed assertIn for python 2.6. [davidh-ssec] - Added more tests for projectables and updated projectable 3d resample test. 100% coverage of projectable! [davidh-ssec] - Renamed .products to .compositors and fixed unknown names bug. [davidh-ssec] - Added check to see what composite configs were read already. [davidh- ssec] - Do not reread already loaded projectables. [Martin Raspaud] - Complete .gitignore. [Martin Raspaud] - Fix unittests for python 2.6. [Martin Raspaud] - Unittesting again... [Martin Raspaud] - More unittesting. [Martin Raspaud] - Fix projectables str to look better. [Martin Raspaud] - More unittesting. [Martin Raspaud] - Fix unittests for python 2.6. [Martin Raspaud] - Still cleaning up. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Add tests to the package list in setup.py. [Martin Raspaud] - Make pylint happy. 
[Martin Raspaud] - Fix tests for projectable to pass on 2.6. [Martin Raspaud] - Start testing the new stuff in travis. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Renamed newscene to scene. [Martin Raspaud] - Moved updated readers from mpop.satin to mpop.readers. [Martin Raspaud] - Changed 'uid' to 'name' for all new components. [davidh-ssec] - Moved composite configs to separate subdirectory. [davidh-ssec] - Add an RGBCompositor class and cleanup. [Martin Raspaud] - Allow passing "areas" to mipp_xrit. [Martin Raspaud] - Fix the overview composite giving sensible defaults. [Martin Raspaud] - Fixed bug with RGB composites with passing the wrong info keywords. [davidh-ssec] - Changed sensor keyword in scene to reader and added new sensor keyword behavior to find readers based on sensor names. [davidh-ssec] - Changed new style composites to use a list of projectables instead of the scene object implemented __setitem__ for scene. [davidh-ssec] - Reworked viirs and xrit reader to use .channels instead of .info. Simplified reader loading in newscene. [davidh-ssec] - Test and fix projectable. [Martin Raspaud] - Allow reading from wavelength, and add Meteosat HRIT support. [Martin Raspaud] - Moved reader init to scene init. Successfully created resampled fog image using composite configs. [davidh-ssec] - Added some default configs for new scene testing. [davidh-ssec] - Started rewriting viirs sdr reader to not need scene and produce projectables. [davidh-ssec] - Better config reading, and scene init. [Martin Raspaud] - WIP: removed CONFIG_PATH and changed projectables list into dict. [davidh-ssec] - Add resampling. Simple for now, with elementary caching. [Martin Raspaud] - WIP. [Martin Raspaud] * Product dependencies * loading from viirs * generating images - WIP: successfully loaded the first viirs granule with newscene! [Martin Raspaud] - Rewriting scene. [Martin Raspaud] - Add helper function to find files. [Martin Raspaud] - Fix the config eval thing in scene. [Martin Raspaud] - Fix masking of lonlats in viirs_sdr. [Martin Raspaud] - Fixing pps-nc reader. [Adam Dybbroe] - Clean temporary files after loading. [Adam Dybbroe] - Pep8 stuff. [Adam Dybbroe] - Fixed polar-stereographic projection bugs, thanks to Ron Goodson. [Lars Orum Rasmussen] - Update changelog. [Martin Raspaud] - Bump version: 1.0.2 → 1.1.0. [Martin Raspaud] - Put config files in etc/pytroll. [Martin Raspaud] - Fix version strings. [Martin.Raspaud] - Don't close the h5 files too soon. [Martin Raspaud] - Close h5 file uppon reading. [Adam Dybbroe] - Bugfix. [Adam Dybbroe] - Try a more clever handling of the case where more level-1b files exist for given sat and orbit. [Adam Dybbroe] - Print out files matching in debug. [Martin Raspaud] - Bugfix. [Adam Dybbroe] - Adding debug info. [Adam Dybbroe] - Bugfix. [Adam Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Remove ugly print statements. [Martin Raspaud] - Load the palettes also. [Martin Raspaud] - AAPP1b: use operational coefficients for vis calibrating per default. [Martin Raspaud] - Fallback to pre-launch if not available. - load(..., pre_launch_coeffs=True) to force using pre-launch coeffs) - Correct npp name in h5 files. [Martin Raspaud] - Add the pps v2014 h5 reader. [Martin Raspaud] - Use h5py for lonlat reading also. [Martin Raspaud] - Use h5py instead of netcdf for reading nc files. [Martin Raspaud] - Fix orbit as int in nc_pps loader. [Martin Raspaud] - Add overlay from config feature. 
[Martin Raspaud] - Remove type testing for orbit number. [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Allowing kwargs. [Martin Raspaud] - Add 10 km to the area extent on each side, to avoid tangent cases. [Martin Raspaud] - Orbit doesn't have to be a string anymore. [Martin Raspaud] - Fix multiple file loading for metop l1b data. [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Implement save for all cloudproducts. [Martin Raspaud] - Change options names to cloud_product_* and add lookup in os.environ. [Martin Raspaud] - Some fixes to nc_pps_l2 for correct saving. [Martin Raspaud] - Add saving to the cloudtype object. [Martin Raspaud] - Add the save method to cloudtype object. [Martin Raspaud] - Rename _md attribute to mda. [Martin Raspaud] - Mask out bowtie deleted pixels for Suomi-NPP products. [Martin Raspaud] - When a file is provided in nc_pps_l2, just read this file. [Martin Raspaud] - Fix nc_pps_l2 for filename input and PC readiness. [Martin Raspaud] - ViirsSDR: Fix not to crash on single file input. [Martin Raspaud] - Fix aapp1b to be able to run both for given filename and config. [Martin Raspaud] - Try loading according to config if provided file doesn't work, aapp1b. [Martin Raspaud] - Don't crash when reading non aapp1b file. [Martin Raspaud] - Remove "/" from instrument names when loading custom composites. [Martin Raspaud] - Don't say generate lon lat when returning a cached version. [Martin Raspaud] - Nc_pps_l2: don't crash on multiple files, just go through them one at the time. [Martin Raspaud] - Hdfeos: don't just exit when filename doesn't match, try to look for files. [Martin Raspaud] - Don't crash if the file doesn't match (hdfeos) [Martin Raspaud] - Revert nc_reader back until generalization is ready. [Martin Raspaud] - Merge branch 'ppsv2014-reader' of github.com:mraspaud/mpop into ppsv2014-reader. [Martin Raspaud] - Adding dataset attributes to pps reading. [Adam Dybbroe] - Allow inputing filename in the nc_pps_l2 reader. [Martin Raspaud] - Merge branch 'pre-master' into ppsv2014-reader. [Martin Raspaud] - Viirs readers fixes. [Martin Raspaud] - Hdf_eos now uses 1 out of 4 available cores to interpolate data. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - Fixed bug, now handling fill_value better. [Lars Orum Rasmussen] - More robust tiff header file decoder. [Lars Orum Rasmussen] - Add dnb_overview as a standard product (dnb, dnb, 10.8) [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - Corrected the reader for SAFNWC/PPS v2014. [Sara.Hornquist] - Allow multiresolution loading in hdf eos reader. [Martin Raspaud] - Revert back to old nwcsaf-pps reader for hdf. The reading of the new netcdf format is done with another reader! [Adam Dybbroe] - A new pps reader for the netCDF format of v2014. [Adam Dybbroe] - Adding for new cloudmask and type formats... [Adam Dybbroe] - Enhance nwc-pps reader to support v2014 format. [Adam Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Put the config object back in Projector. [Martin Raspaud] - Fix area_file central search. [Martin Raspaud] - Move the area_file search inside Projector. [Martin Raspaud] - Error when satellite config file is not found. [Martin Raspaud] - Get rid of the funky logging style. 
[Martin Raspaud] - Log the config file used to generate the scene. [Martin Raspaud] - Support filename list to load in viirs_sdr loader. [Martin Raspaud] - Add avhrr/3 as aliar to avhrr in aapp reader. [Martin Raspaud] - Fix name matching in hdfeos_l1b. [Martin Raspaud] The full name didn't work with fnmatch, take basename instead. - Allows hdfeos_l1b to read a batch of files. [Martin Raspaud] - Add delitem, and code cleanup. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - Added a reader for SAFNWC/PPS v2014 PPS v2014 has a different fileformat than previous SAFNWC/PPS versions. [Sara.Hornquist] - Aapp1b reader, be more clever when (re)reading. [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] Conflicts: mpop/satout/netcdf4.py - Allow reading several files at once in viirs_compact. [Martin Raspaud] - Allow reading several files at once in eps_l1b. [Martin Raspaud] - Style: use in instead for has_key() [Martin Raspaud] - Adding primitive umarf (native) format reader for meteosat. [Martin Raspaud] - Add logging when an info field can't be save to netcdf. [Martin Raspaud] - Add a name to the area when loading aapp data. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - For PNG files, geo_mage.tags will be saved a PNG metadata. [Lars Orum Rasmussen] - Add a save method to cfscene objects. [Martin Raspaud] - Don't take None as a filename in loading avhrr data. [Martin Raspaud] - Allow loading a file directly for aapp1b and eps_l1b. [Martin Raspaud] Just run global_data.load(..., filename="/path/to/myfile.1b") - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - Viirs_sdr can now load depending on an area. [Martin Raspaud] - Pep8 cosmetics. [Adam Dybbroe] - Merge pull request #12 from pnuu/pre-master. [Martin Raspaud] Fixed "logger" to "LOGGER" - Fixed "logger" to "LOGGER" [Panu Lahtinen] - Moving pysoectral module import down to function where pyspectral is used. [Adam Dybbroe] - Merge branch 'smhi-premaster' into pre-master. [Adam Dybbroe] - Fixing cloudtype product: palette projection. [Adam Dybbroe] - Turned on debugging to geo-test. [Adam Dybbroe] - Added debug printout for cloud product loading. [Adam Dybbroe] - Make snow and microphysics transparent. [Martin Raspaud] - Rename day_solar to snow. [Martin Raspaud] - Keep the name of cloudtype products when projecting. [Martin Raspaud] - Explicitly load parallax corrected files if present. [Martin Raspaud] - Adding logging for MSG cloud products loading. [Martin Raspaud] - Fix the parallax file sorting problem, again. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Bugfix. [Adam Dybbroe] - Merge branch '3.9reflectance' into pre-master. [Adam Dybbroe] Conflicts: mpop/channel.py mpop/instruments/seviri.py mpop/satin/mipp_xrit.py setup.py - Support for rgbs using the seviri 3.9 reflectance (pyspectral) [Adam Dybbroe] - Adding a sun-corrected overview rgb. [Adam Dybbroe] - Adduing for "day microphysics" RGB. [Adam Dybbroe] - Deriving the day-solar RGB using pyspectral to derive the 3.9 reflectance. [Adam Dybbroe] - Use "imp" to find input plugins. [Martin Raspaud] - Cleanup trailing whitespaces. 
[Martin Raspaud] - Use cartesian coordinates for lon/lat computation if near-pole situations. [Martin Raspaud] - Set alpha channel to the same type as the other channels. [Martin Raspaud] - Sort the filenames in get_best_products (msg_hdf) [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Merge pull request #10 from pnuu/pre-master. [Martin Raspaud] Fixed failed merging. Thanks Pnuu. - Fixed failed merging (removed "<<<<<<< HEAD" and ">>>>>>> upstream /pre-master" lines) [Panu Lahtinen] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Fix terra and aqua templates for the dual gain channels (13 & 14) [Adam Dybbroe] - Read both parallax corrected and usual cloudtype products. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud] - Merge pull request #9 from pnuu/pre-master. [Martin Raspaud] Possibility to get area_extent from area definition(s) - Tests for mpop.satin.helper_functions.boundaries_to_extent. [Panu Lahtinen] - Separated area definitions and boundary calculations. [Panu Lahtinen] - Added test if proj string is in + -format or not. [Panu Lahtinen] - Re-ordered the tests. [Panu Lahtinen] - Fixed incorrect correct values. [Panu Lahtinen] - Test using area definitions instead of definition names. [Panu Lahtinen] - Possibility to give also area definition objects to area_def_names_to_extent() and log a warning if the area definition is not used. [Panu Lahtinen] - Fixed import. [Panu Lahtinen] - Added tests for mpop.satin.helper_functions. [Panu Lahtinen] - Moved to mpop/tests/ [Panu Lahtinen] - Moved to mpop/tests/ [Panu Lahtinen] - Merge remote-tracking branch 'upstream/pre-master' into pre-master. [Panu Lahtinen] Conflicts: mpop/satin/aapp1b.py - Removed unneeded functions. [Panu Lahtinen] - Test for area_def_names_to_extent() [Panu Lahtinen] - Removed unnecessary functions. [Panu Lahtinen] - Removed swath reduction functions. [Panu Lahtinen] - Reverted not to reduce swath data. [Panu Lahtinen] - Added possibility to do data reduction based on target area definition names. [Panu Lahtinen] - Added area extent calculations based on given area definition names. [Panu Lahtinen] - Helper functions for area extent and bondary calculations, and data reducing for swath data. [Panu Lahtinen] - Test for mpop.satin.mipp_xrit.lonlat_to_geo_extent() [Panu Lahtinen] - Support for lon/lat -based area extents. [Panu Lahtinen] - Add start and end time defaults for the images (runner). [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Do not mask out negative reflectances in viirs_sdr reading. [Martin Raspaud] - Added navigation to hrpt_hmf plugin. [Martin Raspaud] - Started working on a new plugin version of hdfeos_l1b. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Adding scene tests to the test suite. [Martin Raspaud] - Revamped scene unittests. [Martin Raspaud] - Don't crash on errors. [Martin Raspaud] - Revamped projector tests. [Martin Raspaud] - More geo_image testing. 
[Martin Raspaud] - Don't use "super" in geo_image. [Martin Raspaud] - Fix testing. [Martin Raspaud] - Mock pyresample and mpop.projector in geo_image tests. [Martin Raspaud] - More testing geo_image. [Martin Raspaud] - Add tests for geo_image. [Martin Raspaud] - Merge branch 'unstable' of ssh://safe/data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Mock gdal for geo_image tests. [Martin Raspaud] - Added netCDF read support for four more projections. [Adam Dybbroe] - Adding support for eqc in cf format. [Adam Dybbroe] - Added config templates for GOES and MTSAT. [Lars Orum Rasmussen] - Copied visir.night_overview to seviri.night_overview, so night_overview.prerequisites is correct when night_overview is called from seviri.py. [ras] - Cloutop in seviri.py now same arguments as cloudtop in visir.py. [Lars Orum Rasmussen] - Fix saving as netcdf. [Martin Raspaud] - Fix floating point tiff saving. [Martin Raspaud] - Make pillow a requirement only if PIL is missing. [Martin Raspaud] - Add some modules to mock in the documentation. [Martin Raspaud] - Add pyorbital to the list of packets to install in travis. [Martin Raspaud] - Merge branch 'feature-travis' into unstable. [Martin Raspaud] - Test_projector doesn't pass. [Martin Raspaud] - Test_projector ? [Martin Raspaud] - Fix travis. [Martin Raspaud] - Adding test_geoimage. [Martin Raspaud] - Test_channel passes, test_image next. [Martin Raspaud] - Test_pp_core crashes, test_channel on. [Martin Raspaud] - Commenting out tests to find out the culprit. [Martin Raspaud] - Ok, last try for travis-ci. [Martin Raspaud] - What is happening with travis ? [Martin Raspaud] - More fiddling to find out why travis-ci complains. [Martin Raspaud] - Testing the simple test way (not coverage) [Martin Raspaud] - Trying to add the tests package for travis-ci. [Martin Raspaud] - Add the tests package. [Martin Raspaud] - Preprare for travis-ci. [Martin Raspaud] - Support 16 bits images (geotiff only at the moment). [Martin Raspaud] - Merge pull request #8 from pnuu/pre-master. [Martin Raspaud] Sun zenith angle correction added. - A section on mpop.tools added to documentation. [Panu Lahtinen] - Extra tests for sun_zen_corr(). [Panu Lahtinen] - Typo. [Panu Lahtinen] - Channel descriptions added. [Panu Lahtinen] - Channel desctiptions are added. [Panu Lahtinen] - Clarification to help sunzen_corr_cos() desctiption. [Panu Lahtinen] - Test cases for channel.sunzen_corr(). [Panu Lahtinen] - Sun zenith angle correction split into two functions. [Panu Lahtinen] - Revert to original version. [Panu Lahtinen] - Initial commit of mpop.tools (with Sun zenith angle correction). [Panu Lahtinen] - Sun zenith angle correction added. [Panu Lahtinen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [ras] - Solve the multiple channel resolution with automatic resampling radius. [Martin Raspaud] - Add the "nprocs" option to projector objects and scene's project method. [Martin Raspaud] - Now saving orbit number (if available) as global attribute. [ras] - Adding more files to be ignored. [ras] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [ras] - New reader for hrpt level0 format. [Martin Raspaud] - Fix no calibration reading for aapp1b. [Martin Raspaud] - Add the product name to the the image info. [Martin Raspaud] - Add some debugging info about missing pixels in viirs_sdr. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Corrected a comment. 
[Adam Dybbroe] - Fix for M13 load problem - reported by stefano.cerino@gmail.com. [Adam Dybbroe] - Use number of scan to load the right amount of data in compact viirs reader. [Martin Raspaud] - Fix hook to be able to record both filename and uri. [Martin Raspaud] - Protecting MPOP from netcdf4's unicode variables. [ras] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Adding a new convection RGB with co2 correction for SEVIRI. [Adam Dybbroe] - Temporary hack to solve for hdf5 files with more than one granule per file. [Adam Dybbroe] - Removing messaging code from saturn and added a more generic "hook" argument. [Martin Raspaud] - Bumped up version. [Martin Raspaud] - Make viirs_compact scan number independent. [Martin Raspaud] - Cleanup: marking some deprecated modules, removing unfinished file, improving documentation. [Martin Raspaud] - Adding the ears-viirs compact format reader. Untested. [Martin Raspaud] - Code cleanup. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] Conflicts: mpop/imageo/geo_image.py - Night_color (should had beed called night_overview) is the same as cloudtop. [Lars Orum Rasmussen] - Bug fix from Bocheng. [Lars Orum Rasmussen] - Night_overview is just like cloudtop. [Lars Orum Rasmussen] - Now also handling Polar satellites. [Lars Orum Rasmussen] - Cosmetic. [Lars Orum Rasmussen] - Fixed merge conflict. [Lars Orum Rasmussen] - Trying out a chlorophyll product. [Lars Orum Rasmussen] - Added a night overview composite. [Lars Orum Rasmussen] - Better check for empty array. [Lars Orum Rasmussen] - Fix logging. [Martin Raspaud] - Fix backward compatibility in, and deprecate image.py. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Calling numpy percentile only once when doing left and right cut offs. [Adam Dybbroe] - Add support for identifying npp directories by time-date as well as orbit number. [Adam Dybbroe] - Fix histogram-equalization stretch test. [Adam Dybbroe] - Bugfix in histogram equalization function. [Adam Dybbroe] - Using percentile function to generate histogram with constant number of values in each bin. [Adam Dybbroe] - Using numpy.pecentile function to cut the data in the linear stretch. [Adam Dybbroe] - Fix histogram stretch unit test. [Adam Dybbroe] - Correcting the histogram stretching. The com_histogram function was in error when asking for "normed" histograms. [Adam Dybbroe] - Added histogram method that makes a more populated histogram when the data are heaviliy skeewed. Fixes problem seen by Bocheng in DNB imagery. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Don't remove GeolocationFlyweight _instances, but reset it. Allowing for multiple "loads" [Adam Dybbroe] - Add imageo.formats to installation. [Martin Raspaud] - AAPP loading bug fix. [Martin Raspaud] the aapp1b.py loader to aapp data was broken as it was loading both channels 3a and 3b each time, one of them being entirely masked. This of course created some problem further down. Fixed by setting the not loadable channel to None. - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Bugfix in npp.cfg template. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Fixing bug concerning the identification of VIIRS geolocation files. 
Now the configuration specified in npp.cfg overwrites what is actually written in the metadata header of the band files. [Adam Dybbroe] - Make saturn posttroll capable. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Fixing test cases. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Remove dummy test to boost projection performance. [Martin Raspaud] Mpop was checking in 2 different places if the source and target areas were different, leading to pyresample expanding the area definitions to full lon/lat arrays when checking against a swath definition, and then running an allclose. This was inefficient, and the programming team decided that it was the user's task to know before projection if the source and target area were the same. In other words, the user should be at least a little smart. - Remove dummy test to boost projection performance. [Martin Raspaud] Mpop was checking in 2 different places if the source and target areas were different, leading to pyresample expanding the area definitions to full lon/lat arrays when checking against a swath definition, and then running an allclose. This was inefficient, and the programming team decided that it was the user's task to know before projection if the source and target area were the same. In other words, the user should be at least a little smart. - Update channel list for modis lvl2. [Martin Raspaud] - Bump up version number: 1.0.0. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Added Ninjo tiff example areas definitions. [Lars Orum Rasmussen] - Cosmetic. [Lars Orum Rasmussen] - Ninjo tiff writer now handles singel channels. [Lars Orum Rasmussen] Ninjo tiff meta-data can now all be passed as arguments - Better documentation. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changing palette name to something more intuitive. Allow to have orbit number equals None. [Adam Dybbroe] - Fixing aqua/terra template config files for dual gain channels (13&14) [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Make overview consistent with the standard overview. [Adam Dybbroe] - Cleanup. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] Conflicts: etc/npp.cfg.template - Updated npp-template to fit the new viirs reader using the (new) plugin-loader system. [Adam Dybbroe] - Minor clean up. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] Conflicts: mpop/satin/viirs_sdr.py - Lunar stuff... [Adam Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Changed template to fit new npp reader. [krl] - Fix version stuff. [Martin Raspaud] - Merge branch 'feature-optimize_viirs' into unstable. [Martin Raspaud] - Make viirs_sdr a plugin of new format. [Martin Raspaud] - Finalize optimisation i new viirs reader. [Martin Raspaud] - Optimization ongoing. Mask issues. [Martin Raspaud] - Clarify failure to load hrit data. [Martin Raspaud] - Fix install requires. [Martin Raspaud] - Fix projector unit test. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Fixed (temporary ?) misuse of Image.SAVE. [Lars Orum Rasmussen] - Now config reader is a singleton. 
[Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Tmplate -> template. [Lars Orum Rasmussen] - Added support for saving in Ninjo tiff format. [Lars Orum Rasmussen] - Projector cleanup. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - New VIIRS reader. Better, faster, smarter (consumimg less memory) [Adam Dybbroe] - Fix area hashing. [Martin Raspaud] - Fix install dependency. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] Conflicts: doc/source/conf.py setup.py - Bump up version number for release. [Martin Raspaud] - Optimize. [Martin Raspaud] - Remove the optional ahamap requirement. [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Manage version number centrally. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Make old plugin an info instead of a warning. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Pep8. [Adam Dybbroe] - Merge branch 'aapp1b' into unstable. [Adam Dybbroe] - Don't mask out IR channel data where count equals zero. [Adam Dybbroe] - Fixing the masking of the ir calibrated Tbs - count=0 not allowed. [Adam Dybbroe] - Make also vis channels masked arrays. [Adam Dybbroe] - Checking if file format is post or pre v4 : If bandcor_2 < 0 we are at versions higher than 4 Masking a bit more strict. [Adam Dybbroe] - Now handle data without a mask and handling lons and lats without crashing. [Lars Orum Rasmussen] - Read signed instead of unsigned (aapp1b). [Martin Raspaud] - Style cleanup. [Martin Raspaud] - Adding calibration type as an option to the loader. So counts, radiances or tbs/refl can be returned. [Adam Dybbroe] - Better show and more cosmetic. [Lars Orum Rasmussen] - Making pylint more happy and some cosmetic. [Lars Orum Rasmussen] - No need to night_overview, use cloudtop with options. [Lars Orum Rasmussen] - Now IR calibration returns a masked array. [Lars Orum Rasmussen] - Added som options for overview image and added a night overview. [Lars Orum Rasmussen] - Finalize aapp1b python-only reader. [Martin Raspaud] - Working on a aapp l1b reader. [oananicola] - Starting a aapp1b branch for directly reading aapp's l1b files. [Lars Orum Rasmussen] - Adding a bit of debug info... [Adam Dybbroe] - Adding orbit number to the cloud mask object. [Adam Dybbroe] - Channel cleanup and tests. [Martin Raspaud] - Merge branch 'feature_plugins' into unstable. [Martin Raspaud] - Make orbit number an 5-character string (padding with '0') [Martin Raspaud] - New plugin implementation, backward compatible. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Add several cores for geoloc in eos. [Martin Raspaud] - Bugfix hdfeos. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Fix loading of terra aqua with multiple cores. [Martin Raspaud] - Add dust, fog, ash composites to VIIRS. [Martin Raspaud] - Enhance error messages. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. 
[Martin Raspaud] - Make orbit number an 5-character string (padding with '0') [Martin Raspaud] - New template files for regional EARS (AVHRR and NWC) file support. [Adam Dybbroe] - Minor cosmetics. [Adam Dybbroe] - Reverted to previous commit. [Lars Orum Rasmussen] - Correct green-snow. [Martin Raspaud] Use 0.6 instead on 0.8 - Merge branch 'fixrtd' into unstable. [Martin Raspaud] - Add pyresample to mock for doc building. [Martin Raspaud] - Get rid of the np.inf error in rtd. [Martin Raspaud] - Mock some import for the documentation. [Martin Raspaud] - Now, if specified in proj4 object, add EPGS code to tiff metadata. [Lars Orum Rasmussen] - Added, a poor man's version, of Adam's DNB RGB image. [Lars Orum Rasmussen] - Add symlink from README.rst to README. [Martin Raspaud] - Update download link and README. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Add template file for meteosat 10. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Support for calibrate option. [Adam Dybbroe] - Add debug messages to hdf-eos loader. [Martin Raspaud] - Support pnm image formats. [Martin Raspaud] - Introducing clip percentage for SAR average product. [Lars Orum Rasmussen] - The pps palette broke msg compatibility. Now there are two palettes, one for msg and one for pps. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] Conflicts: mpop/satin/viirs_sdr.py - Adapted viirs reader to handle aggregated granule files. [Adam Dybbroe] - Fixing nwcsaf-pps ctth height palette. [Adam Dybbroe] - Take better care of the path (was uri) argument. [Martin Raspaud] - Don't do url parsing in the hdfeos reader. [Martin Raspaud] - Fix unit tests. [Martin Raspaud] - Remove the deprecated append function in scene. [Martin Raspaud] - Return when not locating hdf eos file. [Martin Raspaud] - Remove raveling in kd_tree. [Martin Raspaud] - Make use of the new strftime in the viirs reader. [Martin Raspaud] - Add a custom strftime. [Martin Raspaud] This fixes a bug in windows that prevents running strftime on string that contain mapping keys conversion specifiers. - Catch the error if there is no file to load from. [Martin Raspaud] - Add a proper logger in hdfeos reader. [Martin Raspaud] - Get resolution from filename for eos data. [Martin Raspaud] - Introducing stretch argument for average product. [Lars Orum Rasmussen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Clean up. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Support passing a uri to hdfeos reader. [Martin Raspaud] - Fix the loading of BT for VIIRS M13 channel. [Martin Raspaud] Has no scale and offset - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Refactor the unsigned netcdf packing code. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Support packing data as unsigned in netcdf. [Martin Raspaud] - Replace auto mask and scale from netcdf4. [Martin Raspaud] Eats up too much memory. - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Feature: Added template for electro-l satellite. [Martin Raspaud] - Feature: taking care of missing data in the viirs reader, and allow for radiance retrieval. 
[Martin Raspaud] - Feature: last adjustments to new netcdf format. [Martin Raspaud] - Merge branch 'feature-netcdf-upgrade' into unstable. [Martin Raspaud] Conflicts: mpop/satout/cfscene.py mpop/satout/netcdf4.py - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] Conflicts: mpop/satin/mipp_xsar.py - Work on new netcdf format nearing completion. [Martin Raspaud] - Feature: wrapping up new netcdf format, cf-satellite 0.2. [Martin Raspaud] - Renamed some global attributes. [Martin Raspaud] - Netcdf: working towards better matching CF conventions. [Martin Raspaud] - WIP: NetCDF cleaning. [Martin Raspaud] - scale_factor and add_offset are now single values. - vertical_perspective to geos - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] - Group channels by unit and area. [Martin Raspaud] - Do not apply scale and offset when reading. [Martin Raspaud] - WIP: updating the netcdf interface. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changed handeling of "_FillValue"-attributes. Added find_FillValue_tags function to search for "_FillValue" attributes. The "_FillValue" attributes are used and set when variables are created. [Nina.Hakansson] - Cosmetics. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Fixing bug concerning viirs bandlist and the issue of preventing the loading of channels when only products are requested. [Adam Dybbroe] - Fixing VIIRS reader - does not try to read SDR data if you only want to load a product. Minor fixes in MODIS and AAPP1b readers. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Bugfix in viirs sdr reader. [Adam Dybbroe] - Added ir108 composite to Viirs. [Martin Raspaud] - RUN: add possibility to get prerequisites for a list of areas. [Martin Raspaud] - Updating area_id for the channel during viirs loading and assembling of segments. [Martin Raspaud] - Area handling in viirs and assembling segments. [Martin Raspaud] - Viirs true color should have a transparent background. [Martin Raspaud] - Added enhancements to the image.__call__ function. [Martin Raspaud] - Fixing runner to warn for missing functions (instead of crashing). [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satin/viirs_sdr.py - Bug fix green-snow RGB. [Adam Dybbroe] - Cleaning up a bit in viirs reader. [Adam Dybbroe] - Temporary fix to deal with scale-factors (in CLASS archive these are not tuples of 2 but 6). Taken from old fix in npp-support branch. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Support for bzip2 compressed NWCSAF products (EARS-NWC) [Adam Dybbroe] - More flexible viirs reading, and fixes to viirs composites. [Martin Raspaud] - Added a stereographic projection translation. [Lars Orum Rasmussen] - Added modist as valid name for 'eos1' [Lars Orum Rasmussen] - Added night_microphysics. [Lars Orum Rasmussen] - Added stretch option. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Feature: new function to create an image from a scene. [Martin Raspaud] - Fixed a new npp template config file, with geo_filename example. 
[Adam Dybbroe] - Adding 500meter scan area. [Adam Dybbroe] - Fixing bug in geolocation reading and removing old style viirs composite file. [Adam Dybbroe] - Using a template from configuration file to find the geolocation file to read - for all VIIRS bands. [Adam Dybbroe] - Fixed bug in hr_natural and added a dnb method. [Adam Dybbroe] - Fixing Bow-tie effects and geolocation for VIIRS when using Cloudtype. Needs to be generalised to all products! [Adam Dybbroe] - Support for tiepoint grids and interpolation + masking out no-data geolocation (handling VIIRS Bow-tie deletetion) [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Adding viirs composites and pps_odim reader for avhrr and viirs channel data in satellite projection (swath) [Adam Dybbroe] - Added a Geo Phys Product to modis level2. [Lars Orum Rasmussen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Adding support for ob_tran projection even though it is not cf- compatible yet. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Added the reading of geolocation data from the PPS formatet level1 file. [Adam Dybbroe] - Added Europe Mesan area to template. [Adam Dybbroe] - Feature: MSG hdf files are now used to determine the area. [Martin Raspaud] - Fixed error message. [Martin Raspaud] - Cleanup: clarified import error. [Martin Raspaud] - Cleanup: More descriptive message when plugin can't be loaded. [Martin Raspaud] - Raised version number. [Martin Raspaud] - More relevant messages in msg_hdf reading. [Martin Raspaud] - Adding a RGB for night condition. [Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Modis level-2 reader and netcdf writer can now handle scenes containing only geo-physical product (and no channels) [Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge pull request #2 from cheeseblok/FixViirsRedSnow. [Martin Raspaud] Fix typo in red_snow check_channels method - Fix typo in red_snow check_channels method. [Scott Macfarlane] - Feature: Pypi ready. [Martin Raspaud] - Bufix: updating to use python-geotiepoints. [Martin Raspaud] - Bumping up the version number for the next release. [Martin Raspaud] - Doc: updating add_overlay documentation. [Martin Raspaud] - Feature: adding interpolation to modis lon lats. [Martin Raspaud] - Use pynav to get lon/lats if no file can be read. [Martin Raspaud] - Hack to handle both level2 and granules. [Martin Raspaud] - Added the possibility to provide a filename to eps_l1b loader. [Martin Raspaud] - Updated npp confirg file template with geo_filename example. [Adam Dybbroe] - Merge branch 'feature_new_eps_reader' into unstable. [Martin Raspaud] - Added xml file to etc and setup.py. [Martin Raspaud] - Bugfix in geolocation assignment. [Martin Raspaud] - Allowing for both 3a and 3A. [Martin Raspaud] - Put xml file in etc. [Martin Raspaud] - New eps l1b is now feature complete. Comprehensive testing needed. [Martin Raspaud] - Added a new eps l1b reader based on xml description of the format. [Martin Raspaud] - Corrected longitude interpolation to work around datum shift line. [Martin Raspaud] - Cloudtype channel now called "CT". [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. 
[Martin Raspaud] - SetProjCS is now correctly called after ImportFromProj4. [Lars Orum Rasmussen] Added SetWellKnownGeogCS if available - Merge branch 'pre-master' into unstable. [Martin Raspaud] Conflicts: mpop/satin/mipp_xsar.py - More correct 'new area' [Lars Orum Rasmussen] - Mipp restructure. [Lars Orum Rasmussen] - Merge branch 'pre-master' into area-hash. [Lars Orum Rasmussen] - Merge branch 'pre-master' into area-hash. [Lars Orum Rasmussen] - Now more unique projection filenames (using hash of areas) [Lars Orum Rasmussen] - Enhancements to pps hdf format readers. [Martin Raspaud] - Feature: added support for geotiff float format in geo_image. [Martin Raspaud] - Don't touch satscene.area if already present (mipp reading) [Martin Raspaud] - Feature: get best msg hdf file using area_extent. [Martin Raspaud] - Duck typing for channel assignation. [Martin Raspaud] - Fixed meteosat reading. [Martin Raspaud] - do not change the scene metadata when no channel is loaded - do not crash if no PGE is present - Added shapes in mpop.cfg.template for pycoast. [Martin Raspaud] - Cleanup. [Martin Raspaud] - New add_overlay function, using pycoast. [Martin Raspaud] - Added test for __setitem__ (scene) [Martin Raspaud] - Feature: add a global area if possible. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Fixing so thar also other products (than Channel data) can be assempled. [Adam.Dybbroe] - Adding data member to CloudType. [Adam.Dybbroe] - Added support for trucolor image from modis. [Adam.Dybbroe] - Cleaning up geo_image.py. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satin/hdfeos_l1b.py - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Minor cosmetic/editorial stuff. [Adam.Dybbroe] - Small bugfix - viirs interface. [Adam.Dybbroe] - Feature: wrapping up hdfeos upgrade. [Martin Raspaud] - migrated data to float32 instead of float64 - support only geoloc a 1km resolution at the moment - adjust channel resolution to match loaded data - added template terra.cfg file. - Trimming out dead detectors. [Adam.Dybbroe] - WIP: hdf eos now reads only the needed channels, and can have several resolutions. Geoloc is missing though. [Martin Raspaud] - WIP: Started working on supporting halv/quarter files for modis. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changed MODIS HDF-EOS level 1b reader to accomodate both the thinned EUMETCasted data and Direct readout data. Changed name from thin_modis.py to hdfeos_l1b.py. Added filename pattern to config. [Adam.Dybbroe] - Fixing indexing bug: missing last line in Metop AVHRR granule. [Adam.Dybbroe] - Revert "Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable" [Martin Raspaud] This reverts commit 45809273f2f9670c8282c32197ef47071aecaa74, reversing changes made to 10ae6838131ae1b6e119e05e08496d1ec9018a4a. - Revert "Reapplying thin_modis cleaning" [Martin Raspaud] This reverts commit 52c63d6fbc9f12c03b645f29dd58250da943d24a. - Reapplying thin_modis cleaning. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Merge branch 'pre-master' into unstable. 
[Adam.Dybbroe] Conflicts: mpop/satin/eps_avhrr.py - Minor enhancements to nwcsaf pps cloud type reading: Adding support for phase and quality flags. [Adam.Dybbroe] - Fixing indexing bug: missing last line in Metop AVHRR granule. [Adam.Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] Conflicts: doc/source/conf.py mpop/instruments/mviri.py mpop/instruments/seviri.py mpop/instruments/test_mviri.py mpop/instruments/test_seviri.py mpop/instruments/test_visir.py mpop/instruments/visir.py mpop/satin/test_mipp.py mpop/satin/thin_modis.py mpop/saturn/runner.py mpop/scene.py setup.py version.py - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - Thin_modis Cleanup. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Style: Cleaning up. [Martin Raspaud] - Doc: added screenshots. [Martin Raspaud] - Cleanup, switch to compositer globaly. [Martin Raspaud] - Doc: added more documentation to polar_segments.py. [Martin Raspaud] - Cleanup: remove old unit test for assemble_swath. [Martin Raspaud] - Bugfix in assemble_segments. [Martin Raspaud] - Cleanup: removed old assemble_swath function. [Martin Raspaud] - Doc: update docstring for project. [Martin Raspaud] - Upgrade: assemble_segments now uses scene factory. [Martin Raspaud] - DOC: examples are now functional. [Martin Raspaud] - Cleanup: removed old plugins directory. [Martin Raspaud] - Merge branch 'new_plugins' into unstable. [Martin Raspaud] Conflicts: mpop/plugin_base.py - Init file for plugins initialization. [Adam.Dybbroe] - Merge branch 'new_plugins' of https://github.com/mraspaud/mpop into new_plugins. [Adam.Dybbroe] - Removing old deprecated and now buggy part - has been caught by the try-exception since long. Adding for plugins directory. [Adam.Dybbroe] - Corrected import bug. [Adam.Dybbroe] - Merge branch 'unstable' into new_plugins. [Adam.Dybbroe] - Bug correction - config file reading section 'format' [Adam.Dybbroe] - Removing old deprecated and now buggy part - has been caught by the try-exception since long. Adding for plugins directory. [Adam.Dybbroe] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - First time in git. [Adam.Dybbroe] - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - Meris level-2 reader - first commit. [Adam.Dybbroe] - Minor fixes. [Adam.Dybbroe] - Fixed typo. [Adam.Dybbroe] - Feature: updating mipp test to use factory. [Martin Raspaud] - Cleaning up an old print. [Martin Raspaud] - Merge branch 'v0.10.2-support' into unstable. [Martin Raspaud] - Feature: added support for new eumetsat names (modis) and terra. [Martin Raspaud] - Merge branch 'new_plugins' into unstable. [Martin Raspaud] - Moved mipp plugin back to satin. [Martin Raspaud] - Feature: all former plugins are adapted to newer format. [Martin Raspaud] - Style: finalizing plugin system. Now plugins directories loaded from mpop.cfg. [Martin Raspaud] - Cleanup: removing old stuff. [Martin Raspaud] - Feature: added reader plugins as attributes to the scene, called "_reader". [Martin Raspaud] - Feature: new plugin format, added a few getters and made scene reference weak. [Martin Raspaud] - New plugin system. [Martin Raspaud] Transfered the mipp plugin. - DOC: fixed path for examples. [Martin Raspaud] - DOC: Added documentation examples to the project. 
[Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Using LOG call instead of print. [Adam.Dybbroe] - Fixed missing LOG import. [Adam.Dybbroe] - Further improvements to MODIS level2 reader and processor. [Adam.Dybbroe] - Feature: Added projection to the pps_hdf channels. [Martin Raspaud] - DOC: added use examples in the documentation directory. [Martin Raspaud] - Merge branch 'master' into unstable. [Martin Raspaud] - Added posibility to have instrument_name in the filenames. [Adam.Dybbroe] - Making sure we pass on orbit number when projecting the scene. [Adam.Dybbroe] - Added colour map for Modis Chlorophyl-A product. [Adam.Dybbroe] - Taking away the alpha parameters for RGB modes. [Martin Raspaud] - Added areas in channels for test. [Martin Raspaud] - Added the radius parameter to runner. [Martin Raspaud] - Adding preliminary NWCSAF pps product reader. [Adam.Dybbroe] - Cleaning up. [Martin Raspaud] - Updated satpos file directories. [Martin Raspaud] - Cleaning up. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Updated copyright and version number. [Martin Raspaud] - Merge branch 'release-0.11' [Martin Raspaud] - Merge branch 'pre-master' into release-0.11. [Martin Raspaud] - Updated copyright dates in setup.py. [Martin Raspaud] - Bumped version number to 0.11.0. [Martin Raspaud] - Updating setup stuff. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Adding Day/Night band support. [Adam.Dybbroe] - Adding area for mapping sample data i-bands. [Adam.Dybbroe] - Scaling reflectances to percent (%) as required in mpop. [Adam.Dybbroe] - Adding support for I-bands. [Adam.Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam.Dybbroe] - Merge branch 'npp-support' into pre-master. [Adam.Dybbroe] - Renamed to npp1.cfg. [Adam.Dybbroe] - VIIRS composites - M-bands only so far. [Adam.Dybbroe] - Cleaning print statements. [Adam.Dybbroe] - NPP template. [Adam.Dybbroe] - Adding NPP/VIIRS test area for sample data: M-bands. [Adam.Dybbroe] - Adding I-band support. [Adam.Dybbroe] - Fixing for re-projection. [Adam.Dybbroe] - Various small corrections. [Adam.Dybbroe] - Corrected band widths - ned to be in microns not nm. [Adam.Dybbroe] - Support for NPP/JPSS VIIRS. [Adam.Dybbroe] - Updated copyright in sphinx doc. [Martin Raspaud] - Deprecating add_overlay in favor of pycoast. [Martin Raspaud] - Merge branch 'feature-new-nc-format' into unstable. [Martin Raspaud] - Added support for different ordering of dimensions in band data. [Martin Raspaud] Use the band_axis keyword argument. - NC reader support different dimension orderings for band-data. [Martin Raspaud] - NC: now band data is of shape (band, x, y). [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Now a channel can be added to a scene dynamically using dict notation. [esn] - Added units to aapp1b reader. [Martin Raspaud] - Deactivating mipp loading test. [Martin Raspaud] - Adjusted tests for compositer. [Martin Raspaud] - Merge branch 'feature-cleaning' into unstable. [Martin Raspaud] - Merge branch 'unstable' into feature-cleaning. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Added append function to scene.py. [Esben S. Nielsen] - New error message when no instrument-levelN section is there in the satellite config file. [Martin Raspaud] - Merge branch 'feature-radius-of-influence' into unstable. [Martin Raspaud] - Syntax bug fixed. 
[Martin Raspaud] - Made orbit number default to None for PolarFactory's create_scene. [Martin Raspaud] - Merge branch 'feature-radius-of-influence' into unstable. [Martin Raspaud] - Radius of influence is now a keyword parameter to the scene.project method. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Can now get reader plugin from PYTHONPATH. [Esben S. Nielsen] - Renamed asimage to as_image. [Martin Raspaud] - Wavelength and resolution are not requirements in config files anymore. [Martin Raspaud] - Merge branch 'feature-channel-to-image' into unstable. [Martin Raspaud] - Feature: added the asimage method to channels, to retrieve a black and white image from the channel data. [Martin Raspaud] - Merge branch 'feature-doc-examples' into unstable. [Martin Raspaud] - Doc: added more documentation to polar_segments.py. [Martin Raspaud] - DOC: examples are now functional. [Martin Raspaud] - DOC: fixed path for examples. [Martin Raspaud] - DOC: Added documentation examples to the project. [Martin Raspaud] - DOC: added use examples in the documentation directory. [Martin Raspaud] - Merge branch 'feature-project-mode' into unstable. [Martin Raspaud] - Doc: update docstring for project. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Switched seviri and mviri to compositer. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Style: Cleaning up. [Martin Raspaud] - Doc: added screenshots. [Martin Raspaud] - Cleanup, switch to compositer globaly. [Martin Raspaud] Conflicts: mpop/instruments/visir.py mpop/satin/hrpt.py mpop/saturn/runner.py - Cleanup: remove old unit test for assemble_swath. [Martin Raspaud] - Bugfix in assemble_segments. [Martin Raspaud] - Cleanup: removed old assemble_swath function. [Martin Raspaud] Conflicts: mpop/scene.py - Upgrade: assemble_segments now uses scene factory. [Martin Raspaud] - Fixed typo. [Adam.Dybbroe] - Feature: updating mipp test to use factory. [Martin Raspaud] - Cleaning up an old print. [Martin Raspaud] Conflicts: mpop/satin/mipp.py - Cleanup: removing old stuff. [Martin Raspaud] - Cleaned up and updated meteosat 9 cfg template further. [Martin Raspaud] - Updated templates to match pytroll MSG tutorial. [Esben S. Nielsen] - Simplified reading of log-level. [Lars Orum Rasmussen] - Proposal for reading loglevel from config file. [Lars Orum Rasmussen] - Cfscene now handles channels with all masked data. [Esben S. Nielsen] - Netcdf area fix. [Martin Raspaud] - Syle: copyright updates. [Martin Raspaud] - Modified the modis-lvl2 loader and extended a bit the cf-io interfaces. [Adam.Dybbroe] - First time in GIT A new reader for EOS-HDF Modis level-2 files from NASA. See http://oceancolor.gsfc.nasa.gov/DOCS/ocformats.html#3 for format description. [Adam.Dybbroe] - Added license. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Info needs to be an instance attribute. [Lars Orum Rasmussen] - Fix initialization of self.time_slot. [Lars Orum Rasmussen] - Merge branch 'v0.10.2-support' into unstable. [Martin Raspaud] - Added pyc and ~ files to gitignore. [Martin Raspaud] - Updated thin modis reader for new file name. [Martin Raspaud] - Merge branch 'v0.10.1-support' into unstable. [Martin Raspaud] - Compression and tiling as default for geotifs. [Martin Raspaud] - Merge branch 'v0.10.0-support' into unstable. [Martin Raspaud] - Feauture: support for qc_straylight. [Martin Raspaud] - Compression and tiling as default for geotifs. 
[Martin Raspaud] - WIP: attempting interrupt switch for sequential runner. [Martin Raspaud] - Feature: changing filewatcher from processes to threads. [Martin Raspaud] - Feauture: support for qc_straylight. [Martin Raspaud] - Compression and tiling as default for geotifs. [Martin Raspaud] - Update: modis enhancements. [Martin Raspaud] - Feature: filewatcher keeps arrival order. [Martin Raspaud] - Feature: concatenation loads channels. [Martin Raspaud] - Feature: use local tles instead of downloading systematically. [Martin Raspaud] - Feature: move pyaapp as single module. [Martin Raspaud] - Feature: added ana geoloc for hrpt and eps lvl 1a. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Added gatherer and two_line_elements. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Moved a parenthesis six characters to the left. [Lars Orum Rasmussen] - Feature: assemble_segments function, more clever and should replace assemble_swaths. [Martin Raspaud] - Feature: thin modis reader upgrade, with lonlat estimator and channel trimmer for broken sensors. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Netcdf bandname now only uses integer part of resolution. [Esben S. Nielsen] - Improvement: made resolution int in band names, for netcdf. [Martin Raspaud] - Cleaning. [Martin Raspaud] - WIP: ears. [Martin Raspaud] - Trying to revive the pynwclib module. [Martin Raspaud] - Cleaning. [Martin Raspaud] - Wip: polar hrpt 0 to 1b. [Martin Raspaud] - Feature: Added proj4 parameters for meteosat 7. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Cosmetic. [Esben S. Nielsen] - Now channels are read and saved in order. Optimized scaling during CF save. [Esben S. Nielsen] - Feature: Adding more factories. [Martin Raspaud] - Documentation: adding something on factories and area_extent. [Martin Raspaud] - Documentation: added needed files in setup.py. [Martin Raspaud] - Style: remove a print statement and an unused import. [Martin Raspaud] - Feature: Added natural composite to default composite list. [Martin Raspaud] - Feature: made compositer sensitive to custom composites. [Martin Raspaud] - Documentation: Upgraded documentation to 0.10.0. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - The RELEASE-VERSION file should not be checked into git. [Lars Orum Rasmussen] - Optimized parts of mpop. Fixed projector caching. [Esben S. Nielsen] - Optimized parts of mpop processing. Made projector caching functional. [Esben S. Nielsen] - Ignore build directory. [Lars Orum Rasmussen] - Check array in stretch_logarithmic. [Lars Orum Rasmussen] - Prevent adding unintended logging handlers. [Lars Orum Rasmussen] - Feature: Adding extra tags to the image allowed in local_runner. [Martin Raspaud] - Style: lines to 80 chars. [Martin Raspaud] - Merge branch 'unstable' [Martin Raspaud] - Feature: pps hdf loading and polar production update. [Martin Raspaud] - Style: cleanup. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satin/mipp.py - Fixed memory problems. Workaround for lazy import of pyresample. Now uses weakref for compositor. [Esben S. Nielsen] - Better logging in scene loading function. [Martin Raspaud] - Remove unneeded import. [Martin Raspaud] - New version. [Martin Raspaud] - Merge branch 'master' of github.com:mraspaud/mpop. 
[Lars Orum Rasmussen] - Feature: direct_readout chain in place. [Martin Raspaud] - Removing no longer needed avhrr.py. [Martin Raspaud] - Made scaling expression in cfscene.py nicer. [Esben S. Nielsen] - Corrected shallow copy problem with compositor. Simplyfied usage of GeostationaryFactory. [Esben S. Nielsen] - Feature: cleaner hdf reading for both pps and msg. [Martin Raspaud] - Stability: added failsafe in case no config file is there when loading. [Martin Raspaud] - Merge branch 'pps_hdf' into unstable. [Martin Raspaud] - Feature: Support area_extent in scene.load. [Martin Raspaud] - Feature: Cleaning and use the mipp area_extent and sublon. [Martin Raspaud] - Style: Allow to exclude all the *level? sections. [Martin Raspaud] - Redespached a few composites. [Martin Raspaud] - Style: cosmetics. [Martin Raspaud] - Feature: added the power operation to channels. [Martin Raspaud] - Removed the no longer needed meteosat09.py file. [Martin Raspaud] - Wip: iterative loading, untested. [Martin Raspaud] - More on versionning. [Martin Raspaud] - Merge branch 'unstable' into pps_hdf. [Martin Raspaud] - Feature: started working on the PPS support. [Martin Raspaud] - Spelling. [Martin Raspaud] - Added logarithmic enhancement. [Lars Orum Rasmussen] - Removed unneeded file. [Martin Raspaud] - Api: new version of mipp. [Martin Raspaud] - Added automatic version numbering. [Martin Raspaud] - Version update to 0.10.0alpha1. [Martin Raspaud] - Api: unload takes separate channels (not iterable) as input. [Martin Raspaud] - Doc: updated the meteosat 9 template config. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satellites/meteosat09.py - Feature: Introduced compound satscene objects. [Martin Raspaud] This is done through the use of an "image" attribute, created by the factory in the "satellites" package. The image attribute holds all the compositing functions, while the satscene object remains solely a container for satellite data and metadata. - Feature: added the get_custom_composites function and a composites section in mpop.cfg to load custom made composites on the fly. [Martin Raspaud] - Feature: make use of mipp's area_extent function. [Martin Raspaud] - Style: cleanup channels_to_load after loading. [Martin Raspaud] - Doc: introduce mpop.cfg. [Martin Raspaud] - Feature: make use of the new mpop.cfg file to find the area file. Added the get_area_def helper function in projector. [Martin Raspaud] - Feature: Added the new pge02f product for met09. [Martin Raspaud] - Feature: New format keyword for images. [Martin Raspaud] - Update: new version of mipp, putting the image upright when slicing. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satout/netcdf4.py mpop/scene.py - Corrected mipp slicing in mipp.py. Added keyword for selecting datatype in cfscene.py. Corrected transformation for netCDF data type in cfscene.py. [Esben S. Nielsen] - New add_history function, and some changes in the netcdf handling. [Martin Raspaud] - Upgrade: Upgraded the assemble_segments module to use only one coordinate class. [Martin Raspaud] - Cosmetics: Added log message when slicing in mipp. [Martin Raspaud] - Move everything to a mpop folder, so that import mpop should be used. [Martin Raspaud] - WIP: Completing the nc4 reader. [Martin Raspaud] - Doc: Added credits. [Martin Raspaud] - Doc: updated build for github. 
[Martin Raspaud] - Feature: Started to support arithmetic operations on channels. [Martin Raspaud] - Feature: support for calibration flag for met 9. [Martin Raspaud] - Cosmetics: Added names to copyrigths. [Martin Raspaud] - Changed default logging. [Esben S. Nielsen] - Merge branch 'dmi_fix' into unstable. [Martin Raspaud] Conflicts: pp/scene.py - Added fill_valued as a keyworded argument. [Lars Orum Rasmussen] - Fixed oversampling error when pyresample is not present. Added compression as default option when writing netCDF files. [Esben S. Nielsen] - Moved pyresample and osgeo dependency in geo_image.py. [Esben S. Nielsen] - Feature: support umarf files for eps avhrr. [Martin Raspaud] - Feature: support the load_again flag for meteosat 9. [Martin Raspaud] - Feature: Allows passing arguments to reader plugins in SatelliteScene.load, and in particular "calibrate" to mipp. [Martin Raspaud] - Feature: added the fill_value argument to channel_image function. [Martin Raspaud] - Cosmetics: reorganized imports. [Martin Raspaud] - Cosmetics: Updated some template files. [Martin Raspaud] - Feature: Added the resave argument for saving projector objects. [Martin Raspaud] - Installation: Updated version number, removed obsolete file to install, and made the package non zip-safe. [Martin Raspaud] - Testing: Added tests for pp.satellites, and some cosmetics. [Martin Raspaud] - Feature: Handled the case of several instruments for get_satellite_class. [Martin Raspaud] - Cosmetics: changed the name of the satellite classes generated on the fly. [Martin Raspaud] - Testing: more on scene unit tests. [Martin Raspaud] - Testing: started integration testing of pp core parts. [Martin Raspaud] - Testing: completed seviri tests. [Martin Raspaud] - Testing: completed avhrr test. [Martin Raspaud] - Testing: Added tests for instruments : seviri, mviri, avhrr. [Martin Raspaud] - Testing: took away prerequisites tests for python 2.4 compatibility. [Martin Raspaud] - Testing: final adjustments for visir. [Martin Raspaud] - Testing: visir tests complete. [Martin Raspaud] - Testing: fixed nosetest running in test_visir. [Martin Raspaud] - Testing: corrected scene patching for visir tests. [Martin Raspaud] - Tests: started testing the visir instrument. [Martin Raspaud] - Cosmetics and documentation in the scene module. [Martin Raspaud] - Feature: better handling of tags and gdal options in geo_images. [Martin Raspaud] - Cleanup: removed uneeded hardcoded satellites and instruments. [Martin Raspaud] - Documentation: Updated readme, with link to the documentation. [Martin Raspaud] - Documentation: Added a paragraph on geolocalisation. [Martin Raspaud] - Refactoring: took away the precompute flag from the projector constructor, added the save method instead. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Feature: pyresample 0.7 for projector, and enhanced unittesting. [Martin Raspaud] - New template file for areas. [Martin Raspaud] - Feature: First draft for the hrpt reading (using aapp) and eps1a reading (using aapp and kai). [Martin Raspaud] - Cosmetics: cleaning up the etc directory. [Martin Raspaud] - Testing: Basic mipp testing. [Martin Raspaud] - Cosmetics: cfscene. [Martin Raspaud] - Feature: One mipp reader fits all :) [Martin Raspaud] - Feature: helper "debug_on" function. [Martin Raspaud] - Feature: save method for satscene. Supports only netcdf4 for now. [Martin Raspaud] - Feature: reload keyword for loading channels. 
[Martin Raspaud] - Documentation: better pp.satellites docstring. [Martin Raspaud] - Testing: updated the test_scene file to reflect scene changes. [Martin Raspaud] - Documentation: changed a couple of docstrings. [Martin Raspaud] - Feature: support pyresample areas in geo images. [Martin Raspaud] - Cosmetics: changing area_id to area. [Martin Raspaud] - Feature: adding metadata handling to channels. [Martin Raspaud] - Feature: now scene and channel accept a pyresample area as area attribute. [Martin Raspaud] - Enhancement: making a better mipp plugin. [Martin Raspaud] - Feature: Finished the netcdf writer. [Martin Raspaud] - Feature: updated the netcdf writer and added a proxy scene class for cf conventions. [Martin Raspaud] - Documentation: big update. [Martin Raspaud] - Documentation: quickstart now passes the doctest. [Martin Raspaud] - Documentation: reworking. [Martin Raspaud] - Feature: Moved get_satellite_class and build_satellite_class to pp.satellites. [Martin Raspaud] - Doc: starting documentation update. [Martin Raspaud] - Enhanced mipp reader. [Martin Raspaud] * Added metadata when loading scenes. * Added slicing when reading data from seviri * Added a draft generic reader - Cosmetics: enhanced error description and debug message in aapp1b, giving names to loaded/missing files. [Martin Raspaud] - Testing: updated test_scene. [Martin Raspaud] - Feature: Added automatic retreiving of product list for a given satellite. [Martin Raspaud] - Cleaning: remove class retrieving and building from runner.py. [Martin Raspaud] - Cosmetics: Better error message in scene when a reader is not found, plus some code enbelishment. [Martin Raspaud] - Feature: made scene object iteratable (channels are iterated). [Martin Raspaud] - Feature: Adding functions to retreive a satellite class from the satellites name and to build it on the fly from a configuration file. [Martin Raspaud] - Testing: more on channel. [Martin Raspaud] - Testing: added test for pp.scene.assemble_swaths. [Martin Raspaud] - Testing: scene loading tested. [Martin Raspaud] - Cleaning: test_scene is now more pylint friendly. [Martin Raspaud] - Feature: extended scene test. [Martin Raspaud] - Feature: more testing of scene.py. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: pp/test_scene.py - Feature: Enhanced unitests for scene. [Martin Raspaud] - Feature: Enhanced unitests for scene. [Martin Raspaud] - Tests: Improving unittests for channel classes. [Martin Raspaud] - Feature: Project function won't crash if pyresample can't be loaded. Returns the untouched scene instead. [Martin Raspaud] - Rewrote Filewatcher code. [Martin Raspaud] - Feature: added the refresh option to filewatcher to call the processing function even if no new file has come. [Martin Raspaud] - Refactoring: satellite, number, variant arguments to runner __init__ are now a single list argument. [Martin Raspaud] - Cleaning: Removing pylint errors from runner.py code. [Martin Raspaud] - Resolution can now be a floating point number. [Martin Raspaud] - Added the osgeo namespace when importing gdal. [Martin Raspaud] - Warning: Eps spline interpolation does not work around poles. [Martin Raspaud] - Added the "info" attribute to channel and scene as metadata holder. [Martin Raspaud] - Functionality: Automatically build satellite classes from config files. [Martin Raspaud] - Added copyright notices and updated version. [Martin Raspaud] - Changed channel names for seviri. 
[Martin Raspaud] - Added info stuff in mipp reader. [Martin Raspaud] - Added info.area_name update on projection. [Martin Raspaud] - Added quick mode for projecting fast and dirty. [Martin Raspaud] - Added single channel image building. [Martin Raspaud] - Added support for gdal_options when saving a geo_image. [Martin Raspaud] - Made satout a package. [Martin Raspaud] - Added a few information tags. [Martin Raspaud] - Added support for mipp reading of met 09. [Martin Raspaud] - Added reader and writer to netcdf format. [Martin Raspaud] - Added info object to the scene object in preparation for the netCDF/CF writer. [Adam Dybbroe] - Added support for FY3 satellite and MERSI instrument. [Adam Dybbroe] - Merge branch 'unstable' of git@github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: imageo/test_image.py Conflicts: imageo/test_image.py - Bugfix in image unit test: testing "almost equal" instead of "equal" for image inversion (floating point errors). [Martin Raspaud] - Bugfix in image unit test: testing "almost equal" instead of "equal" for image inversion (floating point errors). [Martin Raspaud] - Modified image inversion unit test to reflect new behaviour. [Martin Raspaud] - New rebase. [Martin Raspaud] satpy-0.34.0/continuous_integration/000077500000000000000000000000001420401153000175355ustar00rootroot00000000000000satpy-0.34.0/continuous_integration/environment.yaml000066400000000000000000000013511420401153000227650ustar00rootroot00000000000000name: test-environment channels: - conda-forge dependencies: - xarray - dask - distributed - donfig - appdirs - toolz - Cython - sphinx - cartopy - pillow - matplotlib - scipy - pyyaml - pyproj - pyresample - coveralls - coverage - codecov - behave - netcdf4 - h5py - h5netcdf - gdal - rasterio - bottleneck - rioxarray - defusedxml - imageio - pyhdf - mock - libtiff - geoviews - zarr - python-eccodes # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - geoviews - pytest - pytest-cov - pytest-lazy-fixture - fsspec - pylibtiff - python-geotiepoints - pooch - pip - pip: - trollsift - trollimage - pyspectral - pyorbital satpy-0.34.0/doc/000077500000000000000000000000001420401153000134715ustar00rootroot00000000000000satpy-0.34.0/doc/Makefile000066400000000000000000000061361420401153000151370ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* -rm -rf source/api/*.rst html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/NWCSAFMSGPP.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/NWCSAFMSGPP.qhc" latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." satpy-0.34.0/doc/README000066400000000000000000000002601420401153000143470ustar00rootroot00000000000000With sphinx and satpy's dependencies installed documentation can be generated by running: make html The generated HTML documentation pages are available in `build/html`. satpy-0.34.0/doc/rtd_environment.yml000066400000000000000000000006171420401153000174350ustar00rootroot00000000000000name: readthedocs channels: - conda-forge dependencies: - python=3.7 - pip - dask - donfig - appdirs - graphviz - numpy - pillow - pooch - pyresample - setuptools - setuptools_scm - setuptools_scm_git_archive - sphinx - sphinx_rtd_theme - sphinxcontrib-apidoc - trollsift - xarray - zarr - pip: - graphviz - .. 
# relative path to the satpy project satpy-0.34.0/doc/source/000077500000000000000000000000001420401153000147715ustar00rootroot00000000000000satpy-0.34.0/doc/source/_static/000077500000000000000000000000001420401153000164175ustar00rootroot00000000000000satpy-0.34.0/doc/source/_static/theme_overrides.css000066400000000000000000000005551420401153000223220ustar00rootroot00000000000000/* override table width restrictions */ @media screen and (min-width: 767px) { .wy-table-responsive table td { /* !important prevents the common CSS stylesheets from overriding this as on RTD they are loaded after this stylesheet */ white-space: normal !important; } .wy-table-responsive { overflow: visible !important; } } satpy-0.34.0/doc/source/api/000077500000000000000000000000001420401153000155425ustar00rootroot00000000000000satpy-0.34.0/doc/source/api/.gitkeep000066400000000000000000000000001420401153000171610ustar00rootroot00000000000000satpy-0.34.0/doc/source/composites.rst000066400000000000000000000465271420401153000177260ustar00rootroot00000000000000========== Composites ========== Composites are defined as arrays of data that are created by processing and/or combining one or multiple data arrays (prerequisites) together. Composites are generated in satpy using Compositor classes. The attributes of the resulting composites are usually a combination of the prerequisites' attributes and the key/values of the DataID used to identify it. Built-in Compositors ==================== .. py:currentmodule:: satpy.composites There are several built-in compositors available in SatPy. All of them use the :class:`GenericCompositor` base class which handles various image modes (`L`, `LA`, `RGB`, and `RGBA` at the moment) and updates attributes. The below sections summarize the composites that come with SatPy and show basic examples of creating and using them with an existing :class:`~satpy.scene.Scene` object. It is recommended that any composites that are used repeatedly be configured in YAML configuration files. General-use compositor code dealing with visible or infrared satellite data can be put in a configuration file called ``visir.yaml``. Composites that are specific to an instrument can be placed in YAML config files named accordingly (e.g., ``seviri.yaml`` or ``viirs.yaml``). See the `satpy repository `_ for more examples. GenericCompositor ----------------- :class:`GenericCompositor` class can be used to create basic single channel and RGB composites. For example, building an overview composite can be done manually within Python code with:: >>> from satpy.composites import GenericCompositor >>> compositor = GenericCompositor("overview") >>> composite = compositor([local_scene[0.6], ... local_scene[0.8], ... local_scene[10.8]]) One important thing to notice is that there is an internal difference between a composite and an image. A composite is defined as a special dataset which may have several bands (like `R`, `G` and `B` bands). However, the data isn't stretched, or clipped or gamma filtered until an image is generated. To get an image out of the above composite:: >>> from satpy.writers import to_image >>> img = to_image(composite) >>> img.invert([False, False, True]) >>> img.stretch("linear") >>> img.gamma(1.7) >>> img.show() This part is called `enhancement`, and is covered in more detail in :doc:`enhancements`. 
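As noted above, :class:`GenericCompositor` also handles single-band (`L`) composites when given only one
prerequisite. A minimal sketch, reusing the ``local_scene`` object from the example above (the composite
name ``ir108`` is just illustrative)::

    >>> from satpy.composites import GenericCompositor
    >>> compositor = GenericCompositor("ir108")
    >>> single_band_composite = compositor([local_scene[10.8]])
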
DifferenceCompositor -------------------- :class:`DifferenceCompositor` calculates a difference of two datasets:: >>> from satpy.composites import DifferenceCompositor >>> compositor = DifferenceCompositor("diffcomp") >>> composite = compositor([local_scene[10.8], local_scene[12.0]]) FillingCompositor ----------------- :class:`FillingCompositor`:: fills the missing values in three datasets with the values of another dataset::: >>> from satpy.composites import FillingCompositor >>> compositor = FillingCompositor("fillcomp") >>> filler = local_scene[0.6] >>> data_with_holes_1 = local_scene['ch_a'] >>> data_with_holes_2 = local_scene['ch_b'] >>> data_with_holes_3 = local_scene['ch_c'] >>> composite = compositor([filler, data_with_holes_1, data_with_holes_2, ... data_with_holes_3]) PaletteCompositor ------------------ :class:`PaletteCompositor` creates a color version of a single channel categorical dataset using a colormap:: >>> from satpy.composites import PaletteCompositor >>> compositor = PaletteCompositor("palcomp") >>> composite = compositor([local_scene['cma'], local_scene['cma_pal']]) The palette should have a single entry for all the (possible) values in the dataset mapping the value to an RGB triplet. Typically the palette comes with the categorical (e.g. cloud mask) product that is being visualized. DayNightCompositor ------------------ :class:`DayNightCompositor` merges two different composites. The first composite will be placed on the day-side of the scene, and the second one on the night side. The transition from day to night is done by calculating solar zenith angle (SZA) weighed average of the two composites. The SZA can optionally be given as third dataset, and if not given, the angles will be calculated. Three arguments are used to generate the image (default values shown in the example below). They can be defined when initializing the compositor:: - lim_low (float): lower limit of Sun zenith angle for the blending of the given channels - lim_high (float): upper limit of Sun zenith angle for the blending of the given channels Together with `lim_low` they define the width of the blending zone - day_night (string): "day_night" means both day and night portions will be kept "day_only" means only day portion will be kept "night_only" means only night portion will be kept Usage (with default values):: >>> from satpy.composites import DayNightCompositor >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_night") >>> composite = compositor([local_scene['true_color'], ... local_scene['night_fog']]) As above, with `day_night` flag it is also available to use only a day product or only a night product and mask out (make transparent) the opposite portion of the image (night or day). The example below provides only a day product with night portion masked-out:: >>> from satpy.composites import DayNightCompositor >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only") >>> composite = compositor([local_scene['true_color']) RealisticColors --------------- :class:`RealisticColors` compositor is a special compositor that is used to create realistic near-true-color composite from MSG/SEVIRI data:: >>> from satpy.composites import RealisticColors >>> compositor = RealisticColors("realcols", lim_low=85., lim_high=95.) >>> composite = compositor([local_scene['VIS006'], ... local_scene['VIS008'], ... 
local_scene['HRV']]) CloudCompositor --------------- :class:`CloudCompositor` can be used to threshold the data so that "only" clouds are visible. These composites can be used as an overlay on top of e.g. static terrain images to show a rough idea where there are clouds. The data are thresholded using three variables:: - `transition_min`: values below or equal to this are clouds -> opaque white - `transition_max`: values above this are cloud free -> transparent - `transition_gamma`: gamma correction applied to clarify the clouds Usage (with default values):: >>> from satpy.composites import CloudCompositor >>> compositor = CloudCompositor("clouds", transition_min=258.15, ... transition_max=298.15, ... transition_gamma=3.0) >>> composite = compositor([local_scene[10.8]]) Support for using this compositor for VIS data, where the values for high/thick clouds tend to be in reverse order to brightness temperatures, is to be added. RatioSharpenedRGB ----------------- :class:`RatioSharpenedRGB` SelfSharpenedRGB ---------------- :class:`SelfSharpenedRGB` sharpens the RGB with ratio of a band with a strided version of itself. LuminanceSharpeningCompositor ----------------------------- :class:`LuminanceSharpeningCompositor` replaces the luminance from an RGB composite with luminance created from reflectance data. If the resolutions of the reflectance data _and_ of the target area definition are higher than the base RGB, more details can be retrieved. This compositor can be useful also with matching resolutions, e.g. to highlight shadowing at cloudtops in colorized infrared composite. >>> from satpy.composites import LuminanceSharpeningCompositor >>> compositor = LuminanceSharpeningCompositor("vis_sharpened_ir") >>> vis_data = local_scene['HRV'] >>> colorized_ir_clouds = local_scene['colorized_ir_clouds'] >>> composite = compositor([vis_data, colorized_ir_clouds]) SandwichCompositor ------------------ Similar to :class:`LuminanceSharpeningCompositor`, :class:`SandwichCompositor` uses reflectance data to bring out more details out of infrared or low-resolution composites. :class:`SandwichCompositor` multiplies the RGB channels with (scaled) reflectance. >>> from satpy.composites import SandwichCompositor >>> compositor = SandwichCompositor("ir_sandwich") >>> vis_data = local_scene['HRV'] >>> colorized_ir_clouds = local_scene['colorized_ir_clouds'] >>> composite = compositor([vis_data, colorized_ir_clouds]) StaticImageCompositor --------------------- :class:`StaticImageCompositor` can be used to read an image from disk and used just like satellite data, including resampling and using as a part of other composites. >>> from satpy.composites import StaticImageCompositor >>> compositor = StaticImageCompositor("static_image", filename="image.tif") >>> composite = compositor() BackgroundCompositor -------------------- :class:`BackgroundCompositor` can be used to stack two composites together. If the composites don't have `alpha` channels, the `background` is used where `foreground` has no data. If `foreground` has alpha channel, the `alpha` values are used to weight when blending the two composites. >>> from satpy import Scene >>> from satpy.composites import BackgroundCompositor >>> compositor = BackgroundCompositor() >>> clouds = local_scene['ir_cloud_day'] >>> background = local_scene['overview'] >>> composite = compositor([clouds, background]) CategoricalDataCompositor ------------------------- :class:`CategoricalDataCompositor` can be used to recategorize categorical data. 
This is for example useful to combine comparable categories into a common category. The category remapping from `data` to `composite` is done using a look-up-table (`lut`):: composite = [[lut[data[0,0]], lut[data[0,1]], lut[data[0,Nj]]], [[lut[data[1,0]], lut[data[1,1]], lut[data[1,Nj]], [[lut[data[Ni,0]], lut[data[Ni,1]], lut[data[Ni,Nj]]] Hence, `lut` must have a length that is greater than the maximum value in `data` in orer to avoid an `IndexError`. Below is an example on how to create a binary clear-sky/cloud mask from a pseodu cloud type product with six categories representing clear sky (cat1/cat5), cloudy features (cat2-cat4) and missing/undefined data (cat0):: >>> cloud_type = local_scene['cloud_type'] # 0 - cat0, 1 - cat1, 2 - cat2, 3 - cat3, 4 - cat4, 5 - cat5, # categories: 0 1 2 3 4 5 >>> lut = [np.nan, 0, 1, 1, 1, 0] >>> compositor = CategoricalDataCompositor('binary_cloud_mask', lut=lut) >>> composite = compositor([cloud_type]) # 0 - cat1/cat5, 1 - cat2/cat3/cat4, nan - cat0 Creating composite configuration files ====================================== To save the custom composite, follow the :ref:`component_configuration` documentation. Once your component configuration directory is created you can create your custom composite YAML configuration files. Compositors that can be used for multiple instruments can be placed in the generic ``$SATPY_CONFIG_PATH/composites/visir.yaml`` file. Composites that are specific to one sensor should be placed in ``$SATPY_CONFIG_PATH/composites/.yaml``. Custom enhancements for your new composites can be stored in ``$SATPY_CONFIG_PATH/enhancements/generic.yaml`` or ``$SATPY_CONFIG_PATH/enhancements/.yaml``. With that, you should be able to load your new composite directly. Example configuration files can be found in the satpy repository as well as a few simple examples below. Simple RGB composite -------------------- This is the overview composite shown in the first code example above using :class:`GenericCompositor`:: sensor_name: visir composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.6 - 0.8 - 10.8 standard_name: overview For an instrument specific version (here MSG/SEVIRI), we should use the channel _names_ instead of wavelengths. Note also that the sensor_name is now combination of visir and seviri, which means that it extends the generic visir composites:: sensor_name: visir/seviri composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - VIS006 - VIS008 - IR_108 standard_name: overview In the following examples only the composite receipes are shown, and the header information (sensor_name, composites) and intendation needs to be added. Using modifiers --------------- In many cases the basic datasets that go into the composite need to be adjusted, e.g. for Solar zenith angle normalization. These modifiers can be applied in the following way:: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS006 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - IR_108 standard_name: overview Here we see two changes: 1. channels with modifiers need to have either `name` or `wavelength` added in front of the channel name or wavelength, respectively 2. a list of modifiers attached to the dictionary defining the channel The modifier above is a built-in that normalizes the Solar zenith angle to Sun being directly at the zenith. 
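As a concrete illustration of the header information and indentation mentioned above, the complete
configuration file for this modifier example could look like the following (assuming it is saved as
``$SATPY_CONFIG_PATH/composites/seviri.yaml``)::

    sensor_name: visir/seviri

    composites:
      overview:
        compositor: !!python/name:satpy.composites.GenericCompositor
        prerequisites:
          - name: VIS006
            modifiers: [sunz_corrected]
          - name: VIS008
            modifiers: [sunz_corrected]
          - IR_108
        standard_name: overview
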
More examples can be found in Satpy source code directory `satpy/etc/composites `_. See the :doc:`modifiers` documentation for more information on available built-in modifiers. Using other composites ---------------------- Often it is handy to use other composites as a part of the composite. In this example we have one composite that relies on solar channels on the day side, and another for the night side:: natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - natural_color - night_fog standard_name: natural_with_night_fog This compositor has three additional keyword arguments that can be defined (shown with the default values, thus identical result as above):: natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - natural_color - night_fog lim_low: 85.0 lim_high: 88.0 day_night: "day_night" standard_name: natural_with_night_fog Defining other composites in-line --------------------------------- It is also possible to define sub-composites in-line. This example is the built-in airmass composite:: airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 6.2 - wavelength: 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 9.7 - wavelength: 10.8 - wavelength: 6.2 standard_name: airmass Using a pre-made image as a background -------------------------------------- Below is an example composite config using :class:`StaticImageCompositor`, :class:`DayNightCompositor`, :class:`CloudCompositor` and :class:`BackgroundCompositor` to show how to create a composite with a blended day/night imagery as background for clouds. As the images are in PNG format, and thus not georeferenced, the name of the area definition for the background images are given. When using GeoTIFF images the `area` parameter can be left out. .. note:: The background blending uses the current time if there is no timestamps in the image filenames. :: clouds_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: clouds_with_background prerequisites: - ir_cloud_day - compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - static_day - static_night static_day: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: static_day filename: /path/to/day_image.png area: euro4 static_night: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: static_night filename: /path/to/night_image.png area: euro4 To ensure that the images aren't auto-stretched and possibly altered, the following should be added to enhancement config (assuming 8-bit image) for both of the static images:: static_day: standard_name: static_day operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [255, 255, 255] .. _enhancing-the-images: Enhancing the images ==================== .. todo:: Explain how composite names, composite standard_name, enhancement names, and enhancement standard_name are related to each other Explain what happens when no enhancement is configured for a product (= use the default enhancement). Explain that the methods are often just a wrapper for XRImage methods, but can also be something completely custom. 
List and explain in detail the built-in enhancements: - stretch - gamma - invert - crefl_scaling - cira_stretch - lookup - colorize - palettize - three_d_effect - btemp_threshold .. todo:: Should this be in another file/page? After the composite is defined and created, it needs to be converted to an image. To do this, it is necessary to describe how the data values are mapped to values stored in the image format. This procedure is called ``stretching``, and in SatPy it is implemented by ``enhancements``. The first step is to convert the composite to an :class:`~trollimage.xrimage.XRImage` object:: >>> from satpy.writers import to_image >>> img = to_image(composite) Now it is possible to apply enhancements available in the class:: >>> img.invert([False, False, True]) >>> img.stretch("linear") >>> img.gamma(1.7) And finally either show or save the image:: >>> img.show() >>> img.save('image.tif') As pointed out in the composite section, it is better to define frequently used enhancements in configuration files under ``$SATPY_CONFIG_PATH/enhancements/``. The enhancements can either be in ``generic.yaml`` or instrument-specific file (e.g., ``seviri.yaml``). The above enhancement can be written (with the headers necessary for the file) as:: enhancements: overview: standard_name: overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [False, False, True] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.7, 1.7, 1.7] More examples can be found in SatPy source code directory ``satpy/etc/enhancements/generic.yaml``. See the :doc:`enhancements` documentation for more information on available built-in enhancements. .. include:: modifiers.rst satpy-0.34.0/doc/source/conf.py000066400000000000000000000230201420401153000162650ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # NWCSAF/MSG PP documentation build configuration file, created by # sphinx-quickstart on Fri Sep 25 16:58:28 2009. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. """Sphinx documentation configuration and setup.""" from __future__ import annotations import os import sys from datetime import datetime from pkg_resources import get_distribution # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.append(os.path.abspath('../../')) sys.path.append(os.path.abspath(os.path.dirname(__file__))) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # get version using setuptools-scm release = get_distribution('satpy').version # The full version, including alpha/beta/rc tags. 
# for example take major/minor version = '.'.join(release.split('.')[:2]) class Mock(object): # noqa """Mock class for mocking module instances.""" def __init__(self, *args, **kwargs): """Mask any arguments to mock object.""" pass def __call__(self, *args, **kwargs): """Mock a function and class object when accessed from mocked module.""" return Mock() @classmethod def __getattr__(cls, name): """Mock common module attributes used in documentation.""" if name in ('__file__', '__path__'): return '/dev/null' elif name[0] == name[0].upper(): mockType = type(name, (), {}) mockType.__module__ = __name__ return mockType elif name == "inf": return 0 else: return Mock() # https://github.com/sphinx-doc/sphinx/issues/3920 MOCK_MODULES = ['h5py'] for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() # type: ignore autodoc_mock_imports = ['cf', 'glymur', 'h5netcdf', 'imageio', 'mipp', 'netCDF4', 'pygac', 'pygrib', 'pyhdf', 'pyninjotiff', 'pyorbital', 'pyspectral', 'rasterio', 'trollimage', 'zarr'] autoclass_content = 'both' # append class __init__ docstring to the class docstring # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.autosummary', 'doi_role', 'sphinx.ext.viewcode', 'sphinxcontrib.apidoc'] # API docs apidoc_module_dir = "../../satpy" apidoc_output_dir = "api" apidoc_excluded_paths = [ 'readers/caliop_l2_cloud.py', 'readers/ghrsst_l3c_sst.py', 'readers/li_l2.py', 'readers/scatsat1_l2b.py', ] apidoc_separate_modules = True # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Satpy' copyright = u'2009-{}, The PyTroll Team'.format(datetime.utcnow().strftime("%Y")) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. # unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees: list[str] = [] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. 
Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] html_css_files = [ 'theme_overrides.css', # override wide tables in RTD theme ] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_use_modindex = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'NWCSAFMSGPPdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). # latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'satpy.tex', 'Satpy Documentation', 'Satpy Developers', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_use_modindex = True # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { 'dask': ('https://docs.dask.org/en/latest', None), 'geoviews': ('http://geoviews.org', None), 'jobqueue': ('https://jobqueue.dask.org/en/latest', None), 'numpy': ('https://numpy.org/doc/stable', None), 'pydecorate': ('https://pydecorate.readthedocs.io/en/stable', None), 'pyorbital': ('https://pyorbital.readthedocs.io/en/stable', None), 'pyproj': ('https://pyproj4.github.io/pyproj/dev', None), 'pyresample': ('https://pyresample.readthedocs.io/en/stable', None), 'pytest': ('https://docs.pytest.org/en/stable/', None), 'python': ('https://docs.python.org/3', None), 'scipy': ('http://scipy.github.io/devdocs', None), 'trollimage': ('https://trollimage.readthedocs.io/en/stable', None), 'trollsift': ('https://trollsift.readthedocs.io/en/stable', None), 'xarray': ('https://xarray.pydata.org/en/stable', None), 'rasterio': ('https://rasterio.readthedocs.io/en/latest', None), 'donfig': ('https://donfig.readthedocs.io/en/latest', None), 'pooch': ('https://www.fatiando.org/pooch/latest/', None), } satpy-0.34.0/doc/source/config.rst000066400000000000000000000243711420401153000167770ustar00rootroot00000000000000Configuration ============= Satpy has two levels of configuration that allow to control how Satpy and its various components behave. There are a series of "settings" that change the global Satpy behavior. There are also a series of "component configuration" YAML files for controlling the complex functionality in readers, compositors, writers, and other Satpy components that can't be controlled with traditional keyword arguments. Settings -------- There are configuration parameters in Satpy that are not specific to one component and control more global behavior of Satpy. These parameters can be set in one of three ways: 1. Environment variable 2. YAML file 3. At runtime with ``satpy.config`` This functionality is provided by the :doc:`donfig ` library. The currently available settings are described below. Each option is available from all three methods. If specified as an environment variable or specified in the YAML file on disk, it must be set **before** Satpy is imported. **YAML Configuration** YAML files that include these parameters can be in any of the following locations: 1. ``/etc/satpy/satpy.yaml`` 2. ``/satpy.yaml`` (see below) 3. ``~/.satpy/satpy.yaml`` 4. ``/satpy.yaml`` (see :ref:`config_path_setting` below) The above ``user_config_dir`` is provided by the ``appdirs`` package and differs by operating system. Typical user config directories are: * Mac OSX: ``~/Library/Preferences/satpy`` * Unix/Linux: ``~/.config/satpy`` * Windows: ``C:\\Users\\\\AppData\\Local\\pytroll\\satpy`` All YAML files found from the above paths will be merged into one configuration object (accessed via ``satpy.config``). The YAML contents should be a simple mapping of configuration key to its value. For example: .. code-block:: yaml cache_dir: "/tmp" data_dir: "/tmp" Lastly, it is possible to specify an additional config path to the above options by setting the environment variable ``SATPY_CONFIG``. The file specified with this environment variable will be added last after all of the above paths have been merged together. **At runtime** After import, the values can be customized at runtime by doing: .. code-block:: python import satpy satpy.config.set(cache_dir="/my/new/cache/path") # ... normal satpy code ... Or for specific blocks of code: .. code-block:: python import satpy with satpy.config.set(cache_dir="/my/new/cache/path"): # ... some satpy code ... # ... 
code using the original cache_dir Similarly, if you need to access one of the values you can use the ``satpy.config.get`` method. Cache Directory ^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_CACHE_DIR`` * **YAML/Config Key**: ``cache_dir`` * **Default**: See below Directory where any files cached by Satpy will be stored. This directory is not necessarily cleared out by Satpy, but is rarely used without explicitly being enabled by the user. This defaults to a different path depending on your operating system following the `appdirs `_ "user cache dir". .. _config_cache_lonlats_setting: Cache Longitudes and Latitudes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_CACHE_LONLATS`` * **YAML/Config Key**: ``cache_lonlats`` * **Default**: ``False`` Whether or not generated longitude and latitude coordinates should be cached to on-disk zarr arrays. Currently this only works in very specific cases. Mainly the lon/lats that are generated when computing sensor and solar zenith and azimuth angles used in various modifiers and compositors. This caching is only done for ``AreaDefinition``-based geolocation, not ``SwathDefinition``. Arrays are stored in ``cache_dir`` (see above). When setting this as an environment variable, this should be set with the string equivalent of the Python boolean values ``="True"`` or ``="False"``. See also ``cache_sensor_angles`` below. .. warning:: This caching does not limit the number of entries nor does it expire old entries. It is up to the user to manage the contents of the cache directory. .. _config_cache_sensor_angles_setting: Cache Sensor Angles ^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_CACHE_SENSOR_ANGLES`` * **YAML/Config Key**: ``cache_sensor_angles`` * **Default**: ``False`` Whether or not generated sensor azimuth and sensor zenith angles should be cached to on-disk zarr arrays. These angles are primarily used in certain modifiers and compositors. This caching is only done for ``AreaDefinition``-based geolocation, not ``SwathDefinition``. Arrays are stored in ``cache_dir`` (see above). This caching requires producing an estimate of the angles to avoid needing to generate new angles for every new data case. This happens because the angle generation depends on the observation time of the data and the position of the satellite (longitude, latitude, altitude). The angles are estimated by using a constant observation time for all cases (maximum ~1e-10 error) and by rounding satellite position coordinates to the nearest tenth of a degree for longitude and latitude and nearest tenth meter (maximum ~0.058 error). Note these estimations are only done if caching is enabled (this parameter is True). When setting this as an environment variable, this should be set with the string equivalent of the Python boolean values ``="True"`` or ``="False"``. See also ``cache_lonlats`` above. .. warning:: This caching does not limit the number of entries nor does it expire old entries. It is up to the user to manage the contents of the cache directory. .. _config_path_setting: Component Configuration Path ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_CONFIG_PATH`` * **YAML/Config Key**: ``config_path`` * **Default**: ``[]`` Base directory, or directories, where Satpy component YAML configuration files are stored. Satpy expects configuration files for specific component types to be in appropriate subdirectories (ex. ``readers``, ``writers``, etc), but these subdirectories should not be included in the ``config_path``. 
For example, if you have custom composites configured in ``/my/config/dir/etc/composites/visir.yaml``, then ``config_path`` should include ``/my/config/dir/etc`` for Satpy to find this configuration file when searching for composites. This option replaces the legacy ``PPP_CONFIG_DIR`` environment variable. Note that this value must be a list. In Python, this could be set by doing: .. code-block:: python satpy.config.set(config_path=['/path/custom1', '/path/custom2']) If setting an environment variable then it must be a colon-separated (``:``) string on Linux/OSX or semicolon-separate (``;``) separated string and must be set **before** calling/importing Satpy. If the environment variable is a single path it will be converted to a list when Satpy is imported. .. code-block:: bash export SATPY_CONFIG_PATH="/path/custom1:/path/custom2" On Windows, with paths on the `C:` drive, these paths would be: .. code-block:: bash set SATPY_CONFIG_PATH="C:/path/custom1;C:/path/custom2" Satpy will always include the builtin configuration files that it is distributed with regardless of this setting. When a component supports merging of configuration files, they are merged in reverse order. This means "base" configuration paths should be at the end of the list and custom/user paths should be at the beginning of the list. .. _data_dir_setting: Data Directory ^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_DATA_DIR`` * **YAML/Config Key**: ``data_dir`` * **Default**: See below Directory where any data Satpy needs to perform certain operations will be stored. This replaces the legacy ``SATPY_ANCPATH`` environment variable. This defaults to a different path depending on your operating system following the `appdirs `_ "user data dir". .. _download_aux_setting: Demo Data Directory ^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_DEMO_DATA_DIR`` * **YAML/Config Key**: ``demo_data_dir`` * **Default**: Directory where demo data functions will download data files to. Available demo data functions can be found in :mod:`satpy.demo` subpackage. Download Auxiliary Data ^^^^^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_DOWNLOAD_AUX`` * **YAML/Config Key**: ``download_aux`` * **Default**: True Whether to allow downloading of auxiliary files for certain Satpy operations. See :doc:`dev_guide/aux_data` for more information. If ``True`` then Satpy will download and cache any necessary data files to :ref:`data_dir_setting` when needed. If ``False`` then pre-downloaded files will be used, but any other files will not be downloaded or checked for validity. .. _component_configuration: Component Configuration ----------------------- Much of the functionality of Satpy comes from the various components it uses, like readers, writers, compositors, and enhancements. These components are configured for reuse from YAML files stored inside Satpy or in custom user configuration files. Custom directories can be provided by specifying the :ref:`config_path setting ` mentioned above. To create and use your own custom component configuration you should: 1. Create a directory to store your new custom YAML configuration files. The files for each component will go in a subdirectory specific to that component (ex. ``composites``, ``enhancements``, ``readers``, ``writers``). 2. Set the Satpy :ref:`config_path ` to point to your new directory. 
This could be done by setting the environment variable ``SATPY_CONFIG_PATH`` to your custom directory (don't include the component sub-directory) or one of the other methods for setting this path. 3. Create YAML configuration files with your custom YAML files. In most cases there is no need to copy configuration from the builtin Satpy files as these will be merged with your custom files. 4. If your custom configuration uses custom Python code, this code must be importable by Python. This means your code must either be installed in your Python environment or you must set your ``PYTHONPATH`` to the location of the modules. 5. Run your Satpy code and access your custom components like any of the builtin components. satpy-0.34.0/doc/source/data_download.rst000066400000000000000000000060271420401153000203300ustar00rootroot00000000000000Downloading Data ================ One of the main features of Satpy is its ability to read various satellite data formats. However, it currently only provides limited methods for downloading data from remote sources and these methods are limited to demo data for `Pytroll examples `_. See the examples and the :mod:`~satpy.demo` API documentation for details. Otherwise, Satpy assumes all data is available through the local system, either as a local directory or network mounted file systems. Certain readers that use ``xarray`` to open data files may be able to load files from remote systems by using OpenDAP or similar protocols. As a user there are two options for getting access to data: 1. Download data to your local machine. 2. Connect to a remote system that already has access to data. The most common case of a remote system having access to data is with a cloud computing service like Google Cloud Platform (GCP) or Amazon Web Services (AWS). Another possible case is an organization having direct broadcast antennas where they receive data directly from the satellite or satellite mission organization (NOAA, NASA, EUMETSAT, etc). In these cases data is usually available as a mounted network file system and can be accessed like a normal local path (with the added latency of network communications). Below are some data sources that provide data that can be read by Satpy. If you know of others please let us know by either creating a GitHub issue or pull request. NOAA GOES on Amazon Web Services -------------------------------- * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` In addition to the pages above, Brian Blaylock's `GOES-2-Go `_ python package is useful for downloading GOES data to your local machine. Brian also prepared some instructions for using the ``rclone`` tool for downloading AWS data to a local machine. The instructions can be found `here `_. 
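Once the files are on local disk they can be read like any other local data. A minimal sketch, assuming
the downloaded Full Disk NetCDF files are stored in a hypothetical ``/data/goes16`` directory::

    >>> from glob import glob
    >>> from satpy import Scene
    >>> scn = Scene(reader='abi_l1b', filenames=glob('/data/goes16/OR_ABI-L1b-RadF*.nc'))
    >>> scn.load(['C13'])
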
NOAA GOES on Google Cloud Platform ---------------------------------- GOES-16 ^^^^^^^ * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` GOES-17 ^^^^^^^ * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` NOAA CLASS ---------- * `Data Ordering `__ * Associated Readers: ``viirs_sdr`` NASA VIIRS Atmosphere SIPS -------------------------- * `Resource Description `__ * Associated Readers: ``viirs_l1b`` EUMETSAT Data Center -------------------- * `Data Ordering `__ satpy-0.34.0/doc/source/dev_guide/000077500000000000000000000000001420401153000167245ustar00rootroot00000000000000satpy-0.34.0/doc/source/dev_guide/CONTRIBUTING.rst000077700000000000000000000000001420401153000246622../../../CONTRIBUTING.rstustar00rootroot00000000000000satpy-0.34.0/doc/source/dev_guide/aux_data.rst000066400000000000000000000124271420401153000212520ustar00rootroot00000000000000Auxiliary Data Download ======================= Sometimes Satpy components need some extra data files to get their work done properly. These include files like Look Up Tables (LUTs), coefficients, or Earth model data (ex. elevations). This includes any file that would be too large to be included in the Satpy python package; anything bigger than a small text file. To help with this, Satpy includes utilities for downloading and caching these files only when your component is used. This saves the user from wasting time and disk space downloading files they may never use. This functionality is made possible thanks to the `Pooch library `_. Downloaded files are stored in the directory configured by :ref:`data_dir_setting`. Adding download functionality ----------------------------- The utility functions for data downloading include a two step process: 1. **Registering**: Tell Satpy what files might need to be downloaded and used later. 2. **Retrieving**: Ask Satpy to download and store the files locally. Registering ^^^^^^^^^^^ Registering a file for downloading tells Satpy the remote URL for the file, and an optional hash. The hash is used to verify a successful download. Registering can also include a ``filename`` to tell Satpy what to name the file when it is downloaded. If not provided it will be determined from the URL. Once registered, Satpy can be told to retrieve the file (see below) by using a "cache key". Cache keys follow the general scheme of ``/`` (ex. ``readers/README.rst``). Satpy includes a low-level function and a high-level Mixin class for registering files. The higher level class is recommended for any Satpy component like readers, writers, and compositors. The lower-level :func:`~satpy.aux_download.register_file` function can be used for any other use case. The :class:`~satpy.aux_download.DataMixIn` class is automatically included in the :class:`~satpy.readers.yaml_reader.FileYAMLReader` and :class:`~satpy.writers.Writer` base classes. For any other component (like a compositor) you should include it as another parent class: .. 
code-block:: python from satpy.aux_download import DataDownloadMixin from satpy.composites import GenericCompositor class MyCompositor(GenericCompositor, DataDownloadMixin): """Compositor that uses downloaded files.""" def __init__(self, name, url=None, known_hash=None, **kwargs): super().__init__(name, **kwargs) data_files = [{'url': url, 'known_hash': known_hash}] self.register_data_files(data_files) However your code registers files, to be consistent it must do it during initialization so that the :func:`~satpy.aux_download.find_registerable_files`. If your component isn't a reader, writer, or compositor then this function will need to be updated to find and load your registered files. See :ref:`offline_aux_downloads` below for more information. As mentioned, the mixin class is included in the base reader and writer class. To register files in these cases, include a ``data_files`` section in your YAML configuration file. For readers this would go under the ``reader`` section and for writers the ``writer`` section. This parameter is a list of dictionaries including a ``url``, ``known_hash``, and optional ``filename``. For example:: reader: name: abi_l1b short_name: ABI L1b long_name: GOES-R ABI Level 1b ... other metadata ... data_files: - url: "https://example.com/my_data_file.dat" - url: "https://raw.githubusercontent.com/pytroll/satpy/main/README.rst" known_hash: "sha256:5891286b63e7745de08c4b0ac204ad44cfdb9ab770309debaba90308305fa759" - url: "https://raw.githubusercontent.com/pytroll/satpy/main/RELEASING.md" filename: "satpy_releasing.md" known_hash: null See the :class:`~satpy.aux_download.DataDownloadMixin` for more information. Retrieving ^^^^^^^^^^ Files that have been registered (see above) can be retrieved by calling the :func:`~satpy.aux_download.retrieve` function. This function expects a single argument: the cache key. Cache keys are returned by registering functions, but can also be pre-determined by following the scheme ``/`` (ex. ``readers/README.rst``). Retrieving a file will download it to local disk if needed and then return the local pathname. Data is stored locally in the :ref:`data_dir_setting`. It is up to the caller to then open the file. .. _offline_aux_downloads: Offline Downloads ----------------- To assist with operational environments, Satpy includes a :func:`~satpy.aux_download.retrieve_all` function that will try to find all files that Satpy components may need to download in the future and download them to the current directory specified by :ref:`data_dir_setting`. This function allows you to specify a list of ``readers``, ``writers``, or ``composite_sensors`` to limit what components are checked for files to download. The ``retrieve_all`` function is also available through a command line script called ``satpy_retrieve_all_aux_data``. Run the following for usage information. .. code-block:: bash satpy_retrieve_all_aux_data --help To make sure that no additional files are downloaded when running Satpy see :ref:`download_aux_setting`. 
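The same pre-downloading can also be triggered from Python code, optionally limited to specific
components as described above. A minimal sketch (the reader names are only illustrative):

.. code-block:: python

    from satpy.aux_download import retrieve_all

    # download and cache only the files needed by these readers
    retrieve_all(readers=['abi_l1b', 'viirs_sdr'])
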
satpy-0.34.0/doc/source/dev_guide/custom_reader.rst000066400000000000000000000567131420401153000223260ustar00rootroot00000000000000================================= Adding a Custom Reader to Satpy ================================= In order to add a reader to satpy, you will need to create two files: - a YAML file for describing the files to read and the datasets that are available - a python file implementing the actual reading of the datasets and metadata Satpy implements readers by defining a single "reader" object that pulls information from one or more file handler objects. The base reader class provided by Satpy is enough for most cases and does not need to be modified. The individual file handler classes do need to be created due to the small differences between file formats. The below documentation will walk through each part of making a reader in detail. To do this we will implement a reader for the EUMETSAT NetCDF format for SEVIRI data. .. _reader_naming: Naming your reader ------------------ Satpy tries to follow a standard scheme for naming its readers. These names are used in filenames, but are also used by users so it is important that the name be recognizable and clear. Although some special cases exist, most fit in to the following naming scheme: .. parsed-literal:: [_[_]][_] All components of the name should be lowercase and use underscores as the main separator between fields. Hyphens should be used as an intra-field separator if needed (ex. goes-imager). :sensor: The first component of the name represents the sensor or instrument that observed the data stored in the files being read. If the files are the output of a specific processing software or a certain algorithm implementation that supports multiple sensors then a lowercase version of that software's name should be used (e.g. clavrx for CLAVR-x, nucaps for NUCAPS). The ``sensor`` field is the only required field of the naming scheme. If it is actually an instrument name then the reader name should include one of the other optional fields. If sensor is a software package then that may be enough without any additional information to uniquely identify the reader. :processing level: This field marks the specific level of processing or calibration that has been performed to produce the data in the files being read. Common values of this field include: ``sdr`` for Sensor Data Record (SDR), ``edr`` for Environmental Data Record (EDR), ``l1b`` for Level 1B, and ``l2`` for Level 2. :level detail: In cases where the processing level is not enough to completely define the reader this field can be used to provide a little more context. For example, some VIIRS EDR products are specific to a particular field of study or type of scientific event, like a flood or cloud product. In these cases the detail field can be added to produce a name like ``viirs_edr_flood``. This field shouldn't be used unless processing level is also specified. :file format: If the file format of the files is informative to the user or can distinguish one reader from another then this field should be specified. Common format names should be abbreviated following existing abbreviations like `nc` for NetCDF3 or NetCDF4, `hdf` for HDF4, `h5` for HDF5. The existing :ref:`reader's table ` can be used for reference. When in doubt, reader names can be discussed in the github pull request when this reader is added to Satpy or a github issue. 
The YAML file ------------- The yaml file is composed of three sections: - the :ref:`reader ` section, that provides basic parameters for the reader - the :ref:`file_types ` section, that gives the patterns of the files this reader can handle - the :ref:`datasets ` section, that describes the datasets available from this reader .. _custom_reader_reader_section: The ``reader`` section ~~~~~~~~~~~~~~~~~~~~~~ The ``reader`` section provides basic parameters for the overall reader. The parameters to provide in this section are: - name: This is the name of the reader, it should be the same as the filename (without the .yaml extension). The naming convention for this is described above in the :ref:`reader_naming` section above. - short_name (optional): Human-readable version of the reader 'name'. If not provided, applications using this can default to taking the 'name', replacing ``_`` with spaces and uppercasing every letter. - long_name: Human-readable title for the reader. This may be used as a section title on a website or in GUI applications using Satpy. Default naming scheme is `` Level []``. For example, for the ``abi_l1b`` reader this is ``"GOES-R ABI Level 1b"`` where "GOES-R" is the name of the program and **not** the name of the platform/satellite. This scheme may not work for all readers, but in general should be followed. See existing readers for more examples. - description: General description of the reader. This may include any `restructuredtext `_ formatted text like links to PDFs or sites with more information on the file format. This can be multiline if formatted properly in YAML (see example below). - sensors: The list of sensors this reader will support. This must be all lowercase letters for full support throughout in Satpy. - reader: The main python reader class to use, in most cases the ``FileYAMLReader`` is a good choice. .. code:: yaml reader: name: seviri_l1b_nc short_name: SEVIRI L1b NetCDF4 long_name: MSG SEVIRI Level 1b (NetCDF4) description: > NetCDF4 reader for EUMETSAT MSG SEVIRI Level 1b files. sensors: [seviri] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader Optionally, if you need to customize the `DataID` for this reader, you can provide the relevant keys with a `data_identification_keys` item here. See the :doc:`satpy_internals` section for more information. .. _custom_reader_file_types_section: The ``file_types`` section ~~~~~~~~~~~~~~~~~~~~~~~~~~ Each file type needs to provide: - ``file_reader``, the class that will handle the files for this reader, that you will implement in the corresponding python file. See the :ref:`custom_reader_python` section below. - ``file_patterns``, the patterns to match to find files this reader can handle. The syntax to use is basically the same as ``format`` with the addition of time. See the `trollsift package documentation `__ for more details. - Optionally, a file type can have a ``requires`` field: it is a list of file types that the current file types needs to function. For example, the HRIT MSG format segment files each need a prologue and epilogue file to be read properly, hence in this case we have added ``requires: [HRIT_PRO, HRIT_EPI]`` to the file type definition. .. 
code:: yaml file_types: nc_seviri_l1b: file_reader: !!python/name:satpy.readers.nc_seviri_l1b.NCSEVIRIFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,VIS+IR+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{processing_time:%Y%m%d%H%M%S}.nc'] nc_seviri_l1b_hrv: file_reader: !!python/name:satpy.readers.nc_seviri_l1b.NCSEVIRIHRVFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,HRV+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{processing_time:%Y%m%d%H%M%S}.nc'] .. _custom_reader_datasets_section: The ``datasets`` section ~~~~~~~~~~~~~~~~~~~~~~~~ The datasets section describes each dataset available in the files. The parameters provided are made available to the methods of the implemented python class. If your input files contain all the necessary metadata or you have a lot of datasets to configure look at the :ref:`custom_reader_available_datasets` section below. Implementing this will save you from having to write a lot of configuration in the YAML files. Parameters you can define for example are: - name - sensor - resolution - wavelength - polarization - standard\_name: The `CF standard name `_ for the dataset that will be used to determine the type of data. See existing readers for common standard names in Satpy or the CF standard name documentation for other available names or how to define your own. Satpy does not currently have a hard requirement on these names being completely CF compliant, but consistency across readers is important. - units: The units of the data when returned by the file handler. Although not technically a requirement, it is common for Satpy datasets to use "%" for reflectance fields and "K" for brightness temperature fields. - modifiers: The modification(s) that have already been applied to the data when it is returned by the file handler. Only a few of these have been standardized across Satpy, but are based on the names of the modifiers configured in the "composites" YAML files. Examples include ``sunz_corrected`` or ``rayleigh_corrected``. See the `metadata wiki `_ for more information. - file\_type: Name of file type (see above). - coordinates: An optional two-element list with the names of the longitude and latitude datasets describing the location of this dataset. This is optional if the data being read is gridded already. Swath data, from example data from some polar-orbiting satellites, should have these defined or no geolocation information will be available when the data is loaded. For gridded datasets a `get_area_def` function will be implemented in python (see below) to define geolocation information. - Any other field that is relevant for the reader or could be useful metadata provided to the user. This section can be copied and adapted simply from existing seviri readers, like for example the ``msg_native`` reader. .. 
code:: yaml datasets: HRV: name: HRV resolution: 1000.134348869 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b_hrv IR_016: name: IR_016 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b nc_key: 'ch3' IR_039: name: IR_039 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b nc_key: 'ch4' IR_087: name: IR_087 resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_097: name: IR_097 resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_108: name: IR_108 resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_120: name: IR_120 resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_134: name: IR_134 resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b VIS006: name: VIS006 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b VIS008: name: VIS008 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b WV_062: name: WV_062 resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: 
counts units: count file_type: nc_seviri_l1b WV_073: name: WV_073 resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b The YAML file is now ready and you can move on to writing your python code. .. _custom_reader_available_datasets: Dynamic Dataset Configuration ----------------------------- The above "datasets" section for reader configuration is the most explicit method for specifying metadata about possible data that can be loaded from input files. It is also the easiest way for people with little python experience to customize or add new datasets to a reader. However, some file formats may have 10s or even 100s of datasets or variations of datasets. Writing the metadata and access information for every one of these datasets can easily become a problem. To help in these cases the :meth:`~satpy.readers.file_handlers.BaseFileHandler.available_datasets` file handler interface can be used. This method, if needed, should be implemented in your reader's file handler classes. The best information for what this method does and how to use it is available in the :meth:`API documentation `. This method is good when you want to: 1. Define datasets dynamically without needing to define them in the YAML. 2. Supplement metadata from the YAML file with information from the file content (ex. `resolution`). 3. Determine if a dataset is available by the file contents. This differs from the default behavior of a dataset being considered loadable if its "file_type" is loaded. Note that this is considered an advanced interface and involves more advanced Python concepts like generators. If you need help with anything feel free to ask questions in your pull request or on the :ref:`Pytroll Slack `. .. _custom_reader_python: The python file --------------- The python file needs to implement a file handler class for each file type that we want to read. Such a class needs to implement a few methods: - the ``__init__`` method, that takes as arguments - the filename (string) - the filename info (dict) that we get by parsing the filename using the pattern defined in the yaml file - the filetype info that we get from the filetype definition in the yaml file This method can also receive other file handler instances as parameters if the filetype at hand has requirements. (See the explanation in the YAML file filetype section above) - the ``get_dataset`` method, which takes as arguments - the dataset ID of the dataset to load - the dataset info that is the description of the channel in the YAML file This method has to return an xarray.DataArray instance if the loading is successful, containing the data and :ref:`metadata ` of the loaded dataset, or return None if the loading was unsuccessful. The DataArray should at least have a ``y`` dimension. For data covering a 2D region on the Earth, there should be at least a ``y`` and an ``x`` dimension. This applies to non-gridded data such as that of a polar-orbiting satellite instrument. The latitude dimension is typically named ``y`` and longitude named ``x``. This may require renaming dimensions from the file, see the :meth:`xarray.DataArray.rename` method for more information and its use in the example below.
- the ``get_area_def`` method, that takes as single argument the :class:`~satpy.dataset.DataID` for which we want the area. It should return a :class:`~pyresample.geometry.AreaDefinition` object. For data that cannot be geolocated with an area definition, the pixel coordinates will be loaded using the ``get_dataset`` method for the resulting scene to be navigated. The names of the datasets to be loaded should be specified as a special ``coordinates`` attribute in the YAML file. For example, by specifying ``coordinates: [longitude_dataset, latitude_dataset]`` in the YAML, Satpy will call ``get_dataset`` twice, once to load the dataset named ``longitude_dataset`` and once to load ``latitude_dataset``. Satpy will then create a :class:`~pyresample.geometry.SwathDefinition` with this coordinate information and assign it to the dataset's ``.attrs['area']`` attribute. - Optionally, the ``get_bounding_box`` method can be implemented if filtering files by area is desirable for this data type On top of that, two attributes need to be defined: ``start_time`` and ``end_time``, that define the start and end times of the sensing. If you are writing a file handler for more common formats like HDF4, HDF5, or NetCDF4 you may want to consider using the utility base classes for each: :class:`satpy.readers.hdf4_utils.HDF4FileHandler`, :class:`satpy.readers.hdf5_utils.HDF5FileHandler`, and :class:`satpy.readers.netcdf_utils.NetCDF4FileHandler`. These were added as a convenience and are not required to read these formats. In many cases using the :func:`xarray.open_dataset` function in a custom file handler is a much better idea. .. note:: Be careful about the data types of the datasets your reader is returning. It is easy to let the data be coerced into double precision floats (`np.float64`). At the moment, satellite instruments are rarely measuring in a resolution greater than what can be encoded in 16 bits. As such, to preserve processing power, please consider carefully what data type you should scale or calibrate your data to. Single precision floats (`np.float32`) is a good compromise, as it has 23 significant bits (mantissa) and can thus represent 16 bit integers exactly, as well as keeping the memory footprint half of a double precision float. One commonly used method in readers is :meth:`xarray.DataArray.where` (to mask invalid data) which can be coercing the data to `np.float64`. To ensure for example that integer data is coerced to `np.float32` when :meth:`xarray.DataArray.where` is used, you can do:: my_float_dataarray = my_int_dataarray.where(some_condition, np.float32(np.nan)) One way of implementing a file handler is shown below: .. 
code:: python

    # this is seviri_l1b_nc.py
    import xarray as xr

    from pyresample.geometry import AreaDefinition
    from satpy.readers.file_handlers import BaseFileHandler

    # Rows/columns per dask chunk; Satpy provides a configurable chunk size,
    # this constant just keeps the example self-contained.
    CHUNK_SIZE = 4096


    class NCSEVIRIFileHandler(BaseFileHandler):
        def __init__(self, filename, filename_info, filetype_info):
            super(NCSEVIRIFileHandler, self).__init__(filename, filename_info, filetype_info)
            self.nc = None

        def get_dataset(self, dataset_id, dataset_info):
            if dataset_id['calibration'] != 'radiance':
                # TODO: implement calibration to reflectance or brightness temperature
                return
            if self.nc is None:
                self.nc = xr.open_dataset(self.filename,
                                          decode_cf=True,
                                          mask_and_scale=True,
                                          chunks={'num_columns_vis_ir': CHUNK_SIZE,
                                                  'num_rows_vis_ir': CHUNK_SIZE})
                self.nc = self.nc.rename({'num_columns_vis_ir': 'x', 'num_rows_vis_ir': 'y'})
            dataset = self.nc[dataset_info['nc_key']]
            dataset.attrs.update(dataset_info)
            return dataset

        def get_area_def(self, dataset_id):
            return AreaDefinition(
                "some_area_name",
                "on-the-fly area",
                "geos",
                "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos",
                3636,
                3636,
                [-5456233.41938636, -5453233.01608472, 5453233.01608472, 5456233.41938636])


    class NCSEVIRIHRVFileHandler():
        # left as an exercise to the reader :)
        pass

If you have any questions, please contact the :ref:`Satpy developers `. Auxiliary File Download ----------------------- If your reader needs additional data files to do calibrations, corrections, or anything else see the :doc:`aux_data` document for more information on how to download and cache these files without including them in the Satpy python package. satpy-0.34.0/doc/source/dev_guide/index.rst000066400000000000000000000101151420401153000205630ustar00rootroot00000000000000================= Developer's Guide ================= The below sections will walk through how to set up a development environment, make changes to the code, and test that they work. See the :doc:`CONTRIBUTING` section for more information on getting started and contributor expectations. Additional information for developers can be found at the pages listed below. .. toctree:: :maxdepth: 1 CONTRIBUTING xarray_migration custom_reader plugins satpy_internals aux_data Coding guidelines ================= Satpy is part of `Pytroll `_, and all code should follow the `Pytroll coding guidelines and best practices `_. Satpy is now Python 3 only and no longer needs to support Python 2. Check ``setup.py`` for the current Python versions any new code needs to support. .. _devinstall: Development installation ======================== See the :doc:`../install` section for basic installation instructions. When it comes time to install Satpy it should be installed from a clone of the git repository and in development mode so that local file changes are automatically reflected in the python environment. We highly recommend making a separate conda environment or virtualenv for development. For example, you can do this using conda_:: conda create -n satpy-dev python=3.8 conda activate satpy-dev .. _conda: https://conda.io/ This will create a new environment called "satpy-dev" with Python 3.8 installed. The second command will activate the environment so any future conda, python, or pip commands will use this new environment. If you plan on contributing back to the project you should first `fork the repository `_ and clone your fork. The package can then be installed in development mode by doing:: conda install --only-deps satpy pip install -e .
The first command will install all dependencies needed by the Satpy conda-forge package, but won't actually install Satpy. The second command should be run from the root of the cloned Satpy repository (where the ``setup.py`` is) and will install the actual package. You can now edit the python files in your cloned repository and have them immediately reflected in your conda environment. Running tests ============= Satpy tests are written using the third-party :doc:`pytest ` package. There is usually no need to run all Satpy tests, but instead only run the tests related to the component you are working on. All tests are automatically run from the GitHub Pull Request using multiple versions of Python, multiple operating systems, and multiple versions of dependency libraries. If you want to run all Satpy tests you will need to install additional dependencies that aren't needed for regular Satpy usage. To install them run:: pip install -e .[tests] Satpy tests can be executed by running:: pytest satpy/tests You can also run a specific tests by specifying a sub-directory or module:: pytest satpy/tests/reader_tests/test_abi_l1b.py Running benchmarks ================== Satpy benchmarks are written using the `Airspeed Velocity `_ package (:mod:`asv`). The benchmarks can be run using:: asv run These are pretty computation intensive, and shouldn't be run unless you want to diagnose some performance issue for example. Once the benchmarks have run, you can use:: asv publish asv preview to have a look at the results. Again, have a look at the `asv` documentation for more information. Documentation ============= Satpy's documentation is built using Sphinx. All documentation lives in the ``doc/`` directory of the project repository. After editing the source files there the documentation can be generated locally:: cd doc make html The output of the make command should be checked for warnings and errors. If code has been changed (new functions or classes) then the API documentation files should be regenerated before running the above command:: sphinx-apidoc -f -T -o source/api ../satpy ../satpy/tests satpy-0.34.0/doc/source/dev_guide/plugins.rst000066400000000000000000000021321420401153000211350ustar00rootroot00000000000000================================================ Adding new functionality to Satpy via plugins ================================================ .. warning:: This feature is experimental and being modified without warnings. For now, it should not be used for anything else than toy examples and should not be relied on. Satpy has the capability of using plugins. At the moment, new composites can be added to satpy through external plugins. Plugins for reader and writers may be added at a later date (PRs are welcome!). Here is an `example `_ of a composites plugin. The key is to use the same configuration directory structure as satpy and add a `satpy.composites` entry point in the setup.py file of the plugin: .. 
code:: python

    from setuptools import setup
    import os

    setup(
        name='satpy_cpe',
        entry_points={
            'satpy.composites': [
                'example_composites = satpy_cpe',
            ],
        },
        package_data={'satpy_cpe': [os.path.join('etc', 'composites/*.yaml')]},
    )

satpy-0.34.0/doc/source/dev_guide/satpy_internals.rst000066400000000000000000000152631420401153000227040ustar00rootroot00000000000000====================================================== Satpy internal workings: having a look under the hood ====================================================== Querying and identifying data arrays ==================================== DataQuery --------- The loading of data in Satpy is usually done through giving the name or the wavelength of the data arrays we are interested in. This way, the highest, most calibrated data array is often returned. However, in some cases, we need more control over the loading of the data arrays. The way to accomplish this is to load data arrays using queries, eg:: scn.load([DataQuery(name='channel1', resolution=400)]) Here a data array with name `channel1` and of resolution `400` will be loaded if available. Note that None is not a valid value, and keys having a value set to None will simply be ignored. If one wants to use wildcards to query data, just provide `'*'`, eg:: scn.load([DataQuery(name='channel1', resolution=400, calibration='*')]) Alternatively, one can provide a list as a parameter to query data, like this:: scn.load([DataQuery(name='channel1', resolution=[400, 800])]) DataID ------ Satpy stores loaded data arrays in a special dictionary (`DatasetDict`) inside scene objects. In order to identify each data array uniquely, Satpy assigns an ID to each data array, which is then used as the key in the scene object. These IDs are of type `DataID` and are immutable. They are not supposed to be used by regular users and should only be created in special circumstances. Satpy should take care of creating and assigning these automatically. They are also stored in the `attrs` of each data array as `_satpy_id`. Default and custom metadata keys ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ One thing however that the user has control over is which metadata keys are relevant to which datasets. Satpy provides two default sets of metadata keys (or ID keys), one for regular imager bands, and the other for composites. The first one contains: name, wavelength, resolution, calibration, modifiers. The second one contains: name, resolution. As an example here is the definition of the first one in yaml: .. code-block:: yaml data_identification_keys: name: required: true wavelength: type: !!python/name:satpy.dataset.WavelengthRange resolution: calibration: enum: - reflectance - brightness_temperature - radiance - counts transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple To create a new set, the user can provide indications in the relevant yaml file. It has to be provided in the header of the reader configuration file, under the `reader` section, as `data_identification_keys`. Each key under this is the name of the relevant metadata key that will be used to find relevant information in the attributes of the data arrays. Under each of these, a few options are available: - `required`: if the item is required, False by default - `type`: the type to use. More on this further down. - `enum`: if the item has to be limited to a finite number of options, an enum can be used. Be sure to place the options in the order of preference, with the most desirable option on top.
- `default`: the default value to assign to the item if nothing (or None) is provided. If this option isn't provided, the key will simply be omitted if it is not present in the attrs or if it is None. It will be passed to the type's `convert` method if available. - `transitive`: whether the key is to be passed when looking for dependencies of composites/modifiers. Here for example, a composite that requires a given calibration type will pass this calibration type requirement to its dependencies. If the definition of the metadata keys needs to be done in python rather than in a yaml file, it will be a dictionary very similar to the yaml code. Here is the same example as above in python: .. code-block:: python from satpy.dataset import WavelengthRange, ModifierTuple id_keys_config = {'name': { 'required': True, }, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'reflectance', 'brightness_temperature', 'radiance', 'counts' ], 'transitive': True, }, 'modifiers': { 'required': True, 'default': ModifierTuple(), 'type': ModifierTuple, }, } Types ~~~~~ Types are classes that implement a type to be used as a value for metadata in the `DataID`. They have to implement a few methods: - a `convert` class method that returns its argument as an instance of the class - `__hash__`, `__eq__` and `__ne__` methods - a `distance` method that tells how "far" an instance of this class is from its argument. An example of such a class is the :class:`WavelengthRange ` class. Through its implementation, it allows us to use the wavelength in a query to find out which DataID in a list has its central wavelength closest to that query, for example. DataID and DataQuery interactions ================================= Different DataIDs and DataQuerys can have different metadata items defined. As such we define equality between different instances of these classes, and across the classes, as equality between the sorted key/value pairs shared between the instances. If a DataQuery has one or more values set to `'*'`, the corresponding key/value pair will be omitted from the comparison. Instances sharing no keys will not be equal. Breaking changes from DatasetIDs ================================ - The way to access values from the DataID and DataQuery is through getitem: `my_dataid['resolution']` - For checking if a dataset is loaded, use `'mydataset' in scene`, as `'mydataset' in scene.keys()` will always return `False`: the `DatasetDict` instance only supports `DataID` as key type. Creating DataID for tests ========================= Sometimes, it is useful to create `DataID` instances for testing purposes. For these cases, the `satpy.tests.utils` module has a `make_dataid` function that can be used just for this:: from satpy.tests.utils import make_dataid did = make_dataid(name='camembert', modifiers=('runny',)) satpy-0.34.0/doc/source/dev_guide/xarray_migration.rst000066400000000000000000000261701420401153000230430ustar00rootroot00000000000000============================ Migrating to xarray and dask ============================ Many python developers dealing with meteorological satellite data begin with using NumPy arrays directly. This work usually involves masked arrays, boolean masks, index arrays, and reshaping. Due to the libraries used by Satpy these operations can't always be done in the same way. This guide acts as a starting point for new Satpy developers in transitioning from NumPy's array operations to Satpy's operations, although they are very similar.
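To make the comparison concrete, here is a small, purely illustrative sketch of the same masking-and-scaling step written first with a NumPy masked array and then in the xarray/dask style used throughout Satpy (the fill value and scale factor are made up):

.. code-block:: python

    import dask.array as da
    import numpy as np
    import xarray as xr

    counts = np.array([[0, 100, 200], [300, 400, 65535]], dtype=np.uint16)

    # NumPy style: mask the fill value, then scale
    radiance_np = np.ma.masked_equal(counts, 65535) * 0.01

    # xarray/dask style: wrap the data in a dask-backed DataArray,
    # mask invalid values with NaN using .where(), then scale
    arr = xr.DataArray(da.from_array(counts, chunks=2), dims=['y', 'x'])
    radiance_xr = arr.where(arr != 65535) * 0.01
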
To provide the most functionality for users, Satpy uses the `xarray `_ library's :class:`~xarray.DataArray` object as the main representation for its data. DataArray objects can also benefit from the `dask `_ library. The combination of these libraries allow Satpy to easily distribute operations over multiple workers, lazy evaluate operations, and keep track additional metadata and coordinate information. XArray ------ .. code-block:: python import xarray as xr :class:`XArray's DataArray ` is now the standard data structure for arrays in satpy. They allow the array to define dimensions, coordinates, and attributes (that we use for metadata). To create such an array, you can do for example .. code-block:: python my_dataarray = xr.DataArray(my_data, dims=['y', 'x'], coords={'x': np.arange(...)}, attrs={'sensor': 'olci'}) where ``my_data`` can be a regular numpy array, a numpy memmap, or, if you want to keep things lazy, a dask array (more on dask later). Satpy uses dask arrays with all of its DataArrays. Dimensions ********** In satpy, the dimensions of the arrays should include: - `x` for the x or column or pixel dimension - `y` for the y or row or line dimension - `bands` for composites - `time` can also be provided, but we have limited support for it at the moment. Use metadata for common cases (`start_time`, `end_time`) Dimensions are accessible through :attr:`my_dataarray.dims `. To get the size of a given dimension, use :attr:`~xarray.DataArray.sizes`: .. code-block:: python my_dataarray.sizes['x'] Coordinates *********** Coordinates can be defined for those dimensions when it makes sense: - `x` and `y`: Usually defined when the data's area is an :class:`~pyresample.geometry.AreaDefinition`, and they contain the projection coordinates in x and y. - `bands`: Contain the letter of the color they represent, eg ``['R', 'G', 'B']`` for an RGB composite. This allows then to select for example a single band like this: .. code-block:: python red = my_composite.sel(bands='R') or even multiple bands: .. code-block:: python red_and_blue = my_composite.sel(bands=['R', 'B']) To access the coordinates of the data array, use the following syntax: .. code-block:: python x_coords = my_dataarray['x'] my_dataarray['y'] = np.arange(...) Most of the time, satpy will fill the coordinates for you, so you just need to provide the dimension names. Attributes ********** To save metadata, we use the :attr:`~xarray.DataArray.attrs` dictionary. .. code-block:: python my_dataarray.attrs['platform_name'] = 'Sentinel-3A' Some metadata that should always be present in our dataarrays: - ``area`` the area of the dataset. This should be handled in the reader. - ``start_time``, ``end_time`` - ``sensor`` Operations on DataArrays ************************ DataArrays work with regular arithmetic operation as one would expect of eg numpy arrays, with the exception that using an operator on two DataArrays requires both arrays to share the same dimensions, and coordinates if those are defined. For mathematical functions like cos or log, you can use numpy functions directly and they will return a DataArray object: .. code-block:: python import numpy as np cos_zen = np.cos(zen_xarray) Masking data ************ In DataArrays, masked data is represented with NaN values. Hence the default type is ``float64``, but ``float32`` works also in this case. XArray can't handle masked data for integer data, but in satpy we try to use the special ``_FillValue`` attribute (in ``.attrs``) to handle this case. 
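As a minimal sketch of that convention (the variable name and fill value below are made up), an integer dataset returned by a file handler might keep its fill value in the attributes instead of being converted to floats:

.. code-block:: python

    import numpy as np
    import xarray as xr

    counts = xr.DataArray(np.array([[0, 100, 65535]], dtype=np.uint16),
                          dims=['y', 'x'],
                          attrs={'_FillValue': 65535, 'units': 'count'})
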
If you come across a case where this isn't handled properly, contact us. Masking data from a condition can be done with: .. code-block:: python result = my_dataarray.where(my_dataarray > 5) Result is then analogous to my_dataarray, with values lower or equal to 5 replaced by NaNs. Further reading *************** http://xarray.pydata.org/en/stable/generated/xarray.DataArray.html#xarray.DataArray Dask ---- .. code-block:: python import dask.array as da The data part of the DataArrays we use in satpy are mostly dask Arrays. That allows lazy and chunked operations for efficient processing. Creation ******** From a numpy array ++++++++++++++++++ To create a dask array from a numpy array, one can call the :func:`~dask.array.from_array` function: .. code-block:: python darr = da.from_array(my_numpy_array, chunks=4096) The *chunks* keyword tells dask the size of a chunk of data. If the numpy array is 3-dimensional, the chunk size provide above means that one chunk will be 4096x4096x4096 elements. To prevent this, one can provide a tuple: .. code-block:: python darr = da.from_array(my_numpy_array, chunks=(4096, 1024, 2)) meaning a chunk will be 4096x1024x2 elements in size. Even more detailed sizes for the chunks can be provided if needed, see the :doc:`dask documentation `. From memmaps or other lazy objects ++++++++++++++++++++++++++++++++++ To avoid loading the data into memory when creating a dask array, other kinds of arrays can be passed to :func:`~dask.array.from_array`. For example, a numpy memmap allows dask to know where the data is, and will only be loaded when the actual values need to be computed. Another example is a hdf5 variable read with h5py. Procedural generation of data +++++++++++++++++++++++++++++ Some procedural generation function are available in dask, eg :func:`~dask.array.meshgrid`, :func:`~dask.array.arange`, or :func:`random.random `. From XArray to Dask and back **************************** Certain operations are easiest to perform on dask arrays by themselves, especially when certain functions are only available from the dask library. In these cases you can operate on the dask array beneath the DataArray and create a new DataArray when done. Note dask arrays do not support in-place operations. In-place operations on xarray DataArrays will reassign the dask array automatically. .. code-block:: python dask_arr = my_dataarray.data dask_arr = dask_arr + 1 # ... other non-xarray operations ... new_dataarr = xr.DataArray(dask_arr, dims=my_dataarray.dims, attrs=my_dataarray.attrs.copy()) Or if the operation should be assigned back to the original DataArray (if and only if the data is the same size): .. code-block:: python my_dataarray.data = dask_arr Operations and how to get actual results **************************************** Regular arithmetic operations are provided, and generate another dask array. >>> arr1 = da.random.uniform(0, 1000, size=(1000, 1000), chunks=100) >>> arr2 = da.random.uniform(0, 1000, size=(1000, 1000), chunks=100) >>> arr1 + arr2 dask.array In order to compute the actual data during testing, use the :func:`~dask.compute` method. In normal Satpy operations you will want the data to be evaluated as late as possible to improve performance so `compute` should only be used when needed. 
>>> (arr1 + arr2).compute() array([[ 898.08811639, 1236.96107629, 1154.40255292, ..., 1537.50752674, 1563.89278664, 433.92598566], [ 1657.43843608, 1063.82390257, 1265.08687916, ..., 1103.90421234, 1721.73564104, 1276.5424228 ], [ 1620.11393216, 212.45816261, 771.99348555, ..., 1675.6561068 , 585.89123159, 935.04366354], ..., [ 1533.93265862, 1103.33725432, 191.30794159, ..., 520.00434673, 426.49238283, 1090.61323471], [ 816.6108554 , 1526.36292498, 412.91953023, ..., 982.71285721, 699.087645 , 1511.67447362], [ 1354.6127365 , 1671.24591983, 1144.64848757, ..., 1247.37586051, 1656.50487092, 978.28184726]]) Dask also provides `cos`, `log` and other mathematical function, that you can use with :func:`da.cos ` and :func:`da.log `. However, since satpy uses xarrays as standard data structure, prefer the xarray functions when possible (they call in turn the dask counterparts when possible). Wrapping non-dask friendly functions ************************************ Some operations are not supported by dask yet or are difficult to convert to take full advantage of dask's multithreaded operations. In these cases you can wrap a function to run on an entire dask array when it is being computed and pass on the result. Note that this requires fully computing all of the dask inputs to the function and are passed as a numpy array or in the case of an XArray DataArray they will be a DataArray with a numpy array underneath. You should *NOT* use dask functions inside the delayed function. .. code-block:: python import dask import dask.array as da def _complex_operation(my_arr1, my_arr2): return my_arr1 + my_arr2 delayed_result = dask.delayed(_complex_operation)(my_dask_arr1, my_dask_arr2) # to create a dask array to use in the future my_new_arr = da.from_delayed(delayed_result, dtype=my_dask_arr1.dtype, shape=my_dask_arr1.shape) Dask Delayed objects can also be computed ``delayed_result.compute()`` if the array is not needed or if the function doesn't return an array. http://dask.pydata.org/en/latest/array-api.html#dask.array.from_delayed Map dask blocks to non-dask friendly functions ********************************************** If the complicated operation you need to perform can be vectorized and does not need the entire data array to do its operations you can use :func:`da.map_blocks ` to get better performance than creating a delayed function. Similar to delayed functions the inputs to the function are fully computed DataArrays or numpy arrays, but only the individual chunks of the dask array at a time. Note that ``map_blocks`` must be provided dask arrays and won't function properly on XArray DataArrays. It is recommended that the function object passed to ``map_blocks`` **not** be an internal function (a function defined inside another function) or it may be unserializable and can cause issues in some environments. .. code-block:: python my_new_arr = da.map_blocks(_complex_operation, my_dask_arr1, my_dask_arr2, dtype=my_dask_arr1.dtype) Helpful functions ***************** - :func:`~dask.array.core.map_blocks` - :func:`~dask.array.map_overlap` - :func:`~dask.array.core.atop` - :func:`~dask.array.store` - :func:`~dask.array.tokenize` - :func:`~dask.compute` - :doc:`delayed` - :func:`~dask.array.rechunk` - :attr:`~dask.array.Array.vindex` satpy-0.34.0/doc/source/doi_role.py000066400000000000000000000036651420401153000171510ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Create sphinx roles for referencing the DOI of a published paper. Extension to add links to DOIs. 
With this extension you can use e.g. :doi:`10.1016/S0022-2836(05)80360-2` in your documents. This will create a link to a DOI resolver (``https://doi.org/10.1016/S0022-2836(05)80360-2``). The link caption will be the raw DOI. You can also give an explicit caption, e.g. :doi:`Basic local alignment search tool <10.1016/S0022-2836(05)80360-2>`. :copyright: Copyright 2015 Jon Lund Steffensen. Based on extlinks by the Sphinx team. :license: BSD. """ from docutils import nodes, utils from sphinx.util.nodes import split_explicit_title def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None): if options is None: options = {} if content is None: content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) full_url = 'https://doi.org/' + part if not has_explicit_title: title = 'DOI:' + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None): if options is None: options = {} if content is None: content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) full_url = 'https://arxiv.org/abs/' + part if not has_explicit_title: title = 'arXiv:' + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def setup_link_role(app): app.add_role('doi', doi_role, override=True) app.add_role('DOI', doi_role, override=True) app.add_role('arXiv', arxiv_role, override=True) app.add_role('arxiv', arxiv_role, override=True) def setup(app): app.connect('builder-inited', setup_link_role) return {'version': '0.1', 'parallel_read_safe': True} satpy-0.34.0/doc/source/enhancements.rst000066400000000000000000000053221420401153000201750ustar00rootroot00000000000000============ Enhancements ============ Built-in enhancement methods ============================ stretch ------- The most basic operation is to stretch the image so that the data fits to the output format. There are many different ways to stretch the data, which are configured by giving them in `kwargs` dictionary, like in the example above. The default, if nothing else is defined, is to apply a linear stretch. For more details, see :ref:`enhancing the images `. linear ****** As the name suggests, linear stretch converts the input values to output values in a linear fashion. By default, 5% of the data is cut on both ends of the scale, but these can be overridden with ``cutoffs=(0.005, 0.005)`` argument:: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear cutoffs: [0.003, 0.005] .. note:: This enhancement is currently not optimized for dask because it requires getting minimum/maximum information for the entire data array. crude ***** The crude stretching is used to limit the input values to a certain range by clipping the data. This is followed by a linear stretch with no cutoffs specified (see above). Example:: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [100, 100, 100] It is worth noting that this stretch can also be used to _invert_ the data by giving larger values to the min_stretch than to max_stretch. histogram ********* gamma ----- invert ------ crefl_scaling ------------- Deprecated. Use 'piecewise_linear_stretch' instead. piecewise_linear_stretch ------------------------ Use :func:`numpy.interp` to linearly interpolate data to a new range. 
See :func:`satpy.enhancements.piecewise_linear_stretch` for more information and examples. cira_stretch ------------ Logarithmic stretch based on a cira recipe. reinhard_to_srgb ---------------- Stretch method based on the Reinhard algorithm, using luminance. The function includes conversion to sRGB colorspace. Reinhard, Erik & Stark, Michael & Shirley, Peter & Ferwerda, James. (2002). Photographic Tone Reproduction For Digital Images. ACM Transactions on Graphics. :doi: `21. 10.1145/566654.566575` lookup ------ colorize -------- palettize --------- three_d_effect -------------- The `three_d_effect` enhancement adds an 3D look to an image by convolving with a 3x3 kernel. User can adjust the strength of the effect by determining the weight (default: 1.0). Example:: - name: 3d_effect method: !!python/name:satpy.enhancements.three_d_effect kwargs: weight: 1.0 btemp_threshold --------------- satpy-0.34.0/doc/source/examples/000077500000000000000000000000001420401153000166075ustar00rootroot00000000000000satpy-0.34.0/doc/source/examples/fci_l1c_natural_color.rst000066400000000000000000000035321420401153000235700ustar00rootroot00000000000000MTG FCI - Natural Color Example =============================== Satpy includes a reader for the Meteosat Third Generation (MTG) FCI Level 1c data. The following Python code snippet shows an example on how to use Satpy to generate a Natural Color RGB composite over the European area. .. warning:: This example is currently a work in progress. Some of the below code may not work with the currently released version of Satpy. Additional updates to this example will be coming soon. .. code-block:: python from satpy.scene import Scene from satpy import find_files_and_readers # define path to FCI test data folder path_to_data = 'your/path/to/FCI/data/folder/' # find files and assign the FCI reader files = find_files_and_readers(base_dir=path_to_data, reader='fci_l1c_nc') # create an FCI scene from the selected files scn = Scene(filenames=files) # print available dataset names for this scene (e.g. 'vis_04', 'vis_05','ir_38',...) print(scn.available_dataset_names()) # print available composite names for this scene (e.g. 'natural_color', 'airmass', 'convection',...) print(scn.available_composite_names()) # load the datasets/composites of interest scn.load(['natural_color','vis_04'], upper_right_corner='NE') # note: the data inside the FCI files is stored upside down. The upper_right_corner='NE' argument # flips it automatically in upright position. # you can access the values of a dataset as a Numpy array with vis_04_values = scn['vis_04'].values # resample the scene to a specified area (e.g. "eurol1" for Europe in 1km resolution) scn_resampled = scn.resample("eurol", resampler='nearest', radius_of_influence=5000) # save the resampled dataset/composite to disk scn_resampled.save_dataset("natural_color", filename='./fci_natural_color_resampled.png') satpy-0.34.0/doc/source/examples/index.rst000066400000000000000000000061351420401153000204550ustar00rootroot00000000000000Examples ======== Satpy examples are available as Jupyter Notebooks on the `pytroll-examples `_ git repository. Some examples are described in further detail as separate pages in this documentation. They include python code, PNG images, and descriptions of what the example is doing. Below is a list of some of the examples and a brief summary. Additional examples can be found at the repository mentioned above or as explanations in the various sections of this documentation. .. 
toctree:: :hidden: :maxdepth: 1 fci_l1c_natural_color vii_l1b_nc .. list-table:: :header-rows: 1 * - Name - Description * - `Quickstart with MSG data `_ - Satpy quickstart for loading and processing satellite data, with MSG data in this examples * - `Cartopy Plot `_ - Plot a single VIIRS SDR granule using Cartopy and matplotlib * - `Himawari-8 AHI True Color `_ - Generate and resample a rayleigh corrected true color RGB from Himawari-8 AHI data * - `Sentinel-3 OLCI True Color `_ - Reading OLCI data from Sentinel 3 with Pytroll/Satpy * - `Sentinel 2 MSI true color `_ - Reading MSI data from Sentinel 2 with Pytroll/Satpy * - `Suomi-NPP VIIRS SDR True Color `_ - Generate a rayleigh corrected true color RGB from VIIRS I- and M-bands * - `Aqua/Terra MODIS True Color `_ - Generate and resample a rayleigh corrected true color RGB from MODIS * - `Sentinel 1 SAR-C False Color `_ - Generate a false color composite RGB from SAR-C polarized datasets * - `Level 2 EARS-NWC cloud products `_ - Reading Level 2 EARS-NWC cloud products * - `Level 2 MAIA cloud products `_ - Reading Level 2 MAIA cloud products * - :doc:`Meteosat Third Generation FCI Natural Color RGB ` - Generate Natural Color RGB from Meteosat Third Generation (MTG) FCI Level 1c data * - :doc:`Reading EPS-SG Visible and Infrared Imager (VII) with Pytroll ` - Read and visualize EPS-SG VII L1B test data and save it to an image satpy-0.34.0/doc/source/examples/vii_l1b_nc.rst000066400000000000000000000025051420401153000213500ustar00rootroot00000000000000EPS-SG VII netCDF Example =============================== Satpy includes a reader for the EPS-SG Visible and Infrared Imager (VII) Level 1b data. The following Python code snippet shows an example on how to use Satpy to read a channel and resample and save the image over the European area. .. warning:: This example is currently a work in progress. Some of the below code may not work with the currently released version of Satpy. Additional updates to this example will be coming soon. .. code-block:: python import glob from satpy.scene import Scene # find the file/files to be read filenames = glob.glob('/path/to/VII/data/W_xx-eumetsat-darmstadt,SAT,SGA1-VII-1B-RAD_C_EUMT_20191007055100*') # create a VII scene from the selected granule(s) scn = Scene(filenames=filenames, reader='vii_l1b_nc') # print available dataset names for this scene print(scn.available_dataset_names()) # load the datasets of interest # NOTE: only radiances are supported for test data scn.load(["vii_668"], calibration="radiance") # resample the scene to a specified area (e.g. "eurol1" for Europe in 1km resolution) eur = scn.resample("eurol", resampler='nearest', radius_of_influence=5000) # save the resampled data to disk eur.save_dataset("vii_668", filename='./vii_668_eur.png') satpy-0.34.0/doc/source/faq.rst000066400000000000000000000137421420401153000163010ustar00rootroot00000000000000FAQ === Below you'll find frequently asked questions, performance tips, and other topics that don't really fit in to the rest of the Satpy documentation. If you have any other questions that aren't answered here feel free to make an issue on GitHub or talk to us on the Slack team or mailing list. See the :ref:`contributing ` documentation for more information. .. contents:: Topics :depth: 1 :local: Why is Satpy slow on my powerful machine? ----------------------------------------- Satpy depends heavily on the dask library for its performance. However, on some systems dask's default settings can actually hurt performance. 
By default dask will create a "worker" for each logical core on your system. In most systems you have twice as many logical cores (also known as threaded cores) as physical cores. Managing and communicating with all of these workers can slow down dask, especially when they aren't all being used by most Satpy calculations. One option is to limit the number of workers by doing the following at the **top** of your python code: .. code-block:: python import dask dask.config.set(num_workers=8) # all other Satpy imports and code This will limit dask to using 8 workers. Typically numbers between 4 and 8 are good starting points. Number of workers can also be set from an environment variable before running the python script, so code modification isn't necessary: .. code-block:: bash DASK_NUM_WORKERS=4 python myscript.py Similarly, if you have many workers processing large chunks of data you may be using much more memory than you expect. If you limit the number of workers *and* the size of the data chunks being processed by each worker you can reduce the overall memory usage. Default chunk size can be configured in Satpy by setting the following environment variable: .. code-block:: bash export PYTROLL_CHUNK_SIZE=2048 This could also be set inside python using ``os.environ``, but must be set **before** Satpy is imported. This value defaults to 4096, meaning each chunk of data will be 4096 rows by 4096 columns. In the future setting this value will change to be easier to set in python. Why multiple CPUs are used even with one worker? ------------------------------------------------ Many of the underlying Python libraries use math libraries like BLAS and LAPACK written in C or FORTRAN, and they are often compiled to be multithreaded. If necessary, it is possible to force the number of threads they use by setting an environment variable: .. code-block:: bash OMP_NUM_THREADS=2 python myscript.py What is the difference between number of workers and number of threads? ----------------------------------------------------------------------- The above questions handle two different stages of parallellization: Dask workers and math library threading. The number of Dask workers affect how many separate tasks are started, effectively telling how many chunks of the data are processed at the same time. The more workers are in use, the higher also the memory usage will be. The number of threads determine how much parallel computations are run for the chunk handled by each worker. This has minimal effect on memory usage. The optimal setup is often a mix of these two settings, for example .. code-block:: bash DASK_NUM_WORKERS=2 OMP_NUM_THREADS=4 python myscript.py would create two workers, and each of them would process their chunk of data using 4 threads when calling the underlying math libraries. How do I avoid memory errors? ----------------------------- If your environment is using many dask workers, it may be using more memory than it needs to be using. See the "Why is Satpy slow on my powerful machine?" question above for more information on changing Satpy's memory usage. Reducing GDAL output size? -------------------------- Sometimes GDAL-based products, like geotiffs, can be much larger than expected. This can be caused by GDAL's internal memory caching conflicting with dask's chunking of the data arrays. Modern versions of GDAL default to using 5% of available memory for holding on to data before compressing it and writing it to disk. 
On more powerful systems (~128GB of memory) this is usually not a problem. However, on low memory systems this may mean that GDAL is only compressing a small amount of data before writing it to disk. This results in poor compression and large overhead from the many small compressed areas. One solution is to increase the chunk size used by dask but this can result in poor performance during computation. Another solution is to increase ``GDAL_CACHEMAX``, an environment variable that GDAL uses. This defaults to ``"5%"``, but can be increased:: export GDAL_CACHEMAX="15%" For more information see `GDAL's documentation `_. How do I use multi-threaded compression when writing GeoTIFFs? -------------------------------------------------------------- The GDAL library's GeoTIFF driver has a lot of options for changing how your GeoTIFF is formatted and written. One of the most important ones when it comes to writing GeoTIFFs is using multiple threads to compress your data. By default Satpy will use DEFLATE compression which can be slower to compress than other options out there, but faster to read. GDAL gives us the option to control the number of threads used during compression by specifying the ``num_threads`` option. This option defaults to ``1``, but it is recommended to set this to at least the same number of dask workers you use. Do this by adding ``num_threads`` to your `save_dataset` or `save_datasets` call:: scn.save_datasets(base_dir='/tmp', num_threads=8) Satpy also stores our data as "tiles" instead of "stripes" which is another way to get more efficient compression of our GeoTIFF image. You can disable this with ``tiled=False``. See the `GDAL GeoTIFF documentation `_ for more information on the creation options available including other compression choices. satpy-0.34.0/doc/source/index.rst000066400000000000000000000225211420401153000166340ustar00rootroot00000000000000===================== Satpy's Documentation ===================== Satpy is a python library for reading, manipulating, and writing data from remote-sensing earth-observing satellite instruments. Satpy provides users with readers that convert geophysical parameters from various file formats to the common Xarray :class:`~xarray.DataArray` and :class:`~xarray.Dataset` classes for easier interoperability with other scientific python libraries. Satpy also provides interfaces for creating RGB (Red/Green/Blue) images and other composite types by combining data from multiple instrument bands or products. Various atmospheric corrections and visual enhancements are provided for improving the usefulness and quality of output images. Output data can be written to multiple output file formats such as PNG, GeoTIFF, and CF standard NetCDF files. Satpy also allows users to resample data to geographic projected grids (areas). Satpy is maintained by the open source `Pytroll `_ group. The Satpy library acts as a high-level abstraction layer on top of other libraries maintained by the Pytroll group including: - `pyresample `_ - `pyspectral `_ - `trollimage `_ - `pycoast `_ - `pydecorate `_ - `python-geotiepoints `_ - `pyninjotiff `_ Go to the Satpy project_ page for source code and downloads. Satpy is designed to be easily extendable to support any earth observation satellite by the creation of plugins (readers, compositors, writers, etc). The table at the bottom of this page shows the input formats supported by the base Satpy installation. .. 
note:: Satpy's interfaces are not guaranteed stable and may change until version 1.0 when backwards compatibility will be a main focus. .. versionchanged:: 0.20.0 Dropped Python 2 support. .. _project: http://github.com/pytroll/satpy .. toctree:: :maxdepth: 2 overview install config data_download examples/index quickstart readers composites resample enhancements writers multiscene dev_guide/index .. toctree:: :maxdepth: 1 Satpy API faq Release Notes Security Policy .. _reader_table: .. list-table:: Satpy Readers :header-rows: 1 :widths: 45 25 30 * - Description - Reader name - Status * - MSG (Meteosat 8 to 11) SEVIRI data in HRIT format - `seviri_l1b_hrit` - Nominal * - MSG (Meteosat 8 to 11) SEVIRI data in native format - `seviri_l1b_native` - Nominal. * - MSG (Meteosat 8 to 11) SEVIRI data in netCDF format - `seviri_l1b_nc` - | HRV channel not supported, incomplete metadata | in the files. EUMETSAT has been notified. * - MSG (Meteosat 8 to 11) L2 products in BUFR format - `seviri_l2_bufr` - AMV BUFR products not supported yet. * - MSG (Meteosat 8 to 11) L2 products in GRIB2 format - `seviri_l2_grib` - In development, CLM, OCA and FIR products supported * - MFG (Meteosat 2 to 7) MVIRI data in netCDF format (FIDUCEO FCDR) - `mviri_l1b_fiduceo_nc` - Beta * - Himawari 8 and 9 AHI data in HSD format - `ahi_hsd` - Nominal * - Himawari 8 and 9 AHI data in HRIT format - `ahi_hrit` - Nominal * - Himawari 8 and 9 AHI data in Gridded binary format, from http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html - `ahi_l1b_gridded_bin` - Nominal * - MTSAT-1R JAMI data in JMA HRIT format - `jami_hrit` - Beta * - MTSAT-2 Imager data in JMA HRIT format - `mtsat2-imager_hrit` - Beta * - GOES-R imager data in netcdf format - `abi_l1b` - Nominal * - NOAA GOES-R ABI L2+ products in netcdf format - `abi_l2_nc` - Beta * - GOES 11 to 15 imager data in HRIT format - `goes-imager_hrit` - Nominal * - GOES 8 to 15 imager data in netCDF format (from NOAA CLASS) - `goes-imager_nc` - Beta * - Electro-L N2 MSU-GS data in HRIT format - `electrol_hrit` - Nominal * - NOAA 15 to 19, Metop A to C AVHRR data in AAPP format - `avhrr_l1b_aapp` - Nominal * - Metop A to C AVHRR in native level 1 format - `avhrr_l1b_eps` - Nominal * - Tiros-N, NOAA 7 to 19 AVHRR data in GAC and LAC format - `avhrr_l1b_gaclac` - Nominal * - NOAA 15 to 19 AVHRR data in raw HRPT format - `avhrr_l1b_hrpt` - In development * - GCOM-W1 AMSR2 data in HDF5 format - `amsr2_l1b` - Nominal * - MTG FCI Level 1C data in NetCDF format - `fci_l1c_nc` - In development (beta for FDHSI files, HRFI not supported yet) * - Callipso Caliop Level 2 Cloud Layer data (v3) in EOS-hdf4 format - `caliop_l2_cloud` - In development * - Terra and Aqua MODIS data in EOS-hdf4 level-1 format as produced by IMAPP and IPOPP or downloaded from LAADS - `modis_l1b` - Nominal * - NWCSAF GEO 2016 products in netCDF4 format (limited to SEVIRI) - `nwcsaf-geo` - In development * - NWCSAF PPS 2014, 2018 products in netCDF4 format - `nwcsaf-pps_nc` - | Not yet support for remapped netCDF products. | Only the standard swath based output is supported. 
| CPP products not supported yet * - Sentinel-1 A and B SAR-C data in SAFE format - `sar-c_safe` - Nominal * - Sentinel-2 A and B MSI data in SAFE format - `msi_safe` - Nominal * - Sentinel-3 A and B OLCI Level 1B data in netCDF4 format - `olci_l1b` - Nominal * - Sentinel-3 A and B OLCI Level 2 data in netCDF4 format - `olci_l2` - Nominal * - Sentinel-3 A and B SLSTR data in netCDF4 format - `slstr_l1b` - In development * - OSISAF SST data in GHRSST (netcdf) format - `ghrsst_l3c_sst` - In development * - NUCAPS EDR Retrieval in NetCDF4 format - `nucaps` - Nominal * - NOAA Level 2 ACSPO SST data in netCDF4 format - `acspo` - Nominal * - GEOstationary Cloud Algorithm Test-bed (GEOCAT) - `geocat` - Nominal * - The Clouds from AVHRR Extended (CLAVR-x) - `clavrx` - Nominal * - SNPP VIIRS data in HDF5 SDR format - `viirs_sdr` - Nominal * - SNPP VIIRS data in netCDF4 L1B format - `viirs_l1b` - Nominal * - SNPP VIIRS SDR data in HDF5 Compact format - `viirs_compact` - Nominal * - AAPP MAIA VIIRS and AVHRR products in hdf5 format - `maia` - Nominal * - VIIRS EDR Active Fires data in NetCDF4 & CSV .txt format - `viirs_edr_active_fires` - Beta * - VIIRS EDR Flood data in hdf4 format - `viirs_edr_flood` - Beta * - GRIB2 format - `grib` - Beta * - SCMI ABI L1B format - `abi_l1b_scmi` - Beta * - VIRR data in HDF5 format - `virr_l1b` - Beta * - MERSI-2 L1B data in HDF5 format - `mersi2_l1b` - Beta * - FY-4A AGRI L1 data in HDF5 format - `agri_l1` - Beta * - Vaisala Global Lightning Dataset GLD360 data in ASCII format - `vaisala_gld360` - Beta * - TROPOMI L2 data in NetCDF4 format - `tropomi_l2` - Beta * - Hydrology SAF products in GRIB format - `hsaf_grib` - | Beta | Only the h03, h03b, h05 and h05B products are supported at-present * - GEO-KOMPSAT-2 AMI L1B data in NetCDF4 format - `ami_l1b` - Beta * - GOES-R GLM Grided Level 2 in NetCDF4 format - `glm_l2` - Beta * - Sentinel-3 SLSTR SST data in NetCDF4 format - `slstr_l2` - Beta * - IASI level 2 SO2 in BUFR format - `iasi_l2_so2_bufr` - Beta * - HY-2B Scatterometer level 2b data in HDF5 format from both EUMETSAT and NSOAS - `hy2_scat_l2b_h5` - Beta * - OMPS EDR data in HDF5 format - `omps_edr` - Beta * - VII Level 2 in NetCDF4 format - `vii_l2_nc` - Beta * - VII Level 1b in NetCDF4 format - `vii_l1b_nc` - Beta * - MTG FCI Level 2 in NetCDF4 format - `fci_l2_nc` - Beta * - SMOS level 2 wind data in NetCDF4 format - `smos_l2_wind` - Beta * - AMSR2 level 2 wind data in HDF5 format - `amsr2_l2` - Beta * - GPM IMERG level 3 precipitation data in HDF5 format - `gpm_imerg` - Nominal * - AMSR2 level 2 GAASP in NetCDF4 format - `amsr2_l2_gaasp` - Beta * - MiRS level 2 Precipitation and Surface Products (IMG) in NetCDF4 format - `mirs` - Beta * - MIMIC Total Precipitable Water Product Reader in NetCDF format - mimicTPW2_comp - Beta * - SEADAS L2 Chlorphyll A product in HDF4 format - seadas_l2 - Beta * - AAPP L1C MHS format - `aapp_mhs_l1c` - Nominal * - AAPP L1C AMSU-B format - `aapp_amsub_l1c` - Beta * - Arctica-M (N1) MSU-GS/A data in HDF5 format - `msu_gsa_l1b` - Beta Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` satpy-0.34.0/doc/source/install.rst000066400000000000000000000074471420401153000172050ustar00rootroot00000000000000========================= Installation Instructions ========================= Satpy is available from conda-forge (via conda), PyPI (via pip), or from source (via pip+git). The below instructions show how to install stable versions of Satpy. 
For a development/unstable version see :ref:`devinstall`. Conda-based Installation ======================== Satpy can be installed into a conda environment by installing the package from the conda-forge channel. If you do not already have access to a conda installation, we recommend installing `miniconda `_ for the smallest and easiest installation. The commands below will use ``-c conda-forge`` to make sure packages are downloaded from the conda-forge channel. Alternatively, you can tell conda to always use conda-forge by running: .. code-block:: bash $ conda config --add channels conda-forge In a new conda environment -------------------------- We recommend creating a separate environment for your work with Satpy. To create a new environment and install Satpy all in one command you can run: .. code-block:: bash $ conda create -c conda-forge -n my_satpy_env python satpy You must then activate the environment so any future python or conda commands will use this environment. .. code-block:: $ conda activate my_satpy_env This method of creating an environment with Satpy (and optionally other packages) installed can generally be created faster than creating an environment and then later installing Satpy and other packages (see the section below). In an existing environment -------------------------- .. note:: It is recommended that when first exploring Satpy, you create a new environment specifically for this rather than modifying one used for other work. If you already have a conda environment, it is activated, and would like to install Satpy into it, run the following: .. code-block:: bash $ conda install -c conda-forge satpy .. note:: Satpy only automatically installs the dependencies needed to process the most common use cases. Additional dependencies may need to be installed with conda or pip if import errors are encountered. To check your installation use the ``check_satpy`` function discussed :ref:`here `. Pip-based Installation ====================== Satpy is available from the Python Packaging Index (PyPI). A sandbox environment for `satpy` can be created using `Virtualenv `_. To install the `satpy` package and the minimum amount of python dependencies: .. code-block:: bash $ pip install satpy Additional dependencies can be installed as "extras" and are grouped by reader, writer, or feature added. Extras available can be found in the `setup.py `_ file. They can be installed individually: .. code-block:: bash $ pip install "satpy[viirs_sdr]" Or all at once, although this isn't recommended due to the large number of dependencies: .. code-block:: bash $ pip install "satpy[all]" Ubuntu System Python Installation ================================= To install Satpy on an Ubuntu system we recommend using virtual environments to separate Satpy and its dependencies from the rest of the system. Note that these instructions require using "sudo" privileges which may not be available to all users and can be very dangerous. The following instructions attempt to install some Satpy dependencies using the Ubuntu `apt` package manager to ease installation. Replace `/path/to/pytroll-env` with the environment to be created. .. 
code-block:: bash $ sudo apt-get install python-pip python-gdal $ sudo pip install virtualenv $ virtualenv /path/to/pytroll-env $ source /path/to/pytroll-env/bin/activate $ pip install satpy satpy-0.34.0/doc/source/modifiers.rst000066400000000000000000000046741420401153000175170ustar00rootroot00000000000000Modifiers ========= Modifiers are filters applied to datasets prior to computing composites. They take at least one input (a dataset) and have exactly one output (the same dataset, modified). They can take additional input datasets or parameters. Modifiers are defined in composites files in ``etc/composites`` within ``$SATPY_CONFIG_PATH``. The instruction to use a certain modifier can be contained in a composite definition or in a reader definition. If it is defined in a composite definition, it is applied upon constructing the composite. When using built-in composites, Satpy users do not need to understand the mechanics of modifiers, as they are applied automatically. The :doc:`composites` documentation contains information on how to apply modifiers when creating new composites. Some readers read data where certain modifiers are already applied. Here, the reader definition will refer to the Satpy modifier. This marking adds the modifier to the metadata to prevent it from being applied again upon composite calculation. Commonly used modifiers are listed in the table below. Further details on those modifiers can be found in the linked API documentation. .. list-table:: Commonly used modifiers :header-rows: 1 * - Label - Class - Description * - ``sunz_corrected`` - :class:`~satpy.modifiers.geometry.SunZenithCorrector` - Modifies solar channels for the solar zenith angle to provide smoother images. * - ``effective_solar_pathlength_corrected`` - :class:`~satpy.modifiers.geometry.EffectiveSolarPathLengthCorrector` - Modifies solar channels for atmospheric path length of solar radiation. * - ``nir_reflectance`` - :class:`~satpy.modifiers.spectral.NIRReflectance` - Calculates reflective part of channels at the edge of solar and terrestrial radiation (3.7 µm or 3.9 µm). * - ``nir_emissive`` - :class:`~satpy.modifiers.spectral.NIREmissivePartFromReflectance` - Calculates emissive part of channels at the edge of solar and terrestrial radiation (3.7 µm or 3.9 µm) * - ``rayleigh_corrected`` - :class:`~satpy.modifiers.atmosphere.PSPRayleighReflectance` - Modifies solar channels to filter out the visual impact of rayleigh scattering. A complete list can be found in the `etc/composites `_ source code and in the :mod:`~satpy.modifiers` module documentation. satpy-0.34.0/doc/source/multiscene.rst000066400000000000000000000310021420401153000176670ustar00rootroot00000000000000MultiScene (Experimental) ========================= Scene objects in Satpy are meant to represent a single geographic region at a specific single instant in time or range of time. This means they are not suited for handling multiple orbits of polar-orbiting satellite data, multiple time steps of geostationary satellite data, or other special data cases. To handle these cases Satpy provides the `MultiScene` class. The below examples will walk through some basic use cases of the MultiScene. .. warning:: These features are still early in development and may change overtime as more user feedback is received and more features added. MultiScene Creation ------------------- There are two ways to create a ``MultiScene``. 
Either by manually creating and providing the scene objects, >>> from satpy import Scene, MultiScene >>> from glob import glob >>> scenes = [ ... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')), ... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5')) ... ] >>> mscn = MultiScene(scenes) >>> mscn.load(['I04']) or by using the :meth:`MultiScene.from_files ` class method to create a ``MultiScene`` from a series of files. This uses the :func:`~satpy.readers.group_files` utility function to group files by start time or other filenames parameters. >>> from satpy import MultiScene >>> from glob import glob >>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b') >>> mscn.load(['C01', 'C02']) .. versionadded:: 0.12 The ``from_files`` and ``group_files`` functions were added in Satpy 0.12. See below for an alternative solution. For older versions of Satpy we can manually create the `Scene` objects used. The :func:`~glob.glob` function and for loops are used to group files into Scene objects that, if used individually, could load the data we want. The code below is equivalent to the ``from_files`` code above: >>> from satpy import Scene, MultiScene >>> from glob import glob >>> scene_files = [] >>> for time_step in ['1800', '1810', '1820', '1830']: ... scene_files.append(glob('/data/abi/day_1/*C0[12]*s???????{}*.nc'.format(time_step))) >>> scenes = [ ... Scene(reader='abi_l1b', filenames=files) for files in sorted(scene_files) ... ] >>> mscn = MultiScene(scenes) >>> mscn.load(['C01', 'C02']) Blending Scenes in MultiScene ----------------------------- Scenes contained in a MultiScene can be combined in different ways. Stacking scenes *************** The code below uses the :meth:`~satpy.multiscene.MultiScene.blend` method of the ``MultiScene`` object to stack two separate orbits from a VIIRS sensor. By default the ``blend`` method will use the :func:`~satpy.multiscene.stack` function which uses the first dataset as the base of the image and then iteratively overlays the remaining datasets on top. >>> from satpy import Scene, MultiScene >>> from glob import glob >>> from pyresample.geometry import AreaDefinition >>> my_area = AreaDefinition(...) >>> scenes = [ ... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')), ... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5')) ... ] >>> mscn = MultiScene(scenes) >>> mscn.load(['I04']) >>> new_mscn = mscn.resample(my_area) >>> blended_scene = new_mscn.blend() >>> blended_scene.save_datasets() Grouping Similar Datasets ^^^^^^^^^^^^^^^^^^^^^^^^^ By default, ``MultiScene`` only operates on datasets shared by all scenes. Use the :meth:`~satpy.multiscene.MultiScene.group` method to specify groups of datasets that shall be treated equally by ``MultiScene``, even if their names or wavelengths are different. Example: Stacking scenes from multiple geostationary satellites acquired at roughly the same time. First, create scenes and load datasets individually: >>> from satpy import Scene >>> from glob import glob >>> h8_scene = satpy.Scene(filenames=glob('/data/HS_H08_20200101_1200*'), ... reader='ahi_hsd') >>> h8_scene.load(['B13']) >>> g16_scene = satpy.Scene(filenames=glob('/data/OR_ABI*s20200011200*.nc'), ... reader='abi_l1b') >>> g16_scene.load(['C13']) >>> met10_scene = satpy.Scene(filenames=glob('/data/H-000-MSG4*-202001011200-__'), ... 
reader='seviri_l1b_hrit') >>> met10_scene.load(['IR_108']) Now create a ``MultiScene`` and group the three similar IR channels together: >>> from satpy import MultiScene, DataQuery >>> mscn = MultiScene([h8_scene, g16_scene, met10_scene]) >>> groups = {DataQuery('IR_group', wavelength=(10, 11, 12)): ['B13', 'C13', 'IR_108']} >>> mscn.group(groups) Finally, resample the datasets to a common grid and blend them together: >>> from pyresample.geometry import AreaDefinition >>> my_area = AreaDefinition(...) >>> resampled = mscn.resample(my_area, reduce_data=False) >>> blended = resampled.blend() # you can also use a custom blend function You can access the results via ``blended['IR_group']``. Timeseries ********** Using the :meth:`~satpy.multiscene.MultiScene.blend` method with the :func:`~satpy.multiscene.timeseries` function will combine multiple scenes from different time slots by time. A single `Scene` with each dataset/channel extended by the time dimension will be returned. If used together with the :meth:`~satpy.scene.Scene.to_geoviews` method, creation of interactive timeseries Bokeh plots is possible. >>> from satpy import Scene, MultiScene >>> from satpy.multiscene import timeseries >>> from glob import glob >>> from pyresample.geometry import AreaDefinition >>> my_area = AreaDefinition(...) >>> scenes = [ ... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')), ... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5')) ... ] >>> mscn = MultiScene(scenes) >>> mscn.load(['I04']) >>> new_mscn = mscn.resample(my_area) >>> blended_scene = new_mscn.blend(blend_function=timeseries) >>> blended_scene['I04'] dask.array Coordinates: * time (time) datetime64[ns] 2012-02-25T18:01:24.570942 2012-02-25T18:02:49.975797 Dimensions without coordinates: y, x Saving frames of an animation ----------------------------- The MultiScene can take "frames" of data and join them together in a single animation movie file. Saving animations requires the `imageio` python library and for most available formats the ``ffmpeg`` command line tool suite should also be installed. The below example saves a series of GOES-EAST ABI channel 1 and channel 2 frames to MP4 movie files. >>> from satpy import Scene, MultiScene >>> from glob import glob >>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b') >>> mscn.load(['C01', 'C02']) >>> mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2) This will compute one video frame (image) at a time and write it to the MPEG-4 video file. For users with more powerful systems it is possible to use the ``client`` and ``batch_size`` keyword arguments to compute multiple frames in parallel using the dask ``distributed`` library (if installed). See the :doc:`dask distributed ` documentation for information on creating a ``Client`` object. If working on a cluster you may want to use :doc:`dask jobqueue ` to take advantage of multiple nodes at a time. It is possible to add an overlay or decoration to each frame of an animation. For text added as a decoration, string substitution will be applied based on the attributes of the dataset, for example: >>> mscn.save_animation( ... "{name:s}_{start_time:%Y%m%d_%H%M}.mp4", ... enh_args={ ... "decorate": { ... "decorate": [ ... {"text": { ... "txt": "time {start_time:%Y-%m-%d %H:%M}", ... "align": { ... "top_bottom": "bottom", ... "left_right": "right"}, ... "font": '/usr/share/fonts/truetype/arial.ttf', ... "font_size": 20, ... "height": 30, ... "bg": "black", ... 
"bg_opacity": 255, ... "line": "white"}}]}}) If your file covers ABI MESO data for an hour for channel 2 lasting from 2020-04-12 01:00-01:59, then the output file will be called ``C02_20200412_0100.mp4`` (because the first dataset/frame corresponds to an image that started to be taken at 01:00), consist of sixty frames (one per minute for MESO data), and each frame will have the start time for that frame floored to the minute blended into the frame. Note that this text is "burned" into the video and cannot be switched on or off later. .. warning:: GIF images, although supported, are not recommended due to the large file sizes that can be produced from only a few frames. Saving multiple scenes ---------------------- The ``MultiScene`` object includes a :meth:`~satpy.multiscene.MultiScene.save_datasets` method for saving the data from multiple Scenes to disk. By default this will operate on one Scene at a time, but similar to the ``save_animation`` method above this method can accept a dask distributed ``Client`` object via the ``client`` keyword argument to compute scenes in parallel (see documentation above). Note however that some writers, like the ``geotiff`` writer, do not support multi-process operations at this time and will fail when used with dask distributed. To save multiple Scenes use: >>> from satpy import Scene, MultiScene >>> from glob import glob >>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b') >>> mscn.load(['C01', 'C02']) >>> mscn.save_datasets(base_dir='/path/for/output') Combining multiple readers -------------------------- .. versionadded:: 0.23 The :meth:`~satpy.multiscene.MultiScene.from_files` constructor allows to automatically combine multiple readers into a single MultiScene. It is no longer necessary for the user to create the :class:`~satpy.scene.Scene` objects themselves. For example, you can combine Advanced Baseline Imager (ABI) and Global Lightning Mapper (GLM) measurements. Constructing a multi-reader MultiScene requires more parameters than a single-reader MultiScene, because Satpy can poorly guess how to group files belonging to different instruments. For an example creating a video with lightning superimposed on ABI channel 14 (11.2 µm) using the built-in composite ``C14_flash_extent_density``, which superimposes flash extent density from GLM (read with the :class:`~satpy.readers.glm_l2.NCGriddedGLML2` or ``glm_l2`` reader) on ABI channel 14 data (read with the :class:`~satpy.readers.abi_l1b.NC_ABI_L1B` or ``abi_l1b`` reader), and therefore needs Scene objects that combine both readers: >>> glm_dir = "/path/to/GLMC/" >>> abi_dir = "/path/to/ABI/" >>> ms = satpy.MultiScene.from_files( ... glob.glob(glm_dir + "OR_GLM-L2-GLMC-M3_G16_s202010418*.nc") + ... glob.glob(abi_dir + "C*/OR_ABI-L1b-RadC-M6C*_G16_s202010418*_e*_c*.nc"), ... reader=["glm_l2", "abi_l1b"], ... ensure_all_readers=True, ... group_keys=["start_time"], ... time_threshold=30) >>> ms.load(["C14_flash_extent_density"]) >>> ms = ms.resample(ms.first_scene["C14"].attrs["area"]) >>> ms.save_animation("/path/for/output/{name:s}_{start_time:%Y%m%d_%H%M}.mp4") In this example, we pass to :meth:`~satpy.multiscene.MultiScene.from_files` the additional parameters ``ensure_all_readers=True, group_keys=["start_time"], time_threshold=30`` so we only get scenes at times that both ABI and GLM have a file starting within 30 seconds from each other, and ignore all other differences for the purposes of grouping the two. 
For this example, the ABI files occur every 5 minutes but the GLM files (processed with glmtools) every minute. Scenes where there is a GLM file without an ABI file starting within at most ±30 seconds are skipped. The ``group_keys`` and ``time_threshold`` keyword arguments are processed by the :func:`~satpy.readers.group_files` function. The heavy work of blending the two instruments together is performed by the :class:`~satpy.composites.BackgroundCompositor` class through the `"C14_flash_extent_density"` composite. satpy-0.34.0/doc/source/overview.rst000066400000000000000000000144571420401153000174040ustar00rootroot00000000000000======== Overview ======== Satpy is designed to provide easy access to common operations for processing meteorological remote sensing data. Any details needed to perform these operations are configured internally to Satpy meaning users should not have to worry about *how* something is done, only ask for what they want. Most of the features provided by Satpy can be configured by keyword arguments (see the :doc:`API Documentation ` or other specific section for more details). For more complex customizations or added features Satpy uses a set of configuration files that can be modified by the user. The various components and concepts of Satpy are described below. The :doc:`quickstart` guide also provides simple example code for the available features of Satpy. Scene ===== Satpy provides most of its functionality through the :class:`~satpy.scene.Scene` class. This acts as a container for the datasets being operated on and provides methods for acting on those datasets. It attempts to reduce the amount of low-level knowledge needed by the user while still providing a pythonic interface to the functionality underneath. A Scene object represents a single geographic region of data, typically at a single continuous time range. It is possible to combine Scenes to form a Scene with multiple regions or multiple time observations, but it is not guaranteed that all functionality works in these situations. DataArrays ========== Satpy's lower-level container for data is the :class:`xarray.DataArray`. For historical reasons DataArrays are often referred to as "Datasets" in Satpy. These objects act similar to normal numpy arrays, but add additional metadata and attributes for describing the data. Metadata is stored in a ``.attrs`` dictionary and named dimensions can be accessed in a ``.dims`` attribute, along with other attributes. In most use cases these objects can be operated on like normal NumPy arrays with special care taken to make sure the metadata dictionary contains expected values. See the XArray documentation for more info on handling :class:`xarray.DataArray` objects. Additionally, Satpy uses a special form of DataArrays where data is stored in :class:`dask.array.Array` objects which allows Satpy to perform multi-threaded lazy operations vastly improving the performance of processing. For help on developing with dask and xarray see :doc:`dev_guide/xarray_migration` or the documentation for the specific project. To uniquely identify ``DataArray`` objects Satpy uses `DataID`. A ``DataID`` consists of various pieces of available metadata. This usually includes `name` and `wavelength` as identifying metadata, but can also include `resolution`, `calibration`, `polarization`, and additional `modifiers` to further distinguish one dataset from another. For more information on `DataID` objects, have a look a :doc:`dev_guide/satpy_internals`. .. 
warning:: XArray includes other object types called "Datasets". These are different from the "Datasets" mentioned in Satpy. Reading ======= One of the biggest advantages of using Satpy is the large number of input file formats that it can read. It encapsulates this functionality into individual :doc:`readers`. Satpy Readers handle all of the complexity of reading whatever format they represent. Meteorological Satellite file formats can be extremely complex and formats are rarely reused across satellites or instruments. No matter the format, Satpy's Reader interface is meant to provide a consistent data loading interface while still providing flexibility to add new complex file formats. Compositing =========== Many users of satellite imagery combine multiple sensor channels to bring out certain features of the data. This includes using one dataset to enhance another, combining 3 or more datasets in to an RGB image, or any other combination of datasets. Satpy comes with a lot of common composite combinations built-in and allows the user to request them like any other dataset. Satpy also makes it possible to create your own custom composites and have Satpy treat them like any other dataset. See :doc:`composites` for more information. Resampling ========== Satellite imagery data comes in two forms when it comes to geolocation, native satellite swath coordinates and uniform gridded projection coordinates. It is also common to see the channels from a single sensor in multiple resolutions, making it complicated to combine or compare the datasets. Many use cases of satellite data require the data to be in a certain projection other than the native projection or to have output imagery cover a specific area of interest. Satpy makes it easy to resample datasets to allow for users to combine them or grid them to these projections or areas of interest. Satpy uses the PyTroll `pyresample` package to provide nearest neighbor, bilinear, or elliptical weighted averaging resampling methods. See :doc:`resample` for more information. Enhancements ============ When making images from satellite data the data has to be manipulated to be compatible with the output image format and still look good to the human eye. Satpy calls this functionality "enhancing" the data, also commonly called scaling or stretching the data. This process can become complicated not just because of how subjective the quality of an image can be, but also because of historical expectations of forecasters and other users for how the data should look. Satpy tries to hide the complexity of all the possible enhancement methods from the user and just provide the best looking image by default. Satpy still makes it possible to customize these procedures, but in most cases it shouldn't be necessary. See the documentation on :doc:`writers` for more information on what's possible for output formats and enhancing images. Writing ======= Satpy is designed to make data loading, manipulating, and analysis easy. However, the best way to get satellite imagery data out to as many users as possible is to make it easy to save it in multiple formats. Satpy allows users to save data in image formats like PNG or GeoTIFF as well as data file formats like NetCDF. Each format's complexity is hidden behind the interface of individual Writer objects and includes keyword arguments for accessing specific format features like compression and output data type. See the :doc:`writers` documentation for the available writers and how to use them. 
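As a small illustration of the keyword arguments mentioned above, the sketch below saves a single loaded dataset as a GeoTIFF while requesting a specific output data type (the dataset name and filename are only examples here; the full set of per-writer options is described in :doc:`writers`)::

    >>> import numpy as np
    >>> # request 32-bit float output instead of the writer's default data type
    >>> scn.save_dataset('overview', filename='overview.tif',
    ...                  writer='geotiff', dtype=np.float32)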
satpy-0.34.0/doc/source/quickstart.rst000066400000000000000000000275721420401153000177320ustar00rootroot00000000000000========== Quickstart ========== Loading and accessing data ========================== .. testsetup:: * >>> import sys >>> reload(sys) >>> sys.setdefaultencoding('utf8') To work with weather satellite data you must create a :class:`~satpy.scene.Scene` object. Satpy does not currently provide an interface to download satellite data, it assumes that the data is on a local hard disk already. In order for Satpy to get access to the data the Scene must be told what files to read and what :ref:`Satpy Reader ` should read them: >>> from satpy import Scene >>> from glob import glob >>> filenames = glob("/home/a001673/data/satellite/Meteosat-10/seviri/lvl1.5/2015/04/20/HRIT/*201504201000*") >>> global_scene = Scene(reader="seviri_l1b_hrit", filenames=filenames) To load data from the files use the :meth:`Scene.load ` method. Printing the Scene object will list each of the :class:`xarray.DataArray` objects currently loaded: >>> global_scene.load([0.6, 0.8, 10.8]) >>> print(global_scene) dask.array Coordinates: * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... Attributes: satellite_longitude: 0.0 sensor: seviri satellite_altitude: 35785831.0 platform_name: Meteosat-11 standard_name: brightness_temperature units: K wavelength: (9.8, 10.8, 11.8) satellite_latitude: 0.0 start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... name: IR_108 resolution: 3000.40316582 calibration: brightness_temperature polarization: None level: None modifiers: () ancillary_variables: [] dask.array Coordinates: * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... Attributes: satellite_longitude: 0.0 sensor: seviri satellite_altitude: 35785831.0 platform_name: Meteosat-11 standard_name: toa_bidirectional_reflectance units: % wavelength: (0.74, 0.81, 0.88) satellite_latitude: 0.0 start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... name: VIS008 resolution: 3000.40316582 calibration: reflectance polarization: None level: None modifiers: () ancillary_variables: [] dask.array Coordinates: * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... Attributes: satellite_longitude: 0.0 sensor: seviri satellite_altitude: 35785831.0 platform_name: Meteosat-11 standard_name: toa_bidirectional_reflectance units: % wavelength: (0.56, 0.635, 0.71) satellite_latitude: 0.0 start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... 
name: VIS006 resolution: 3000.40316582 calibration: reflectance polarization: None level: None modifiers: () ancillary_variables: [] Satpy allows loading file data by wavelengths in micrometers (shown above) or by channel name:: >>> global_scene.load(["VIS006", "VIS008", "IR_108"]) To have a look at the available channels for loading from your :class:`~satpy.scene.Scene` object use the :meth:`~satpy.scene.Scene.available_dataset_names` method: >>> global_scene.available_dataset_names() ['HRV', 'IR_108', 'IR_120', 'VIS006', 'WV_062', 'IR_039', 'IR_134', 'IR_097', 'IR_087', 'VIS008', 'IR_016', 'WV_073'] To access the loaded data use the wavelength or name: >>> print(global_scene[0.6]) For more information on loading datasets by resolution, calibration, or other advanced loading methods see the :doc:`readers` documentation. Calculating measurement values and navigation coordinates ========================================================= Once loaded, measurement values can be calculated from a DataArray within a scene, using .values to get a fully calculated numpy array: >>> vis006 = global_scene["VIS006"] >>> vis006_meas = vis006.values Note that for very large images, such as half-kilometer geostationary imagery, calculated measurement arrays may require multiple gigabytes of memory; using deferred computation and/or subsetting of datasets may be preferred in such cases. The 'area' attribute of the DataArray, if present, can be converted to latitude and longitude arrays. For some instruments (typically polar-orbiters), the get_lonlats() may result in arrays needing an additional .compute() or .values extraction. >>> vis006_lon, vis006_lat = vis006.attrs['area'].get_lonlats() Visualizing data ================ To visualize loaded data in a pop-up window: >>> global_scene.show(0.6) Alternatively if working in a Jupyter notebook the scene can be converted to a `geoviews `_ object using the :meth:`~satpy.scene.Scene.to_geoviews` method. The geoviews package is not a requirement of the base satpy install so in order to use this feature the user needs to install the geoviews package himself. >>> import holoviews as hv >>> import geoviews as gv >>> import geoviews.feature as gf >>> gv.extension("bokeh", "matplotlib") >>> %opts QuadMesh Image [width=600 height=400 colorbar=True] Feature [apply_ranges=False] >>> %opts Image QuadMesh (cmap='RdBu_r') >>> gview = global_scene.to_geoviews(vdims=[0.6]) >>> gview[::5,::5] * gf.coastline * gf.borders Creating new datasets ===================== Calculations based on loaded datasets/channels can easily be assigned to a new dataset: >>> global_scene["ndvi"] = (global_scene[0.8] - global_scene[0.6]) / (global_scene[0.8] + global_scene[0.6]) >>> global_scene.show("ndvi") When doing calculations Xarray, by default, will drop all attributes so attributes need to be copied over by hand. The :func:`~satpy.dataset.combine_metadata` function can assist with this task. Assigning additional custom metadata is also possible. 
>>> from satpy.dataset import combine_metadata >>> scene['new_band'] = scene[0.8] / scene[0.6] >>> scene['new_band'].attrs = combine_metadata(scene[0.8], scene[0.6]) >>> scene['new_band'].attrs['some_other_key'] = 'whatever_value_you_want' Generating composites ===================== Satpy comes with many composite recipes built-in and makes them loadable like any other dataset: >>> global_scene.load(['overview']) To get a list of all available composites for the current scene: >>> global_scene.available_composite_names() ['overview_sun', 'airmass', 'natural_color', 'night_fog', 'overview', 'green_snow', 'dust', 'fog', 'natural_color_raw', 'cloudtop', 'convection', 'ash'] Loading composites will load all necessary dependencies to make that composite and unload them after the composite has been generated. .. note:: Some composite require datasets to be at the same resolution or shape. When this is the case the Scene object must be resampled before the composite can be generated (see below). Resampling ========== .. todo:: Explain where and how to define new areas In certain cases it may be necessary to resample datasets whether they come from a file or are generated composites. Resampling is useful for mapping data to a uniform grid, limiting input data to an area of interest, changing from one projection to another, or for preparing datasets to be combined in a composite (see above). For more details on resampling, different resampling algorithms, and creating your own area of interest see the :doc:`resample` documentation. To resample a Satpy Scene: >>> local_scene = global_scene.resample("eurol") This creates a copy of the original ``global_scene`` with all loaded datasets resampled to the built-in "eurol" area. Any composites that were requested, but could not be generated are automatically generated after resampling. The new ``local_scene`` can now be used like the original ``global_scene`` for working with datasets, saving them to disk or showing them on screen: >>> local_scene.show('overview') >>> local_scene.save_dataset('overview', './local_overview.tif') Saving to disk ============== To save all loaded datasets to disk as geotiff images: >>> global_scene.save_datasets() To save all loaded datasets to disk as PNG images: >>> global_scene.save_datasets(writer='simple_image') Or to save an individual dataset: >>> global_scene.save_dataset('VIS006', 'my_nice_image.png') Datasets are automatically scaled or "enhanced" to be compatible with the output format and to provide the best looking image. For more information on saving datasets and customizing enhancements see the documentation on :doc:`writers`. Slicing and subsetting scenes ============================= Array slicing can be done at the scene level in order to get subsets with consistent navigation throughout. Note that this does not take into account scenes that may include channels at multiple resolutions, i.e. index slicing does not account for dataset spatial resolution. >>> scene_slice = global_scene[2000:2004, 2000:2004] >>> vis006_slice = scene_slice['VIS006'] >>> vis006_slice_meas = vis006_slice.values >>> vis006_slice_lon, vis006_slice_lat = vis006_slice.attrs['area'].get_lonlats() To subset multi-resolution data consistently, use the :meth:`~satpy.scene.Scene.crop` method. >>> scene_llbox = global_scene.crop(ll_bbox=(-4.0, -3.9, 3.9, 4.0)) >>> vis006_llbox = scene_llbox['VIS006'] >>> vis006_llbox_meas = vis006_llbox.values >>> vis006_llbox_lon, vis006_llbox_lat = vis006_llbox.attrs['area'].get_lonlats() .. 
_troubleshooting: Troubleshooting =============== When something goes wrong, a first step to take is check that the latest Version of satpy and its dependencies are installed. Satpy drags in a few packages as dependencies per default, but each reader and writer has it's own dependencies which can be unfortunately easy to miss when just doing a regular `pip install`. To check the missing dependencies for the readers and writers, a utility function called :func:`~satpy.utils.check_satpy` can be used: >>> from satpy.utils import check_satpy >>> check_satpy() Due to the way Satpy works, producing as many datasets as possible, there are times that behavior can be unexpected but with no exceptions raised. To help troubleshoot these situations log messages can be turned on. To do this run the following code before running any other Satpy code: >>> from satpy.utils import debug_on >>> debug_on() satpy-0.34.0/doc/source/readers.rst000066400000000000000000000251401420401153000171520ustar00rootroot00000000000000======= Readers ======= .. todo:: How to read cloud products from NWCSAF software. (separate document?) Satpy supports reading and loading data from many input file formats and schemes. The :class:`~satpy.scene.Scene` object provides a simple interface around all the complexity of these various formats through its ``load`` method. The following sections describe the different way data can be loaded, requested, or added to a Scene object. Available Readers ================= To get a list of available readers use the `available_readers` function. By default, it returns the names of available readers. To return additional reader information use `available_readers(as_dict=True)`:: >>> from satpy import available_readers >>> available_readers() Filter loaded files =================== Coming soon... Load data ========= Datasets in Satpy are identified by certain pieces of metadata set during data loading. These include `name`, `wavelength`, `calibration`, `resolution`, `polarization`, and `modifiers`. Normally, once a ``Scene`` is created requesting datasets by `name` or `wavelength` is all that is needed:: >>> from satpy import Scene >>> scn = Scene(reader="seviri_l1b_hrit", filenames=filenames) >>> scn.load([0.6, 0.8, 10.8]) >>> scn.load(['IR_120', 'IR_134']) However, in many cases datasets are available in multiple spatial resolutions, multiple calibrations (``brightness_temperature``, ``reflectance``, ``radiance``, etc), multiple polarizations, or have corrections or other modifiers already applied to them. By default Satpy will provide the version of the dataset with the highest resolution and the highest level of calibration (brightness temperature or reflectance over radiance). It is also possible to request one of these exact versions of a dataset by using the :class:`~satpy.dataset.DataQuery` class:: >>> from satpy import DataQuery >>> my_channel_id = DataQuery(name='IR_016', calibration='radiance') >>> scn.load([my_channel_id]) >>> print(scn['IR_016']) Or request multiple datasets at a specific calibration, resolution, or polarization:: >>> scn.load([0.6, 0.8], resolution=1000) Or multiple calibrations:: >>> scn.load([0.6, 10.8], calibration=['brightness_temperature', 'radiance']) In the above case Satpy will load whatever dataset is available and matches the specified parameters. So the above ``load`` call would load the ``0.6`` (a visible/reflectance band) radiance data and ``10.8`` (an IR band) brightness temperature data. 
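To confirm exactly which variant of a dataset was loaded, the ``DataID`` objects held by the ``Scene`` can be inspected after loading. The snippet below is a minimal sketch; the identifiers and attribute values printed depend on the reader and the files provided::

    >>> # list every DataID currently held by the Scene
    >>> for data_id in scn.keys():
    ...     print(data_id)
    >>> # check which calibration was selected for the 0.6 micron channel
    >>> print(scn[0.6].attrs['calibration'])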
For geostationary satellites that have the individual channel data separated to several files (segments) the missing segments are padded by default to full disk area. This is made to simplify caching of resampling look-up tables (see :doc:`resample` for more information). To disable this, the user can pass ``pad_data`` keyword argument when loading datasets:: >>> scn.load([0.6, 10.8], pad_data=False) For geostationary products, where the imagery is stored in the files in an unconventional orientation (e.g. MSG SEVIRI L1.5 data are stored with the southwest corner in the upper right), the keyword argument ``upper_right_corner`` can be passed into the load call to automatically flip the datasets to the wished orientation. Accepted argument values are ``'NE'``, ``'NW'``, ``'SE'``, ``'SW'``, and ``'native'``. By default, no flipping is applied (corresponding to ``upper_right_corner='native'``) and the data are delivered in the original format. To get the data in the common upright orientation, load the datasets using e.g.:: >>> scn.load(['VIS008'], upper_right_corner='NE') .. note:: If a dataset could not be loaded there is no exception raised. You must check the :meth:`scn.missing_datasets ` property for any ``DataID`` that could not be loaded. To find out what datasets are available from a reader from the files that were provided to the ``Scene`` use :meth:`~satpy.scene.Scene.available_dataset_ids`:: >>> scn.available_dataset_ids() Or :meth:`~satpy.scene.Scene.available_dataset_names` for just the string names of Datasets:: >>> scn.available_dataset_names() Search for local files ====================== Satpy provides a utility :func:`~satpy.readers.find_files_and_readers` for searching for files in a base directory matching various search parameters. This function discovers files based on filename patterns. It returns a dictionary mapping reader name to a list of filenames supported. This dictionary can be passed directly to the :class:`~satpy.scene.Scene` initialization. :: >>> from satpy import find_files_and_readers, Scene >>> from datetime import datetime >>> my_files = find_files_and_readers(base_dir='/data/viirs_sdrs', ... reader='viirs_sdr', ... start_time=datetime(2017, 5, 1, 18, 1, 0), ... end_time=datetime(2017, 5, 1, 18, 30, 0)) >>> scn = Scene(filenames=my_files) See the :func:`~satpy.readers.find_files_and_readers` documentation for more information on the possible parameters. .. _dataset_metadata: Metadata ======== The datasets held by a scene also provide vital metadata such as dataset name, units, observation time etc. The following attributes are standardized across all readers: * ``name``, and other identifying metadata keys: See :doc:`dev_guide/satpy_internals`. * ``start_time``: Left boundary of the time interval covered by the dataset. * ``end_time``: Right boundary of the time interval covered by the dataset. * ``area``: :class:`~pyresample.geometry.AreaDefinition` or :class:`~pyresample.geometry.SwathDefinition` if data is geolocated. Areas are used for gridded projected data and Swaths when data must be described by individual longitude/latitude coordinates. See the Coordinates section below. * ``reader``: The name of the Satpy reader that produced the dataset. * ``orbital_parameters``: Dictionary of orbital parameters describing the satellite's position. 
* For *geostationary* satellites it is described using the following scalar attributes: * ``satellite_actual_longitude/latitude/altitude``: Current position of the satellite at the time of observation in geodetic coordinates (i.e. altitude is relative and normal to the surface of the ellipsoid). * ``satellite_nominal_longitude/latitude/altitude``: Center of the station keeping box (a confined area in which the satellite is actively maintained in using maneuvres). Inbetween major maneuvres, when the satellite is permanently moved, the nominal position is constant. * ``nadir_longitude/latitude``: Intersection of the instrument's Nadir with the surface of the earth. May differ from the actual satellite position, if the instrument is pointing slightly off the axis (satellite, earth-center). If available, this should be used to compute viewing angles etc. Otherwise, use the actual satellite position. * ``projection_longitude/latitude/altitude``: Projection center of the re-projected data. This should be used to compute lat/lon coordinates. Note that the projection center can differ considerably from the actual satellite position. For example MSG-1 was at times positioned at 3.4 degrees west, while the image data was re-projected to 0 degrees. * [DEPRECATED] ``satellite_longitude/latitude/altitude``: Current position of the satellite at the time of observation in geodetic coordinates. .. note:: Longitudes and latitudes are given in degrees, altitude in meters. For use in pyorbital, the altitude has to be converted to kilometers, see for example :func:`pyorbital.orbital.get_observer_look`. * For *polar orbiting* satellites the readers usually provide coordinates and viewing angles of the swath as ancillary datasets. Additional metadata related to the satellite position include: * ``tle``: Two-Line Element (TLE) set used to compute the satellite's orbit * ``raw_metadata``: Raw, unprocessed metadata from the reader. Note that the above attributes are not necessarily available for each dataset. Coordinates =========== Each :class:`~xarray.DataArray` produced by Satpy has several Xarray coordinate variables added to them. * ``x`` and ``y``: Projection coordinates for gridded and projected data. By default `y` and `x` are the preferred **dimensions** for all 2D data, but these **coordinates** are only added for gridded (non-swath) data. For 1D data only the ``y`` dimension may be specified. * ``crs``: A :class:`~pyproj.crs.CRS` object defined the Coordinate Reference System for the data. Requires pyproj 2.0 or later to be installed. This is stored as a scalar array by Xarray so it must be accessed by doing ``crs = my_data_arr.attrs['crs'].item()``. For swath data this defaults to a ``longlat`` CRS using the WGS84 datum. * ``longitude``: Array of longitude coordinates for swath data. * ``latitude``: Array of latitude coordinates for swath data. Readers are free to define any coordinates in addition to the ones above that are automatically added. Other possible coordinates you may see: * ``acq_time``: Instrument data acquisition time per scan or row of data. Adding a Reader to Satpy ======================== This is described in the developer guide, see :doc:`dev_guide/custom_reader`. Implemented readers =================== SEVIRI L1.5 data readers ------------------------ .. automodule:: satpy.readers.seviri_base :noindex: SEVIRI HRIT format reader ^^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.seviri_l1b_hrit :noindex: SEVIRI Native format reader ^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. 
automodule:: satpy.readers.seviri_l1b_native :noindex: SEVIRI netCDF format reader ^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.seviri_l1b_nc :noindex: Other xRIT-based readers ------------------------ .. automodule:: satpy.readers.hrit_base :noindex: JMA HRIT format reader ^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.hrit_jma :noindex: GOES HRIT format reader ^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.goes_imager_hrit :noindex: Electro-L HRIT format reader ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.electrol_hrit :noindex: hdf-eos based readers --------------------- .. automodule:: satpy.readers.modis_l1b :noindex: .. automodule:: satpy.readers.modis_l2 :noindex: satpy cf nc readers --------------------- .. automodule:: satpy.readers.satpy_cf_nc :noindex: hdf5 based readers ------------------ Arctica-M N1 HDF5 format reader ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.msu_gsa_l1b :noindex: satpy-0.34.0/doc/source/resample.rst000066400000000000000000000001171420401153000173320ustar00rootroot00000000000000========== Resampling ========== .. automodule:: satpy.resample :noindex: satpy-0.34.0/doc/source/writers.rst000066400000000000000000000111531420401153000172230ustar00rootroot00000000000000======= Writers ======= Satpy makes it possible to save datasets in multiple formats. For details on additional arguments and features available for a specific Writer see the table below. Most use cases will want to save datasets using the :meth:`~satpy.scene.Scene.save_datasets` method:: >>> scn.save_datasets(writer='simple_image') The ``writer`` parameter defaults to using the ``geotiff`` writer. One common parameter across almost all Writers is ``filename`` and ``base_dir`` to help automate saving files with custom filenames:: >>> scn.save_datasets( ... filename='{name}_{start_time:%Y%m%d_%H%M%S}.tif', ... base_dir='/tmp/my_ouput_dir') .. versionchanged:: 0.10 The `file_pattern` keyword argument was renamed to `filename` to match the `save_dataset` method's keyword argument. .. _writer_table: .. list-table:: Satpy Writers :header-rows: 1 * - Description - Writer name - Status - Examples * - GeoTIFF - :class:`geotiff ` - Nominal - * - Simple Image (PNG, JPEG, etc) - :class:`simple_image ` - Nominal - * - NinJo TIFF (using ``pyninjotiff`` package) - :class:`ninjotiff ` - To be deprecated (use ninjogeotiff) - * - NetCDF (Standard CF) - :class:`cf ` - Pre-alpha - :mod:`Usage example ` * - AWIPS II Tiled NetCDF4 - :class:`awips_tiled ` - Beta - * - GeoTIFF with NinJo tags - :class:`ninjogeotiff ` - Beta - Available Writers ================= To get a list of available writers use the `available_writers` function:: >>> from satpy import available_writers >>> available_writers() Colorizing and Palettizing using user-supplied colormaps ======================================================== .. note:: In the future this functionality will be added to the ``Scene`` object. It is possible to create single channel "composites" that are then colorized using users' own colormaps. The colormaps are Numpy arrays with shape (num, 3), see the example below how to create the mapping file(s). This example creates a 2-color colormap, and we interpolate the colors between the defined temperature ranges. Beyond those limits the image clipped to the specified colors. 
>>> import numpy as np >>> from satpy.composites import BWCompositor >>> from satpy.enhancements import colorize >>> from satpy.writers import to_image >>> arr = np.array([[0, 0, 0], [255, 255, 255]]) >>> np.save("/tmp/binary_colormap.npy", arr) >>> compositor = BWCompositor("test", standard_name="colorized_ir_clouds") >>> composite = compositor((local_scene[10.8], )) >>> img = to_image(composite) >>> kwargs = {"palettes": [{"filename": "/tmp/binary_colormap.npy", ... "min_value": 223.15, "max_value": 303.15}]} >>> colorize(img, **kwargs) >>> img.show() Similarly it is possible to use discrete values without color interpolation using `palettize()` instead of `colorize()`. You can define several colormaps and ranges in the `palettes` list and they are merged together. See trollimage_ documentation for more information how colormaps and color ranges are merged. The above example can be used in enhancements YAML config like this: .. code-block:: yaml hot_or_cold: standard_name: hot_or_cold operations: - name: colorize method: &colorizefun !!python/name:satpy.enhancements.colorize '' kwargs: palettes: - {filename: /tmp/binary_colormap.npy, min_value: 223.15, max_value: 303.15} .. _trollimage: http://trollimage.readthedocs.io/en/latest/ Saving multiple Scenes in one go ================================ As mentioned earlier, it is possible to save `Scene` datasets directly using :meth:`~satpy.scene.Scene.save_datasets` method. However, sometimes it is beneficial to collect more `Scene`\ s together and process and save them all at once. :: >>> from satpy.writers import compute_writer_results >>> res1 = scn.save_datasets(filename="/tmp/{name}.png", ... writer='simple_image', ... compute=False) >>> res2 = scn.save_datasets(filename="/tmp/{name}.tif", ... writer='geotiff', ... compute=False) >>> results = [res1, res2] >>> compute_writer_results(results) satpy-0.34.0/pyproject.toml000066400000000000000000000006341420401153000156430ustar00rootroot00000000000000[build-system] requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2", 'setuptools_scm_git_archive'] build-backend = "setuptools.build_meta" [tool.setuptools_scm] write_to = "satpy/version.py" [tool.isort] sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] profile = "black" skip_gitignore = true default_section = "THIRDPARTY" known_first_party = "satpy" line_length = 120 satpy-0.34.0/satpy/000077500000000000000000000000001420401153000140645ustar00rootroot00000000000000satpy-0.34.0/satpy/__init__.py000066400000000000000000000030531420401153000161760ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Satpy Package initializer.""" import os try: from satpy.version import version as __version__ # noqa except ModuleNotFoundError: raise ModuleNotFoundError( "No module named satpy.version. This could mean " "you didn't install 'satpy' properly. 
Try reinstalling ('pip " "install').") CHUNK_SIZE = int(os.getenv('PYTROLL_CHUNK_SIZE', 4096)) from satpy._config import config # noqa from satpy.dataset import DataID, DataQuery # noqa from satpy.dataset.data_dict import DatasetDict # noqa from satpy.multiscene import MultiScene # noqa from satpy.readers import available_readers # noqa from satpy.readers import find_files_and_readers # noqa from satpy.scene import Scene # noqa from satpy.utils import get_logger # noqa from satpy.writers import available_writers # noqa log = get_logger('satpy') satpy-0.34.0/satpy/_compat.py000066400000000000000000000062001420401153000160560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Backports and compatibility fixes for satpy.""" from threading import RLock _NOT_FOUND = object() class CachedPropertyBackport: """Backport of cached_property from Python-3.8. Source: https://github.com/python/cpython/blob/v3.8.0/Lib/functools.py#L930 """ def __init__(self, func): # noqa self.func = func self.attrname = None self.__doc__ = func.__doc__ self.lock = RLock() def __set_name__(self, owner, name): # noqa if self.attrname is None: self.attrname = name elif name != self.attrname: raise TypeError( "Cannot assign the same cached_property to two different names " f"({self.attrname!r} and {name!r})." ) def __get__(self, instance, owner=None): # noqa if instance is None: return self if self.attrname is None: raise TypeError( "Cannot use cached_property instance without calling __set_name__ on it.") try: cache = instance.__dict__ except AttributeError: # not all objects have __dict__ (e.g. class defines slots) msg = ( f"No '__dict__' attribute on {type(instance).__name__!r} " f"instance to cache {self.attrname!r} property." ) raise TypeError(msg) from None val = cache.get(self.attrname, _NOT_FOUND) if val is _NOT_FOUND: with self.lock: # check if another thread filled cache while we awaited lock val = cache.get(self.attrname, _NOT_FOUND) if val is _NOT_FOUND: val = self.func(instance) try: cache[self.attrname] = val except TypeError: msg = ( f"The '__dict__' attribute on {type(instance).__name__!r} instance " f"does not support item assignment for caching {self.attrname!r} property." ) raise TypeError(msg) from None return val try: from functools import cached_property # type: ignore except ImportError: # for python < 3.8 cached_property = CachedPropertyBackport # type: ignore try: from numpy.typing import ArrayLike # noqa except ImportError: # numpy <1.20 from numpy import ndarray as ArrayLike # noqa satpy-0.34.0/satpy/_config.py000066400000000000000000000141701420401153000160450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Satpy Configuration directory and file handling.""" import ast import glob import logging import os import sys from collections import OrderedDict import appdirs import pkg_resources from donfig import Config LOG = logging.getLogger(__name__) BASE_PATH = os.path.dirname(os.path.realpath(__file__)) # FIXME: Use package_resources? PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, 'etc') _satpy_dirs = appdirs.AppDirs(appname='satpy', appauthor='pytroll') _CONFIG_DEFAULTS = { 'cache_dir': _satpy_dirs.user_cache_dir, 'cache_lonlats': False, 'cache_sensor_angles': False, 'config_path': [], 'data_dir': _satpy_dirs.user_data_dir, 'demo_data_dir': '.', 'download_aux': True, } # Satpy main configuration object # See https://donfig.readthedocs.io/en/latest/configuration.html # for more information. # # Configuration values will be loaded from files at: # 1. The builtin package satpy.yaml (not present currently) # 2. $SATPY_ROOT_CONFIG (default: /etc/satpy/satpy.yaml) # 3. /etc/satpy/satpy.yaml # 4. ~/.config/satpy/satpy.yaml # 5. ~/.satpy/satpy.yaml # 6. $SATPY_CONFIG_PATH/satpy.yaml if present (colon separated) _CONFIG_PATHS = [ os.path.join(PACKAGE_CONFIG_PATH, 'satpy.yaml'), os.getenv('SATPY_ROOT_CONFIG', os.path.join('/etc', 'satpy', 'satpy.yaml')), os.path.join(sys.prefix, 'etc', 'satpy', 'satpy.yaml'), os.path.join(_satpy_dirs.user_config_dir, 'satpy.yaml'), os.path.join(os.path.expanduser('~'), '.satpy', 'satpy.yaml'), ] # The above files can also be directories. If directories all files # with `.yaml`., `.yml`, or `.json` extensions will be used. _ppp_config_dir = os.getenv('PPP_CONFIG_DIR', None) _satpy_config_path = os.getenv('SATPY_CONFIG_PATH', None) if _ppp_config_dir is not None and _satpy_config_path is None: LOG.warning("'PPP_CONFIG_DIR' is deprecated. Please use 'SATPY_CONFIG_PATH' instead.") _satpy_config_path = _ppp_config_dir if _satpy_config_path is not None: if _satpy_config_path.startswith("["): # 'SATPY_CONFIG_PATH' is set by previous satpy config as a reprsentation of a 'list' # need to use 'ast.literal_eval' to parse the string back to a list _satpy_config_path_list = ast.literal_eval(_satpy_config_path) else: # colon-separated are ordered by custom -> builtins # i.e. last-applied/highest priority to first-applied/lowest priority _satpy_config_path_list = _satpy_config_path.split(os.pathsep) os.environ['SATPY_CONFIG_PATH'] = repr(_satpy_config_path_list) for config_dir in _satpy_config_path_list: _CONFIG_PATHS.append(os.path.join(config_dir, 'satpy.yaml')) _ancpath = os.getenv('SATPY_ANCPATH', None) _data_dir = os.getenv('SATPY_DATA_DIR', None) if _ancpath is not None and _data_dir is None: LOG.warning("'SATPY_ANCPATH' is deprecated. 
Please use 'SATPY_DATA_DIR' instead.") os.environ['SATPY_DATA_DIR'] = _ancpath config = Config("satpy", defaults=[_CONFIG_DEFAULTS], paths=_CONFIG_PATHS) def get_config_path_safe(): """Get 'config_path' and check for proper 'list' type.""" config_path = config.get('config_path') if not isinstance(config_path, list): raise ValueError("Satpy config option 'config_path' must be a " "list, not '{}'".format(type(config_path))) return config_path def get_entry_points_config_dirs(name, include_config_path=True): """Get the config directories for all entry points of given name.""" dirs = [] for entry_point in pkg_resources.iter_entry_points(name): package_name = entry_point.module_name.split('.', 1)[0] new_dir = os.path.join(entry_point.dist.module_path, package_name, 'etc') if not dirs or dirs[-1] != new_dir: dirs.append(new_dir) if include_config_path: dirs.extend(config.get('config_path')[::-1]) return dirs def config_search_paths(filename, search_dirs=None, **kwargs): """Get series of configuration base paths where Satpy configs are located.""" if search_dirs is None: search_dirs = get_config_path_safe()[::-1] paths = [filename, os.path.basename(filename)] paths += [os.path.join(search_dir, filename) for search_dir in search_dirs] paths += [os.path.join(PACKAGE_CONFIG_PATH, filename)] paths = [os.path.abspath(path) for path in paths] if kwargs.get("check_exists", True): paths = [x for x in paths if os.path.isfile(x)] paths = list(OrderedDict.fromkeys(paths)) # flip the order of the list so builtins are loaded first return paths[::-1] def glob_config(pattern, search_dirs=None): """Return glob results for all possible configuration locations. Note: This method does not check the configuration "base" directory if the pattern includes a subdirectory. This is done for performance since this is usually used to find *all* configs for a certain component. """ patterns = config_search_paths(pattern, search_dirs=search_dirs, check_exists=False) for pattern_fn in patterns: for path in glob.iglob(pattern_fn): yield path def get_config_path(filename): """Get the path to the highest priority version of a config file.""" paths = config_search_paths(filename) for path in paths[::-1]: if os.path.exists(path): return path raise FileNotFoundError("Could not find file in configuration path: " "'{}'".format(filename)) satpy-0.34.0/satpy/aux_download.py000066400000000000000000000363051420401153000171310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Functions and utilities for downloading ancillary data.""" import logging import os import pooch import satpy logger = logging.getLogger(__name__) _FILE_REGISTRY = {} _FILE_URLS = {} RUNNING_TESTS = False def register_file(url, filename, component_type=None, known_hash=None): """Register file for future retrieval. This function only prepares Satpy to be able to download and cache the provided file. 
It will not download the file. See :func:`satpy.aux_download.retrieve` for more information. Args: url (str): URL where remote file can be downloaded. filename (str): Filename used to identify and store the downloaded file as. component_type (str or None): Name of the type of Satpy component that will use this file. Typically "readers", "composites", "writers", or "enhancements" for consistency. This will be prepended to the filename when storing the data in the cache. known_hash (str): Hash used to verify the file is downloaded correctly. See https://www.fatiando.org/pooch/v1.3.0/beginner.html#hashes for more information. If not provided then the file is not checked. Returns: Cache key that can be used to retrieve the file later. The cache key consists of the ``component_type`` and provided ``filename``. This should be passed to :func:`satpy.aux_download_retrieve` when the file will be used. """ fname = _generate_filename(filename, component_type) _FILE_REGISTRY[fname] = known_hash _FILE_URLS[fname] = url return fname def _generate_filename(filename, component_type): if filename is None: return None path = filename if component_type: path = '/'.join([component_type, path]) return path def _retrieve_offline(data_dir, cache_key): logger.debug('Downloading auxiliary files is turned off, will check ' 'local files.') local_file = os.path.join(data_dir, *cache_key.split('/')) if not os.path.isfile(local_file): raise RuntimeError("Satpy 'download_aux' setting is False meaning " "no new files will be downloaded and the local " "file '{}' does not exist.".format(local_file)) return local_file def _should_download(cache_key): """Check if we're running tests and can download this file.""" return not RUNNING_TESTS or 'README' in cache_key def retrieve(cache_key, pooch_kwargs=None): """Download and cache the file associated with the provided ``cache_key``. Cache location is controlled by the config ``data_dir`` key. See :ref:`data_dir_setting` for more information. Args: cache_key (str): Cache key returned by :func:`~satpy.aux_download.register_file`. pooch_kwargs (dict or None): Extra keyword arguments to pass to :meth:`pooch.Pooch.fetch`. Returns: Local path of the cached file. """ pooch_kwargs = pooch_kwargs or {} path = satpy.config.get('data_dir') if not satpy.config.get('download_aux'): return _retrieve_offline(path, cache_key) if not _should_download(cache_key): raise RuntimeError("Auxiliary data download is not allowed during " "tests. Mock the appropriate components of your " "tests to not need the 'retrieve' function.") # reuse data directory as the default URL where files can be downloaded from pooch_obj = pooch.create(path, path, registry=_FILE_REGISTRY, urls=_FILE_URLS) return pooch_obj.fetch(cache_key, **pooch_kwargs) def _retrieve_all_with_pooch(pooch_kwargs): if pooch_kwargs is None: pooch_kwargs = {} path = satpy.config.get('data_dir') pooch_obj = pooch.create(path, path, registry=_FILE_REGISTRY, urls=_FILE_URLS) for fname in _FILE_REGISTRY: logger.info("Downloading extra data file '%s'...", fname) pooch_obj.fetch(fname, **pooch_kwargs) def retrieve_all(readers=None, writers=None, composite_sensors=None, pooch_kwargs=None): """Find cache-able data files for Satpy and download them. The typical use case for this function is to download all ancillary files before going to an environment/system that does not have internet access. Args: readers (list or None): Limit searching to these readers. If not specified or ``None`` then all readers are searched. 
If an empty list then no readers are searched. writers (list or None): Limit searching to these writers. If not specified or ``None`` then all writers are searched. If an empty list then no writers are searched. composite_sensors (list or None): Limit searching to composite configuration files for these sensors. If ``None`` then all sensor configs will be searched. If an empty list then no composites will be searched. pooch_kwargs (dict): Additional keyword arguments to pass to pooch ``fetch``. """ if not satpy.config.get('download_aux'): raise RuntimeError("Satpy 'download_aux' setting is False so no files " "will be downloaded.") find_registerable_files(readers=readers, writers=writers, composite_sensors=composite_sensors) _retrieve_all_with_pooch(pooch_kwargs) logger.info("Done downloading all extra files.") def find_registerable_files(readers=None, writers=None, composite_sensors=None): """Load all Satpy components so they can be downloaded. Args: readers (list or None): Limit searching to these readers. If not specified or ``None`` then all readers are searched. If an empty list then no readers are searched. writers (list or None): Limit searching to these writers. If not specified or ``None`` then all writers are searched. If an empty list then no writers are searched. composite_sensors (list or None): Limit searching to composite configuration files for these sensors. If ``None`` then all sensor configs will be searched. If an empty list then no composites will be searched. """ _find_registerable_files_compositors(composite_sensors) _find_registerable_files_readers(readers) _find_registerable_files_writers(writers) return sorted(_FILE_REGISTRY.keys()) def _find_registerable_files_compositors(sensors=None): """Load all compositor configs so that files are registered. Compositor objects should register files when they are initialized. """ from satpy.composites.config_loader import all_composite_sensors, load_compositor_configs_for_sensors if sensors is None: sensors = all_composite_sensors() if sensors: mods = load_compositor_configs_for_sensors(sensors)[1] _register_modifier_files(mods) def _register_modifier_files(modifiers): for mod_sensor_dict in modifiers.values(): for mod_name, (mod_cls, mod_props) in mod_sensor_dict.items(): try: mod_cls(**mod_props) except (ValueError, RuntimeError): logger.error("Could not initialize modifier '%s' for " "auxiliary download registration.", mod_name) def _find_registerable_files_readers(readers=None): """Load all readers so that files are registered.""" import yaml from satpy.readers import configs_for_reader, load_reader for reader_configs in configs_for_reader(reader=readers): try: load_reader(reader_configs) except (ModuleNotFoundError, yaml.YAMLError): continue def _find_registerable_files_writers(writers=None): """Load all writers so that files are registered.""" from satpy.writers import configs_for_writer, load_writer_configs for writer_configs in configs_for_writer(writer=writers): try: load_writer_configs(writer_configs) except ValueError: continue class DataDownloadMixin: """Mixin class for Satpy components to download files. This class simplifies the logic needed to download and cache data files needed for operations in a Satpy component (readers, writers, etc). It does this in a two step process where files that might be downloaded are "registered" and then "retrieved" when they need to be used. To use this class include it as one of the subclasses of your Satpy component. 
Then in the ``__init__`` method, call the ``register_data_files`` function during initialization. .. note:: This class is already included in the ``FileYAMLReader`` and ``Writer`` base classes. There is no need to define a custom class. The below code is shown as an example:: from satpy.readers.yaml_reader import AbstractYAMLReader from satpy.aux_download import DataDownloadMixin class MyReader(AbstractYAMLReader, DataDownloadMixin): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.register_data_files() This class expects data files to be configured in either a ``self.info['data_files']`` (standard for readers/writers) or ``self.config['data_files']`` list. The ``data_files`` item itself is a list of dictionaries. This information can also be passed directly to ``register_data_files`` for more complex cases. In YAML, for a reader, this might look like this:: reader: name: abi_l1b short_name: ABI L1b long_name: GOES-R ABI Level 1b ... other metadata ... data_files: - url: "https://example.com/my_data_file.dat" - url: "https://raw.githubusercontent.com/pytroll/satpy/main/README.rst" known_hash: "sha256:5891286b63e7745de08c4b0ac204ad44cfdb9ab770309debaba90308305fa759" - url: "https://raw.githubusercontent.com/pytroll/satpy/main/RELEASING.md" filename: "satpy_releasing.md" In this example we register three files that might be downloaded. If ``known_hash`` is not provided or None (null in YAML) then the data file will not be checked for validity when downloaded. See :func:`~satpy.aux_download.register_file` for more information. You can optionally specify ``filename`` to define the in-cache name when this file is downloaded. This can be useful in cases when the filename cannot be easily determined from the URL. When the file is actually needed, you can retrieve the local path by calling ``~satpy.aux_download.retrieve(cache_key)`` with the "cache key" generated during registration. These keys will be in the format: ``<component_type>/<filename>``. For a reader this would be ``readers/satpy_releasing.md``. This Mixin is not the only way to register and download files for a Satpy component, but is the most generic and flexible. Feel free to use the :func:`~satpy.aux_download.register_file` and :func:`~satpy.aux_download.retrieve` functions directly. However, :func:`~satpy.aux_download.find_registerable_files` must also be updated to support your component (if files are not registered during initialization). """ DATA_FILE_COMPONENTS = { 'reader': 'readers', 'writer': 'writers', 'composit': 'composites', 'modifi': 'modifiers', 'corr': 'modifiers', } @property def _data_file_component_type(self): cls_name = self.__class__.__name__.lower() for cls_name_sub, comp_type in self.DATA_FILE_COMPONENTS.items(): if cls_name_sub in cls_name: return comp_type return 'other' def register_data_files(self, data_files=None): """Register a series of files that may be downloaded later. See :class:`~satpy.aux_download.DataDownloadMixin` for more information on the assumptions and structure of the data file configuration dictionary.
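An illustrative sketch of passing the configuration directly instead of reading it
from ``self.info``/``self.config`` (the URL and filename below are placeholders, not
real files)::

    data_files = [{'url': 'https://example.com/my_lut.dat',
                   'filename': 'my_lut.dat',
                   'known_hash': None}]
    cache_keys = self.register_data_files(data_files)
    # Later, when the file is actually needed:
    # local_path = satpy.aux_download.retrieve(cache_keys[0])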
""" comp_type = self._data_file_component_type if data_files is None: df_parent = getattr(self, 'info', self.config) data_files = df_parent.get('data_files', []) cache_keys = [] for data_file_entry in data_files: cache_key = self._register_data_file(data_file_entry, comp_type) cache_keys.append(cache_key) return cache_keys @staticmethod def _register_data_file(data_file_entry, comp_type): url = data_file_entry['url'] filename = data_file_entry.get('filename', os.path.basename(url)) known_hash = data_file_entry.get('known_hash') return register_file(url, filename, component_type=comp_type, known_hash=known_hash) def retrieve_all_cmd(argv=None): """Call 'retrieve_all' function from console script 'satpy_retrieve_all'.""" import argparse parser = argparse.ArgumentParser(description="Download auxiliary data files used by Satpy.") parser.add_argument('--data-dir', help="Override 'SATPY_DATA_DIR' for destination of " "downloaded files. This does NOT change the " "directory Satpy will look at when searching " "for files outside of this script.") parser.add_argument('--composite-sensors', nargs="*", help="Limit loaded composites for the specified " "sensors. If specified with no arguments, " "no composite files will be downloaded.") parser.add_argument('--readers', nargs="*", help="Limit searching to these readers. If specified " "with no arguments, no reader files will be " "downloaded.") parser.add_argument('--writers', nargs="*", help="Limit searching to these writers. If specified " "with no arguments, no writer files will be " "downloaded.") args = parser.parse_args(argv) logging.basicConfig(level=logging.INFO) if args.data_dir is None: args.data_dir = satpy.config.get('data_dir') with satpy.config.set(data_dir=args.data_dir): retrieve_all(readers=args.readers, writers=args.writers, composite_sensors=args.composite_sensors) satpy-0.34.0/satpy/composites/000077500000000000000000000000001420401153000162515ustar00rootroot00000000000000satpy-0.34.0/satpy/composites/__init__.py000066400000000000000000001706731420401153000204000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Base classes for composite objects.""" from __future__ import annotations import logging import os import warnings import dask.array as da import numpy as np import xarray as xr import satpy from satpy.aux_download import DataDownloadMixin from satpy.dataset import DataID, combine_metadata from satpy.dataset.dataid import minimal_default_keys_config from satpy.utils import unify_chunks from satpy.writers import get_enhanced_image LOG = logging.getLogger(__name__) NEGLIGIBLE_COORDS = ['time'] """Keywords identifying non-dimensional coordinates to be ignored during composite generation.""" MASKING_COMPOSITOR_METHODS = ['less', 'less_equal', 'equal', 'greater_equal', 'greater', 'not_equal', 'isnan', 'isfinite', 'isneginf', 'isposinf'] class IncompatibleAreas(Exception): """Error raised upon compositing things of different shapes.""" class IncompatibleTimes(Exception): """Error raised upon compositing things from different times.""" def check_times(projectables): """Check that *projectables* have compatible times.""" times = [] for proj in projectables: try: if proj['time'].size and proj['time'][0] != 0: times.append(proj['time'][0].values) else: break # right? except KeyError: # the datasets don't have times break except IndexError: # time is a scalar if proj['time'].values != 0: times.append(proj['time'].values) else: break else: # Is there a more gracious way to handle this ? if np.max(times) - np.min(times) > np.timedelta64(1, 's'): raise IncompatibleTimes mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times) return mid_time def sub_arrays(proj1, proj2): """Substract two DataArrays and combine their attrs.""" attrs = combine_metadata(proj1.attrs, proj2.attrs) if (attrs.get('area') is None and proj1.attrs.get('area') is not None and proj2.attrs.get('area') is not None): raise IncompatibleAreas res = proj1 - proj2 res.attrs = attrs return res class CompositeBase: """Base class for all compositors and modifiers. A compositor in Satpy is a class that takes in zero or more input DataArrays and produces a new DataArray with its own identifier (name). The result of a compositor is typically a brand new "product" that represents something different than the inputs that went into the operation. See the :class:`~satpy.composites.ModifierBase` class for information on the similar concept of "modifiers". 
""" def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): """Initialise the compositor.""" # Required info kwargs["name"] = name kwargs["prerequisites"] = prerequisites or [] kwargs["optional_prerequisites"] = optional_prerequisites or [] self.attrs = kwargs @property def id(self): """Return the DataID of the object.""" try: return self.attrs['_satpy_id'] except KeyError: id_keys = self.attrs.get('_satpy_id_keys', minimal_default_keys_config) return DataID(id_keys, **self.attrs) def __call__(self, datasets, optional_datasets=None, **info): """Generate a composite.""" raise NotImplementedError() def __str__(self): """Stringify the object.""" from pprint import pformat return pformat(self.attrs) def __repr__(self): """Represent the object.""" from pprint import pformat return pformat(self.attrs) def apply_modifier_info(self, origin, destination): """Apply the modifier info from *origin* to *destination*.""" o = getattr(origin, 'attrs', origin) d = getattr(destination, 'attrs', destination) try: dataset_keys = self.attrs['_satpy_id'].id_keys.keys() except KeyError: dataset_keys = ['name', 'modifiers'] for k in dataset_keys: if k == 'modifiers' and k in self.attrs: d[k] = self.attrs[k] elif d.get(k) is None: if self.attrs.get(k) is not None: d[k] = self.attrs[k] elif o.get(k) is not None: d[k] = o[k] def match_data_arrays(self, data_arrays): """Match data arrays so that they can be used together in a composite. For the purpose of this method, "can be used together" means: - All arrays should have the same dimensions. - Either all arrays should have an area, or none should. - If all have an area, the areas should be all the same. In addition, negligible non-dimensional coordinates are dropped (see :meth:`drop_coordinates`) and dask chunks are unified (see :func:`satpy.utils.unify_chunks`). Args: data_arrays (List[arrays]): Arrays to be checked Returns: data_arrays (List[arrays]): Arrays with negligible non-dimensional coordinates removed. Raises: :class:`IncompatibleAreas`: If dimension or areas do not match. :class:`ValueError`: If some, but not all data arrays lack an area attribute. """ self.check_geolocation(data_arrays) new_arrays = self.drop_coordinates(data_arrays) new_arrays = list(unify_chunks(*new_arrays)) return new_arrays def drop_coordinates(self, data_arrays): """Drop negligible non-dimensional coordinates. Drops negligible coordinates if they do not correspond to any dimension. Negligible coordinates are defined in the :attr:`NEGLIGIBLE_COORDS` module attribute. Args: data_arrays (List[arrays]): Arrays to be checked """ new_arrays = [] for ds in data_arrays: drop = [coord for coord in ds.coords if coord not in ds.dims and any([neglible in coord for neglible in NEGLIGIBLE_COORDS])] if drop: new_arrays.append(ds.drop(drop)) else: new_arrays.append(ds) return new_arrays def check_geolocation(self, data_arrays): """Check that the geolocations of the *data_arrays* are compatible. For the purpose of this method, "compatible" means: - All arrays should have the same dimensions. - Either all arrays should have an area, or none should. - If all have an area, the areas should be all the same. Args: data_arrays (List[arrays]): Arrays to be checked Raises: :class:`IncompatibleAreas`: If dimension or areas do not match. :class:`ValueError`: If some, but not all data arrays lack an area attribute. 
""" if len(data_arrays) == 1: return if 'x' in data_arrays[0].dims and \ not all(x.sizes['x'] == data_arrays[0].sizes['x'] for x in data_arrays[1:]): raise IncompatibleAreas("X dimension has different sizes") if 'y' in data_arrays[0].dims and \ not all(x.sizes['y'] == data_arrays[0].sizes['y'] for x in data_arrays[1:]): raise IncompatibleAreas("Y dimension has different sizes") areas = [ds.attrs.get('area') for ds in data_arrays] if all(a is None for a in areas): return if any(a is None for a in areas): raise ValueError("Missing 'area' attribute") if not all(areas[0] == x for x in areas[1:]): LOG.debug("Not all areas are the same in " "'{}'".format(self.attrs['name'])) raise IncompatibleAreas("Areas are different") class DifferenceCompositor(CompositeBase): """Make the difference of two data arrays.""" def __call__(self, projectables, nonprojectables=None, **attrs): """Generate the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) info['name'] = self.attrs['name'] info.update(attrs) proj = projectables[0] - projectables[1] proj.attrs = info return proj class SingleBandCompositor(CompositeBase): """Basic single-band composite builder. This preserves all the attributes of the dataset it is derived from. """ @staticmethod def _update_missing_metadata(existing_attrs, new_attrs): for key, val in new_attrs.items(): if key not in existing_attrs and val is not None: existing_attrs[key] = val def __call__(self, projectables, nonprojectables=None, **attrs): """Build the composite.""" if len(projectables) != 1: raise ValueError("Can't have more than one band in a single-band composite") data = projectables[0] new_attrs = data.attrs.copy() self._update_missing_metadata(new_attrs, attrs) resolution = new_attrs.get('resolution', None) new_attrs.update(self.attrs) if resolution is not None: new_attrs['resolution'] = resolution return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) class CategoricalDataCompositor(CompositeBase): """Compositor used to recategorize categorical data using a look-up-table. Each value in the data array will be recategorized to a new category defined in the look-up-table using the original value as an index for that look-up-table. Example: data = [[1, 3, 2], [4, 2, 0]] lut = [10, 20, 30, 40, 50] res = [[20, 40, 30], [50, 30, 10]] """ def __init__(self, name, lut=None, **kwargs): """Get look-up-table used to recategorize data. Args: lut (list): a list of new categories. The lenght must be greater than the maximum value in the data array that should be recategorized. 
""" self.lut = np.array(lut) super(CategoricalDataCompositor, self).__init__(name, **kwargs) def _update_attrs(self, new_attrs): """Modify name and add LUT.""" new_attrs['name'] = self.attrs['name'] new_attrs['composite_lut'] = list(self.lut) @staticmethod def _getitem(block, lut): return lut[block] def __call__(self, projectables, **kwargs): """Recategorize the data.""" if len(projectables) != 1: raise ValueError("Can't have more than one dataset for a categorical data composite") data = projectables[0].astype(int) res = data.data.map_blocks(self._getitem, self.lut, dtype=self.lut.dtype) new_attrs = data.attrs.copy() self._update_attrs(new_attrs) return xr.DataArray(res, dims=data.dims, attrs=new_attrs, coords=data.coords) class GenericCompositor(CompositeBase): """Basic colored composite builder.""" modes = {1: 'L', 2: 'LA', 3: 'RGB', 4: 'RGBA'} def __init__(self, name, common_channel_mask=True, **kwargs): """Collect custom configuration values. Args: common_channel_mask (bool): If True, mask all the channels with a mask that combines all the invalid areas of the given data. """ self.common_channel_mask = common_channel_mask super(GenericCompositor, self).__init__(name, **kwargs) @classmethod def infer_mode(cls, data_arr): """Guess at the mode for a particular DataArray.""" if 'mode' in data_arr.attrs: return data_arr.attrs['mode'] if 'bands' not in data_arr.dims: return cls.modes[1] if 'bands' in data_arr.coords and isinstance(data_arr.coords['bands'][0].item(), str): return ''.join(data_arr.coords['bands'].values) return cls.modes[data_arr.sizes['bands']] def _concat_datasets(self, projectables, mode): try: data = xr.concat(projectables, 'bands', coords='minimal') data['bands'] = list(mode) except ValueError as e: LOG.debug("Original exception for incompatible areas: {}".format(str(e))) raise IncompatibleAreas return data def _get_sensors(self, projectables): sensor = set() for projectable in projectables: current_sensor = projectable.attrs.get("sensor", None) if current_sensor: if isinstance(current_sensor, (str, bytes)): sensor.add(current_sensor) else: sensor |= current_sensor if len(sensor) == 0: sensor = None elif len(sensor) == 1: sensor = list(sensor)[0] return sensor def __call__(self, projectables, nonprojectables=None, **attrs): """Build the composite.""" num = len(projectables) mode = attrs.get('mode') if mode is None: # num may not be in `self.modes` so only check if we need to mode = self.modes[num] if len(projectables) > 1: projectables = self.match_data_arrays(projectables) data = self._concat_datasets(projectables, mode) # Skip masking if user wants it or a specific alpha channel is given. 
if self.common_channel_mask and mode[-1] != 'A': data = data.where(data.notnull().all(dim='bands')) else: data = projectables[0] # if inputs have a time coordinate that may differ slightly between # themselves then find the mid time and use that as the single # time coordinate value if len(projectables) > 1: time = check_times(projectables) if time is not None and 'time' in data.dims: data['time'] = [time] new_attrs = combine_metadata(*projectables) # remove metadata that shouldn't make sense in a composite new_attrs["wavelength"] = None new_attrs.pop("units", None) new_attrs.pop('calibration', None) new_attrs.pop('modifiers', None) new_attrs.update({key: val for (key, val) in attrs.items() if val is not None}) resolution = new_attrs.get('resolution', None) new_attrs.update(self.attrs) if resolution is not None: new_attrs['resolution'] = resolution new_attrs["sensor"] = self._get_sensors(projectables) new_attrs["mode"] = mode return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) class FillingCompositor(GenericCompositor): """Make a regular RGB, filling the RGB bands with the first provided dataset's values.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) projectables[1] = projectables[1].fillna(projectables[0]) projectables[2] = projectables[2].fillna(projectables[0]) projectables[3] = projectables[3].fillna(projectables[0]) return super(FillingCompositor, self).__call__(projectables[1:], **info) class Filler(GenericCompositor): """Fix holes in projectable 1 with data from projectable 2.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) filled_projectable = projectables[0].fillna(projectables[1]) return super(Filler, self).__call__([filled_projectable], **info) class MultiFiller(GenericCompositor): """Fix holes in projectable 1 with data from the next projectables.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) filled_projectable = projectables[0] for next_projectable in projectables[1:]: filled_projectable = filled_projectable.fillna(next_projectable) if 'optional_datasets' in info.keys(): for next_projectable in info['optional_datasets']: filled_projectable = filled_projectable.fillna(next_projectable) return super(MultiFiller, self).__call__([filled_projectable], **info) class RGBCompositor(GenericCompositor): """Make a composite from three color bands (deprecated).""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" warnings.warn("RGBCompositor is deprecated, use GenericCompositor instead.", DeprecationWarning) if len(projectables) != 3: raise ValueError("Expected 3 datasets, got %d" % (len(projectables),)) return super(RGBCompositor, self).__call__(projectables, **info) class ColormapCompositor(GenericCompositor): """A compositor that uses colormaps.""" @staticmethod def build_colormap(palette, dtype, info): """Create the colormap from the `raw_palette` and the valid_range. Colormaps come in different forms, but they are all supposed to have color values between 0 and 255. The following cases are considered: - Palettes comprised of only a list on colors. If *dtype* is uint8, the values of the colormap are the enumeration of the colors. 
Otherwise, the colormap values will be spread evenly from the min to the max of the valid_range provided in `info`. - Palettes that have a palette_meanings attribute. The palette meanings will be used as values of the colormap. """ from trollimage.colormap import Colormap squeezed_palette = np.asanyarray(palette).squeeze() / 255.0 set_range = True if hasattr(palette, 'attrs') and 'palette_meanings' in palette.attrs: set_range = False meanings = palette.attrs['palette_meanings'] iterator = zip(meanings, squeezed_palette) else: iterator = enumerate(squeezed_palette[:-1]) if dtype == np.dtype('uint8'): tups = [(val, tuple(tup)) for (val, tup) in iterator] colormap = Colormap(*tups) elif 'valid_range' in info: tups = [(val, tuple(tup)) for (val, tup) in iterator] colormap = Colormap(*tups) if set_range: sf = info.get('scale_factor', np.array(1)) colormap.set_range( *(np.array(info['valid_range']) * sf + info.get('add_offset', 0))) else: raise AttributeError("Data needs to have either a valid_range or be of type uint8" + " in order to be displayable with an attached color-palette!") return colormap, squeezed_palette def __call__(self, projectables, **info): """Generate the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, palette = projectables colormap, palette = self.build_colormap(palette, data.dtype, data.attrs) channels = self._apply_colormap(colormap, data, palette) return self._create_composite_from_channels(channels, data) def _create_composite_from_channels(self, channels, template): mask = self._get_mask_from_data(template) channels = [self._create_masked_dataarray_like(channel, template, mask) for channel in channels] res = super(ColormapCompositor, self).__call__(channels, **template.attrs) res.attrs['_FillValue'] = np.nan return res @staticmethod def _get_mask_from_data(data): fill_value = data.attrs.get('_FillValue', np.nan) if np.isnan(fill_value): mask = data.notnull() else: mask = data != data.attrs['_FillValue'] return mask @staticmethod def _create_masked_dataarray_like(array, template, mask): return xr.DataArray(array.reshape(template.shape), dims=template.dims, coords=template.coords, attrs=template.attrs).where(mask) class ColorizeCompositor(ColormapCompositor): """A compositor colorizing the data, interpolating the palette colors when needed.""" @staticmethod def _apply_colormap(colormap, data, palette): del palette return colormap.colorize(data.data.squeeze()) class PaletteCompositor(ColormapCompositor): """A compositor colorizing the data, not interpolating the palette colors.""" @staticmethod def _apply_colormap(colormap, data, palette): channels, colors = colormap.palettize(data.data.squeeze()) channels = channels.map_blocks(_insert_palette_colors, palette, dtype=palette.dtype, new_axis=2, chunks=list(channels.chunks) + [palette.shape[1]]) return [channels[:, :, i] for i in range(channels.shape[2])] def _insert_palette_colors(channels, palette): channels = palette[channels] return channels class DayNightCompositor(GenericCompositor): """A compositor that blends day data with night data. Using the `day_night` flag it is also possible to provide only a day product or only a night product and mask out (make transparent) the opposite portion of the image (night or day). See the documentation below for more details. """ def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", **kwargs): """Collect custom configuration values. 
Args: lim_low (float): lower limit of Sun zenith angle for the blending of the given channels lim_high (float): upper limit of Sun zenith angle for the blending of the given channels day_night (string): "day_night" means both day and night portions will be kept "day_only" means only day portion will be kept "night_only" means only night portion will be kept """ self.lim_low = lim_low self.lim_high = lim_high self.day_night = day_night super(DayNightCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite.""" projectables = self.match_data_arrays(projectables) # At least one composite is requested. foreground_data = projectables[0] lim_low = np.cos(np.deg2rad(self.lim_low)) lim_high = np.cos(np.deg2rad(self.lim_high)) try: coszen = np.cos(np.deg2rad(projectables[2 if self.day_night == "day_night" else 1])) except IndexError: from pyorbital.astronomy import cos_zen LOG.debug("Computing sun zenith angles.") # Get chunking that matches the data try: chunks = foreground_data.sel(bands=foreground_data['bands'][0]).chunks except KeyError: chunks = foreground_data.chunks lons, lats = foreground_data.attrs["area"].get_lonlats(chunks=chunks) coszen = xr.DataArray(cos_zen(foreground_data.attrs["start_time"], lons, lats), dims=['y', 'x'], coords=[foreground_data['y'], foreground_data['x']]) # Calculate blending weights coszen -= np.min((lim_high, lim_low)) coszen /= np.abs(lim_low - lim_high) coszen = coszen.clip(0, 1) # Apply enhancements foreground_data = enhance2dataset(foreground_data) if "only" in self.day_night: # Only one portion (day or night) is selected. One composite is requested. # Add alpha band to single L/RGB composite to make the masked-out portion transparent # L -> LA # RGB -> RGBA foreground_data = add_alpha_bands(foreground_data) # No need to replace missing channel data with zeros # Get metadata attrs = foreground_data.attrs.copy() # Determine the composite position day_data = foreground_data if "day" in self.day_night else 0 night_data = foreground_data if "night" in self.day_night else 0 else: # Both day and night portions are selected. Two composites are requested. Get the second one merged. background_data = projectables[1] # Apply enhancements background_data = enhance2dataset(background_data) # Adjust bands so that they match # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA foreground_data = add_bands(foreground_data, background_data['bands']) background_data = add_bands(background_data, foreground_data['bands']) # Replace missing channel data with zeros foreground_data = zero_missing_data(foreground_data, background_data) background_data = zero_missing_data(background_data, foreground_data) # Get merged metadata attrs = combine_metadata(foreground_data, background_data) # Determine the composite position day_data = foreground_data night_data = background_data # Blend the two images together day_portion = coszen * day_data night_portion = (1 - coszen) * night_data data = night_portion + day_portion data.attrs = attrs # Split to separate bands so the mode is correct data = [data.sel(bands=b) for b in data['bands']] return super(DayNightCompositor, self).__call__(data, **kwargs) def add_alpha_bands(data): """Only used for DayNightCompositor. Add an alpha band to L or RGB composite as prerequisites for the following band matching to make the masked-out area transparent. 
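Illustrative effect on the band coordinate (a sketch; ``data`` is assumed to be an
already enhanced L or RGB composite)::

    data['bands'].values            # e.g. ['R', 'G', 'B']
    data = add_alpha_bands(data)
    data['bands'].values            # ['R', 'G', 'B', 'A'] with an all-ones alpha band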
""" if 'A' not in data['bands'].data: new_data = [data.sel(bands=band) for band in data['bands'].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() alpha.data = da.ones((data.sizes['y'], data.sizes['x']), chunks=new_data[0].chunks) # Rename band to indicate it's alpha alpha['bands'] = 'A' new_data.append(alpha) new_data = xr.concat(new_data, dim='bands') new_data.attrs['mode'] = data.attrs['mode'] + 'A' data = new_data return data def enhance2dataset(dset, convert_p=False): """Return the enhancement dataset *dset* as an array. If `convert_p` is True, enhancements generating a P mode will be converted to RGB or RGBA. """ attrs = dset.attrs data = _get_data_from_enhanced_image(dset, convert_p) data.attrs = attrs # remove 'mode' if it is specified since it may have been updated data.attrs.pop('mode', None) # update mode since it may have changed (colorized/palettize) data.attrs['mode'] = GenericCompositor.infer_mode(data) return data def _get_data_from_enhanced_image(dset, convert_p): img = get_enhanced_image(dset) if convert_p and img.mode == 'P': img = _apply_palette_to_image(img) if img.mode != 'P': data = img.data.clip(0.0, 1.0) else: data = img.data return data def _apply_palette_to_image(img): if len(img.palette[0]) == 3: img = img.convert('RGB') elif len(img.palette[0]) == 4: img = img.convert('RGBA') return img def add_bands(data, bands): """Add bands so that they match *bands*.""" # Add R, G and B bands, remove L band bands = bands.compute() if 'P' in data['bands'].data or 'P' in bands.data: raise NotImplementedError('Cannot mix datasets of mode P with other datasets at the moment.') if 'L' in data['bands'].data and 'R' in bands.data: lum = data.sel(bands='L') # Keep 'A' if it was present if 'A' in data['bands']: alpha = data.sel(bands='A') new_data = (lum, lum, lum, alpha) new_bands = ['R', 'G', 'B', 'A'] mode = 'RGBA' else: new_data = (lum, lum, lum) new_bands = ['R', 'G', 'B'] mode = 'RGB' data = xr.concat(new_data, dim='bands', coords={'bands': new_bands}) data['bands'] = new_bands data.attrs['mode'] = mode # Add alpha band if 'A' not in data['bands'].data and 'A' in bands.data: new_data = [data.sel(bands=band) for band in data['bands'].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() alpha.data = da.ones((data.sizes['y'], data.sizes['x']), chunks=new_data[0].chunks) # Rename band to indicate it's alpha alpha['bands'] = 'A' new_data.append(alpha) new_data = xr.concat(new_data, dim='bands') new_data.attrs['mode'] = data.attrs['mode'] + 'A' data = new_data return data def zero_missing_data(data1, data2): """Replace NaN values with zeros in data1 if the data is valid in data2.""" nans = np.logical_and(np.isnan(data1), np.logical_not(np.isnan(data2))) return data1.where(~nans, 0) class RealisticColors(GenericCompositor): """Create a realistic colours composite for SEVIRI.""" def __call__(self, projectables, *args, **kwargs): """Generate the composite.""" projectables = self.match_data_arrays(projectables) vis06 = projectables[0] vis08 = projectables[1] hrv = projectables[2] try: ch3 = 3 * hrv - vis06 - vis08 ch3.attrs = hrv.attrs except ValueError: raise IncompatibleAreas ndvi = (vis08 - vis06) / (vis08 + vis06) ndvi = np.where(ndvi < 0, 0, ndvi) ch1 = ndvi * vis06 + (1 - ndvi) * vis08 ch1.attrs = vis06.attrs ch2 = ndvi * vis08 + (1 - ndvi) * vis06 ch2.attrs = vis08.attrs res = super(RealisticColors, self).__call__((ch1, ch2, ch3), *args, **kwargs) return res class 
CloudCompositor(GenericCompositor): """Detect clouds based on thresholding and use it as a mask for compositing.""" def __init__(self, name, transition_min=258.15, transition_max=298.15, transition_gamma=3.0, **kwargs): """Collect custom configuration values. Args: transition_min (float): Values below or equal to this are clouds -> opaque white transition_max (float): Values above this are cloud free -> transparent transition_gamma (float): Gamma correction to apply at the end """ self.transition_min = transition_min self.transition_max = transition_max self.transition_gamma = transition_gamma super(CloudCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite.""" data = projectables[0] # Default to rough IR thresholds # Values below or equal to this are clouds -> opaque white tr_min = self.transition_min # Values above this are cloud free -> transparent tr_max = self.transition_max # Gamma correction gamma = self.transition_gamma slope = 1 / (tr_min - tr_max) offset = 1 - slope * tr_min alpha = data.where(data > tr_min, 1.) alpha = alpha.where(data <= tr_max, 0.) alpha = alpha.where((data <= tr_min) | (data > tr_max), slope * data + offset) # gamma adjustment alpha **= gamma res = super(CloudCompositor, self).__call__((data, alpha), **kwargs) return res class RatioSharpenedRGB(GenericCompositor): """Sharpen RGB bands with ratio of a high resolution band to a lower resolution version. Any pixels where the ratio is computed to be negative or infinity, it is reset to 1. Additionally, the ratio is limited to 1.5 on the high end to avoid high changes due to small discrepancies in instrument detector footprint. Note that the input data to this compositor must already be resampled so all data arrays are the same shape. Example:: R_lo - 1000m resolution - shape=(2000, 2000) G - 1000m resolution - shape=(2000, 2000) B - 1000m resolution - shape=(2000, 2000) R_hi - 500m resolution - shape=(4000, 4000) ratio = R_hi / R_lo new_R = R_hi new_G = G * ratio new_B = B * ratio """ def __init__(self, *args, **kwargs): """Instanciate the ration sharpener.""" self.high_resolution_band = kwargs.pop("high_resolution_band", "red") if self.high_resolution_band not in ['red', 'green', 'blue', None]: raise ValueError("RatioSharpenedRGB.high_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.high_resolution_band)) kwargs.setdefault('common_channel_mask', False) super(RatioSharpenedRGB, self).__init__(*args, **kwargs) def _get_band(self, high_res, low_res, color, ratio): """Figure out what data should represent this color.""" if self.high_resolution_band == color: ret = high_res else: ret = low_res * ratio ret.attrs = low_res.attrs.copy() return ret def __call__(self, datasets, optional_datasets=None, **info): """Sharpen low resolution datasets by multiplying by the ratio of ``high_res / low_res``.""" if len(datasets) != 3: raise ValueError("Expected 3 datasets, got %d" % (len(datasets), )) if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \ (optional_datasets and optional_datasets[0].shape != datasets[0].shape): raise IncompatibleAreas('RatioSharpening requires datasets of ' 'the same size. 
Must resample first.') new_attrs = {} if optional_datasets: datasets = self.match_data_arrays(datasets + optional_datasets) p1 = datasets[0] p2 = datasets[1] p3 = datasets[2] high_res = datasets[3] if 'rows_per_scan' in high_res.attrs: new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) new_attrs.setdefault('resolution', high_res.attrs['resolution']) colors = ['red', 'green', 'blue'] if self.high_resolution_band in colors: LOG.debug("Sharpening image with high resolution {} band".format(self.high_resolution_band)) low_res = datasets[:3][colors.index(self.high_resolution_band)] ratio = high_res / low_res # make ratio a no-op (multiply by 1) where the ratio is NaN or # infinity or it is negative. ratio = ratio.where(np.isfinite(ratio) & (ratio >= 0), 1.) # we don't need ridiculously high ratios, they just make bright pixels ratio = ratio.clip(0, 1.5) else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None ratio = 1. r = self._get_band(high_res, p1, 'red', ratio) g = self._get_band(high_res, p2, 'green', ratio) b = self._get_band(high_res, p3, 'blue', ratio) else: datasets = self.match_data_arrays(datasets) r = datasets[0] g = datasets[1] b = datasets[2] # combine the masks mask = ~(r.isnull() | g.isnull() | b.isnull()) r = r.where(mask) g = g.where(mask) b = b.where(mask) # Collect information that is the same between the projectables # we want to use the metadata from the original datasets since the # new r, g, b arrays may have lost their metadata during calculations info = combine_metadata(*datasets) info.update(new_attrs) # Update that information with configured information (including name) info.update(self.attrs) # Force certain pieces of metadata that we *know* to be true info.setdefault("standard_name", "true_color") return super(RatioSharpenedRGB, self).__call__((r, g, b), **info) def _mean4(data, offset=(0, 0), block_id=None): rows, cols = data.shape # we assume that the chunks except the first ones are aligned if block_id[0] == 0: row_offset = offset[0] % 2 else: row_offset = 0 if block_id[1] == 0: col_offset = offset[1] % 2 else: col_offset = 0 row_after = (row_offset + rows) % 2 col_after = (col_offset + cols) % 2 pad = ((row_offset, row_after), (col_offset, col_after)) rows2 = rows + row_offset + row_after cols2 = cols + col_offset + col_after av_data = np.pad(data, pad, 'edge') new_shape = (int(rows2 / 2.), 2, int(cols2 / 2.), 2) data_mean = np.nanmean(av_data.reshape(new_shape), axis=(1, 3)) data_mean = np.repeat(np.repeat(data_mean, 2, axis=0), 2, axis=1) data_mean = data_mean[row_offset:row_offset + rows, col_offset:col_offset + cols] return data_mean class SelfSharpenedRGB(RatioSharpenedRGB): """Sharpen RGB with ratio of a band with a strided-version of itself. 
Example:: R - 500m resolution - shape=(4000, 4000) G - 1000m resolution - shape=(2000, 2000) B - 1000m resolution - shape=(2000, 2000) ratio = R / four_element_average(R) new_R = R new_G = G * ratio new_B = B * ratio """ @staticmethod def four_element_average_dask(d): """Average every 4 elements (2x2) in a 2D array.""" try: offset = d.attrs['area'].crop_offset except (KeyError, AttributeError): offset = (0, 0) res = d.data.map_blocks(_mean4, offset=offset, dtype=d.dtype) return xr.DataArray(res, attrs=d.attrs, dims=d.dims, coords=d.coords) def __call__(self, datasets, optional_datasets=None, **attrs): """Generate the composite.""" colors = ['red', 'green', 'blue'] if self.high_resolution_band not in colors: raise ValueError("SelfSharpenedRGB requires at least one high resolution band, not " "'{}'".format(self.high_resolution_band)) high_res = datasets[colors.index(self.high_resolution_band)] high_mean = self.four_element_average_dask(high_res) red = high_mean if self.high_resolution_band == 'red' else datasets[0] green = high_mean if self.high_resolution_band == 'green' else datasets[1] blue = high_mean if self.high_resolution_band == 'blue' else datasets[2] return super(SelfSharpenedRGB, self).__call__((red, green, blue), optional_datasets=(high_res,), **attrs) class LuminanceSharpeningCompositor(GenericCompositor): """Create a high resolution composite by sharpening a low resolution using high resolution luminance. This is done by converting to YCbCr colorspace, replacing Y, and convertin back to RGB. """ def __call__(self, projectables, *args, **kwargs): """Generate the composite.""" from trollimage.image import rgb2ycbcr, ycbcr2rgb projectables = self.match_data_arrays(projectables) luminance = projectables[0].copy() luminance /= 100. # Limit between min(luminance) ... 1.0 luminance = da.where(luminance > 1., 1., luminance) # Get the enhanced version of the composite to be sharpened rgb_img = enhance2dataset(projectables[1]) # This all will be eventually replaced with trollimage convert() method # ycbcr_img = rgb_img.convert('YCbCr') # ycbcr_img.data[0, :, :] = luminance # rgb_img = ycbcr_img.convert('RGB') # Replace luminance of the IR composite y__, cb_, cr_ = rgb2ycbcr(rgb_img.data[0, :, :], rgb_img.data[1, :, :], rgb_img.data[2, :, :]) r__, g__, b__ = ycbcr2rgb(luminance, cb_, cr_) y_size, x_size = r__.shape r__ = da.reshape(r__, (1, y_size, x_size)) g__ = da.reshape(g__, (1, y_size, x_size)) b__ = da.reshape(b__, (1, y_size, x_size)) rgb_img.data = da.vstack((r__, g__, b__)) return super(LuminanceSharpeningCompositor, self).__call__(rgb_img, *args, **kwargs) class SandwichCompositor(GenericCompositor): """Make a sandwich product.""" def __call__(self, projectables, *args, **kwargs): """Generate the composite.""" projectables = self.match_data_arrays(projectables) luminance = projectables[0] luminance = luminance / 100. # Limit between min(luminance) ... 1.0 luminance = luminance.clip(max=1.) # Get the enhanced version of the RGB composite to be sharpened rgb_img = enhance2dataset(projectables[1]) rgb_img *= luminance return super(SandwichCompositor, self).__call__(rgb_img, *args, **kwargs) # TODO: Turn this into a weighted RGB compositor class NaturalEnh(GenericCompositor): """Enhanced version of natural color composite by Simon Proud. Args: ch16_w (float): weight for red channel (1.6 um). Default: 1.3 ch08_w (float): weight for green channel (0.8 um). Default: 2.5 ch06_w (float): weight for blue channel (0.6 um). 
Default: 2.2 """ def __init__(self, name, ch16_w=1.3, ch08_w=2.5, ch06_w=2.2, *args, **kwargs): """Initialize the class.""" self.ch06_w = ch06_w self.ch08_w = ch08_w self.ch16_w = ch16_w super(NaturalEnh, self).__init__(name, *args, **kwargs) def __call__(self, projectables, *args, **kwargs): """Generate the composite.""" projectables = self.match_data_arrays(projectables) ch16 = projectables[0] ch08 = projectables[1] ch06 = projectables[2] ch1 = self.ch16_w * ch16 + self.ch08_w * ch08 + self.ch06_w * ch06 ch1.attrs = ch16.attrs ch2 = ch08 ch3 = ch06 return super(NaturalEnh, self).__call__((ch1, ch2, ch3), *args, **kwargs) class StaticImageCompositor(GenericCompositor, DataDownloadMixin): """A compositor that loads a static image from disk. Environment variables in the filename are automatically expanded. """ def __init__(self, name, filename=None, url=None, known_hash=None, area=None, **kwargs): """Collect custom configuration values. Args: filename (str): Name to use when storing and referring to the file in the ``data_dir`` cache. If ``url`` is provided (preferred), then this is used as the filename in the cache and will be appended to ``<data_dir>/composites/``. If ``url`` is provided and ``filename`` is not then the ``filename`` will be guessed from the ``url``. If ``url`` is not provided, then it is assumed ``filename`` refers to a local file. If the ``filename`` does not come with an absolute path, ``data_dir`` will be used as the directory path. Environment variables are expanded. url (str): URL to remote file. When the composite is created the file will be downloaded and cached in Satpy's ``data_dir``. Environment variables are expanded. known_hash (str or None): Hash of the remote file used to verify a successful download. If not provided then the download will not be verified. See :func:`satpy.aux_download.register_file` for more information. area (str): Name of area definition for the image. Optional for images with built-in area definitions (geotiff). Use cases: 1. url + no filename: Satpy determines the filename based on the filename in the URL, then downloads the URL, and saves it to ``<data_dir>/composites/<filename>``. If the file already exists and known_hash is also provided, then the pooch library compares the hash of the file to the known_hash. If it does not match, then the URL is re-downloaded. If it matches then no download is needed. 2. url + relative filename: Same as case 1 but filename is already provided so the download goes to ``<data_dir>/composites/<filename>``. Same hashing behavior. This does not check for an absolute path. 3. No url + absolute filename: No download, filename is passed directly to the generic_image reader. No hashing is done. 4. No url + relative filename: Check if ``<data_dir>/<filename>`` exists. If it does then make filename an absolute path. If it doesn't, then keep it as is and let the exception at the bottom of the method get raised.
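A minimal sketch of direct use outside of YAML configuration (the name and URL are
placeholders, and the call assumes the ``generic_image`` reader dependencies are
installed)::

    comp = StaticImageCompositor('night_background',
                                 url='https://example.com/background.tif',
                                 known_hash=None)
    img = comp()  # downloads to the cache (or reuses it) and returns the image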
""" filename, url = self._get_cache_filename_and_url(filename, url) self._cache_filename = filename self._url = url self._known_hash = known_hash self.area = None if area is not None: from satpy.resample import get_area_def self.area = get_area_def(area) super(StaticImageCompositor, self).__init__(name, **kwargs) cache_keys = self.register_data_files([]) self._cache_key = cache_keys[0] @staticmethod def _check_relative_filename(filename): data_dir = satpy.config.get('data_dir') path = os.path.join(data_dir, filename) return path if os.path.exists(path) else filename def _get_cache_filename_and_url(self, filename, url): if filename: filename = os.path.expanduser(os.path.expandvars(filename)) if not os.path.isabs(filename) and not url: filename = self._check_relative_filename(filename) if url: url = os.path.expandvars(url) if not filename: filename = os.path.basename(url) elif not filename or not os.path.isabs(filename): raise ValueError("StaticImageCompositor needs a remote 'url', " "or absolute path to 'filename', " "or an existing 'filename' relative to Satpy's 'data_dir'.") return filename, url def register_data_files(self, data_files): """Tell Satpy about files we may want to download.""" if os.path.isabs(self._cache_filename): return [None] return super().register_data_files([{ 'url': self._url, 'known_hash': self._known_hash, 'filename': self._cache_filename, }]) def _retrieve_data_file(self): from satpy.aux_download import retrieve if os.path.isabs(self._cache_filename): return self._cache_filename return retrieve(self._cache_key) def __call__(self, *args, **kwargs): """Call the compositor.""" from satpy import Scene local_file = self._retrieve_data_file() scn = Scene(reader='generic_image', filenames=[local_file]) scn.load(['image']) img = scn['image'] # use compositor parameters as extra metadata # most important: set 'name' of the image img.attrs.update(self.attrs) # Check for proper area definition. 
Non-georeferenced images # do not have `area` in the attributes if 'area' not in img.attrs: if self.area is None: raise AttributeError("Area definition needs to be configured") img.attrs['area'] = self.area img.attrs['sensor'] = None img.attrs['mode'] = ''.join(img.bands.data) img.attrs.pop('modifiers', None) img.attrs.pop('calibration', None) # Add start time if not present in the filename if 'start_time' not in img.attrs or not img.attrs['start_time']: import datetime as dt img.attrs['start_time'] = dt.datetime.utcnow() if 'end_time' not in img.attrs or not img.attrs['end_time']: import datetime as dt img.attrs['end_time'] = dt.datetime.utcnow() return img class BackgroundCompositor(GenericCompositor): """A compositor that overlays one composite on top of another.""" def __call__(self, projectables, *args, **kwargs): """Call the compositor.""" projectables = self.match_data_arrays(projectables) # Get enhanced datasets foreground = enhance2dataset(projectables[0], convert_p=True) background = enhance2dataset(projectables[1], convert_p=True) # Adjust bands so that they match # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA foreground = add_bands(foreground, background['bands']) background = add_bands(background, foreground['bands']) attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) data = self._get_merged_image_data(foreground, background) res = super(BackgroundCompositor, self).__call__(data, **kwargs) res.attrs.update(attrs) return res def _combine_metadata_with_mode_and_sensor(self, foreground: xr.DataArray, background: xr.DataArray ) -> dict: # Get merged metadata attrs = combine_metadata(foreground, background) # 'mode' is no longer valid after we've remove the 'A' # let the base class __call__ determine mode attrs.pop("mode", None) if attrs.get('sensor') is None: # sensor can be a set attrs['sensor'] = self._get_sensors([foreground, background]) return attrs @staticmethod def _get_merged_image_data(foreground: xr.DataArray, background: xr.DataArray ) -> list[xr.DataArray]: if 'A' in foreground.attrs['mode']: # Use alpha channel as weight and blend the two composites alpha = foreground.sel(bands='A') data = [] # NOTE: there's no alpha band in the output image, it will # be added by the data writer for band in foreground.mode[:-1]: fg_band = foreground.sel(bands=band) bg_band = background.sel(bands=band) chan = (fg_band * alpha + bg_band * (1 - alpha)) chan = xr.where(chan.isnull(), bg_band, chan) data.append(chan) else: data_arr = xr.where(foreground.isnull(), background, foreground) # Split to separate bands so the mode is correct data = [data_arr.sel(bands=b) for b in data_arr['bands']] return data class MaskingCompositor(GenericCompositor): """A compositor that masks e.g. IR 10.8 channel data using cloud products from NWC SAF.""" _supported_modes = {"LA", "RGBA"} def __init__(self, name, transparency=None, conditions=None, mode="LA", **kwargs): """Collect custom configuration values. Kwargs: transparency (dict): transparency for each cloud type as key-value pairs in a dictionary. Will be converted to `conditions`. DEPRECATED. conditions (list): list of three items determining the masking settings. mode (str, optional): Image mode to return. For single-band input, this shall be "LA" (default) or "RGBA". For multi-band input, this argument is ignored as the result is always RGBA. Each condition in *conditions* consists of three items: - `method`: Numpy method name. 
The following are supported operations: `less`, `less_equal`, `equal`, `greater_equal`, `greater`, `not_equal`, `isnan`, `isfinite`, `isinf`, `isneginf`, or `isposinf`. - `value`: threshold value of the *mask* applied with the operator. Can be a string, in which case the corresponding value will be determined from `flag_meanings` and `flag_values` attributes of the mask. NOTE: the `value` should not be given to 'is*` methods. - `transparency`: transparency from interval [0 ... 100] used for the method/threshold. Value of 100 is fully transparent. Example:: >>> conditions = [{'method': 'greater_equal', 'value': 0, 'transparency': 100}, {'method': 'greater_equal', 'value': 1, 'transparency': 80}, {'method': 'greater_equal', 'value': 2, 'transparency': 0}, {'method': 'isnan', 'transparency': 100}] >>> compositor = MaskingCompositor("masking compositor", transparency=transparency) >>> result = compositor([data, mask]) This will set transparency of `data` based on the values in the `mask` dataset. Locations where `mask` has values of `0` will be fully transparent, locations with `1` will be semi-transparent and locations with `2` will be fully visible in the resulting image. In the end all `NaN` areas in the mask are set to full transparency. All the unlisted locations will be visible. The transparency is implemented by adding an alpha layer to the composite. The locations with transparency of `100` will be set to NaN in the data. If the input `data` contains an alpha channel, it will be discarded. """ if transparency: LOG.warning("Using 'transparency' is deprecated in " "MaskingCompositor, use 'conditions' instead.") self.conditions = [] for key, transp in transparency.items(): self.conditions.append({'method': 'equal', 'value': key, 'transparency': transp}) LOG.info("Converted 'transparency' to 'conditions': %s", str(self.conditions)) else: self.conditions = conditions if self.conditions is None: raise ValueError("Masking conditions not defined.") if mode not in self._supported_modes: raise ValueError(f"Invalid mode {mode!s}. Supported modes: " + ", ".join(self._supported_modes)) self.mode = mode super(MaskingCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, *args, **kwargs): """Call the compositor.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) data_in = projectables[0] mask_in = projectables[1] alpha_attrs = data_in.attrs.copy() data = self._select_data_bands(data_in) alpha = self._get_alpha_bands(data, mask_in, alpha_attrs) data.append(alpha) res = super(MaskingCompositor, self).__call__(data, **kwargs) return res def _get_mask(self, method, value, mask_data): """Get mask array from *mask_data* using *method* and threshold *value*. The *method* is the name of a numpy function. """ if method not in MASKING_COMPOSITOR_METHODS: raise AttributeError("Unsupported Numpy method %s, use one of %s", method, str(MASKING_COMPOSITOR_METHODS)) func = getattr(np, method) if value is None: return func(mask_data) return func(mask_data, value) def _set_data_nans(self, data, mask, attrs): """Set *data* to nans where *mask* is True. The attributes *attrs** will be written to each band in *data*. """ for i, dat in enumerate(data): data[i] = xr.where(mask, np.nan, dat) data[i].attrs = attrs return data def _select_data_bands(self, data_in): """Select data to be composited from input data. From input data, select the bands that need to have masking applied. 
""" if 'bands' in data_in.dims: return [data_in.sel(bands=b) for b in data_in['bands'] if b != 'A'] if self.mode == "RGBA": return [data_in, data_in, data_in] return [data_in] def _get_alpha_bands(self, data, mask_in, alpha_attrs): """Get alpha bands. From input data, masks, and attributes, get alpha band. """ # Create alpha band mask_data = mask_in.data alpha = da.ones((data[0].sizes['y'], data[0].sizes['x']), chunks=data[0].chunks) for condition in self.conditions: method = condition['method'] value = condition.get('value', None) if isinstance(value, str): value = _get_flag_value(mask_in, value) transparency = condition['transparency'] mask = self._get_mask(method, value, mask_data) if transparency == 100.0: data = self._set_data_nans(data, mask, alpha_attrs) alpha_val = 1. - transparency / 100. alpha = da.where(mask, alpha_val, alpha) return xr.DataArray(data=alpha, attrs=alpha_attrs, dims=data[0].dims, coords=data[0].coords) def _get_flag_value(mask, val): """Get a numerical value of the named flag. This function assumes the naming used in product generated with NWC SAF GEO/PPS softwares. """ flag_meanings = mask.attrs['flag_meanings'] flag_values = mask.attrs['flag_values'] if isinstance(flag_meanings, str): flag_meanings = flag_meanings.split() index = flag_meanings.index(val) return flag_values[index] class LongitudeMaskingCompositor(GenericCompositor): """Masks areas outside defined longitudes.""" def __init__(self, name, lon_min=None, lon_max=None, **kwargs): """Collect custom configuration values. Args: lon_min (float): lower longitude limit lon_max (float): upper longitude limit """ self.lon_min = lon_min self.lon_max = lon_max if self.lon_min is None and self.lon_max is None: raise ValueError("Masking conditions not defined. \ At least lon_min or lon_max has to be specified.") if not self.lon_min: self.lon_min = -180. if not self.lon_max: self.lon_max = 180. super(LongitudeMaskingCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectable = projectables[0] lons, lats = projectable.attrs["area"].get_lonlats() if self.lon_max > self.lon_min: lon_min_max = np.logical_and(lons >= self.lon_min, lons <= self.lon_max) else: lon_min_max = np.logical_or(lons >= self.lon_min, lons <= self.lon_max) masked_projectable = projectable.where(lon_min_max) return super(LongitudeMaskingCompositor, self).__call__([masked_projectable], **info) satpy-0.34.0/satpy/composites/abi.py000066400000000000000000000045161420401153000173640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the ABI instrument.""" import logging from satpy.composites import GenericCompositor LOG = logging.getLogger(__name__) class SimulatedGreen(GenericCompositor): """A single-band dataset resembling a Green (0.55 µm) band. 
This compositor creates a single band product by combining three other bands in various amounts. The general formula with dependencies (d) and fractions (f) is:: result = d1 * f1 + d2 * f2 + d3 * f3 See the `fractions` keyword argument for more information. Common used fractions for ABI data with C01, C02, and C03 inputs include: - SatPy default (historical): (0.465, 0.465, 0.07) - `CIMSS (Kaba) `_: (0.45, 0.45, 0.10) - `EDC `_: (0.45706946, 0.48358168, 0.06038137) """ def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs): """Initialize fractions for input channels. Args: name (str): Name of this composite fractions (iterable): Fractions of each input band to include in the result. """ self.fractions = fractions super(SimulatedGreen, self).__init__(name, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Generate the single band composite.""" c01, c02, c03 = self.match_data_arrays(projectables) res = c01 * self.fractions[0] + c02 * self.fractions[1] + c03 * self.fractions[2] res.attrs = c03.attrs.copy() return super(SimulatedGreen, self).__call__((res,), **attrs) satpy-0.34.0/satpy/composites/ahi.py000066400000000000000000000034161420401153000173700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the AHI instrument.""" import logging from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata LOG = logging.getLogger(__name__) class GreenCorrector(GenericCompositor): """Corrector of the AHI green band to compensate for the deficit of chlorophyll signal.""" def __init__(self, *args, fractions=(0.85, 0.15), **kwargs): """Set default keyword argument values.""" # XXX: Should this be 0.93 and 0.07 self.fractions = fractions super(GreenCorrector, self).__init__(*args, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Boost vegetation effect thanks to NIR (0.8µm) band.""" LOG.info('Boosting vegetation on green band') projectables = self.match_data_arrays(projectables) new_green = sum(fraction * value for fraction, value in zip(self.fractions, projectables)) new_green.attrs = combine_metadata(*projectables) return super(GreenCorrector, self).__call__((new_green,), **attrs) satpy-0.34.0/satpy/composites/cloud_products.py000066400000000000000000000107071420401153000216610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Compositors for cloud products.""" import numpy as np from satpy.composites import ColormapCompositor, GenericCompositor class CloudTopHeightCompositor(ColormapCompositor): """Colorize with a palette, put cloud-free pixels as black.""" @staticmethod def build_colormap(palette, info): """Create the colormap from the `raw_palette` and the valid_range.""" from trollimage.colormap import Colormap if 'palette_meanings' in palette.attrs: palette_indices = palette.attrs['palette_meanings'] else: palette_indices = range(len(palette)) squeezed_palette = np.asanyarray(palette).squeeze() / 255.0 tups = [(val, tuple(tup)) for (val, tup) in zip(palette_indices, squeezed_palette)] colormap = Colormap(*tups) if 'palette_meanings' not in palette.attrs: sf = info.get('scale_factor', np.array(1)) colormap.set_range( *(np.array(info['valid_range']) * sf + info.get('add_offset', 0))) return colormap, squeezed_palette def __call__(self, projectables, **info): """Create the composite.""" if len(projectables) != 3: raise ValueError("Expected 3 datasets, got %d" % (len(projectables), )) data, palette, status = projectables fill_value_color = palette.attrs.get("fill_value_color", [0, 0, 0]) colormap, palette = self.build_colormap(palette, data.attrs) mapped_channels = colormap.colorize(data.data) valid = status != status.attrs['_FillValue'] # cloud-free pixels are marked invalid (fill_value in ctth_alti) but have status set to 1. status_not_cloud_free = status % 2 == 0 not_cloud_free = np.logical_or(status_not_cloud_free, np.logical_not(valid)) channels = [] for (channel, cloud_free_color) in zip(mapped_channels, fill_value_color): channel_data = self._create_masked_dataarray_like(channel, data, valid) # Set cloud-free pixels as fill_value_color channels.append(channel_data.where(not_cloud_free, cloud_free_color)) res = GenericCompositor.__call__(self, channels, **data.attrs) res.attrs['_FillValue'] = np.nan return res class PrecipCloudsRGB(GenericCompositor): """Precipitation clouds compositor.""" def __call__(self, projectables, *args, **kwargs): """Make an RGB image out of the three probability categories of the NWCSAF precip product.""" projectables = self.match_data_arrays(projectables) light = projectables[0] moderate = projectables[1] intense = projectables[2] status_flag = projectables[3] if np.bitwise_and(status_flag, 4).any(): # AMSU is used maxs1 = 70 maxs2 = 70 maxs3 = 100 else: # avhrr only maxs1 = 30 maxs2 = 50 maxs3 = 40 scalef3 = 1.0 / maxs3 - 1 / 255.0 scalef2 = 1.0 / maxs2 - 1 / 255.0 scalef1 = 1.0 / maxs1 - 1 / 255.0 p1data = (light*scalef1).where(light != 0) p1data = p1data.where(light != light.attrs['_FillValue']) p1data.attrs = light.attrs data = moderate*scalef2 p2data = data.where(moderate != 0) p2data = p2data.where(moderate != moderate.attrs['_FillValue']) p2data.attrs = moderate.attrs data = intense*scalef3 p3data = data.where(intense != 0) p3data = p3data.where(intense != intense.attrs['_FillValue']) p3data.attrs = intense.attrs res = super(PrecipCloudsRGB, self).__call__((p3data, p2data, p1data), *args, **kwargs) return res 
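# --- Illustrative sketch only (not part of satpy): exercising the
# ``build_colormap`` helper above in isolation. The palette array, its
# dimension names and the ``palette_meanings`` values are made-up example
# data; because ``palette_meanings`` is present, no ``valid_range`` /
# ``scale_factor`` handling is triggered, so an empty ``info`` dict suffices.
if __name__ == "__main__":
    import xarray as xr

    example_palette = xr.DataArray(
        np.array([[0, 0, 0], [0, 128, 255], [255, 255, 255]], dtype=np.uint8),
        dims=("pal_colors", "rgb"),
        attrs={"palette_meanings": [0, 1, 2]},
    )
    colormap, squeezed = CloudTopHeightCompositor.build_colormap(example_palette, info={})
    # ``colormap`` is a trollimage Colormap mapping the values 0, 1, 2 to the
    # palette colors; ``squeezed`` is the palette normalised to the 0..1 range.
    print(colormap.values)
    print(squeezed)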
satpy-0.34.0/satpy/composites/config_loader.py000066400000000000000000000302431420401153000214200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Classes for loading compositor and modifier configuration files.""" from __future__ import annotations import logging import os import warnings from functools import lru_cache, update_wrapper from typing import Callable, Iterable import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader # type: ignore import satpy from satpy import DataID, DataQuery from satpy._config import config_search_paths, get_entry_points_config_dirs, glob_config from satpy.dataset.dataid import minimal_default_keys_config from satpy.utils import recursive_dict_update logger = logging.getLogger(__name__) def _convert_dep_info_to_data_query(dep_info): key_item = dep_info.copy() key_item.pop('prerequisites', None) key_item.pop('optional_prerequisites', None) if 'modifiers' in key_item: key_item['modifiers'] = tuple(key_item['modifiers']) key = DataQuery.from_dict(key_item) return key class _CompositeConfigHelper: """Helper class for parsing composite configurations. The provided `loaded_compositors` dictionary is updated inplace. """ def __init__(self, loaded_compositors, sensor_id_keys): self.loaded_compositors = loaded_compositors self.sensor_id_keys = sensor_id_keys def _create_comp_from_info(self, composite_info, loader): key = DataID(self.sensor_id_keys, **composite_info) comp = loader(_satpy_id=key, **composite_info) return key, comp def _handle_inline_comp_dep(self, dep_info, dep_num, parent_name): # Create an unique temporary name for the composite sub_comp_name = '_' + parent_name + '_dep_{}'.format(dep_num) dep_info['name'] = sub_comp_name self._load_config_composite(dep_info) @staticmethod def _get_compositor_loader_from_config(composite_name, composite_info): try: loader = composite_info.pop('compositor') except KeyError: raise ValueError("'compositor' key missing or empty for '{}'. 
Option keys = {}".format( composite_name, str(composite_info.keys()))) return loader def _process_composite_deps(self, composite_info): dep_num = -1 for prereq_type in ['prerequisites', 'optional_prerequisites']: prereqs = [] for dep_info in composite_info.get(prereq_type, []): dep_num += 1 if not isinstance(dep_info, dict): prereqs.append(dep_info) continue elif 'compositor' in dep_info: self._handle_inline_comp_dep( dep_info, dep_num, composite_info['name']) prereq_key = _convert_dep_info_to_data_query(dep_info) prereqs.append(prereq_key) composite_info[prereq_type] = prereqs def _load_config_composite(self, composite_info): composite_name = composite_info['name'] loader = self._get_compositor_loader_from_config(composite_name, composite_info) self._process_composite_deps(composite_info) key, comp = self._create_comp_from_info(composite_info, loader) self.loaded_compositors[key] = comp def _load_config_composites(self, configured_composites): for composite_name, composite_info in configured_composites.items(): composite_info['name'] = composite_name self._load_config_composite(composite_info) def parse_config(self, configured_composites, composite_configs): """Parse composite configuration dictionary.""" try: self._load_config_composites(configured_composites) except (ValueError, KeyError): raise RuntimeError("Failed to load composites from configs " "'{}'".format(composite_configs)) class _ModifierConfigHelper: """Helper class for parsing modifier configurations. The provided `loaded_modifiers` dictionary is updated inplace. """ def __init__(self, loaded_modifiers, sensor_id_keys): self.loaded_modifiers = loaded_modifiers self.sensor_id_keys = sensor_id_keys @staticmethod def _get_modifier_loader_from_config(modifier_name, modifier_info): try: loader = modifier_info.pop('modifier', None) if loader is None: loader = modifier_info.pop('compositor') warnings.warn("Modifier '{}' uses deprecated 'compositor' " "key to point to Python class, replace " "with 'modifier'.".format(modifier_name)) except KeyError: raise ValueError("'modifier' key missing or empty for '{}'. 
Option keys = {}".format( modifier_name, str(modifier_info.keys()))) return loader def _process_modifier_deps(self, modifier_info): for prereq_type in ['prerequisites', 'optional_prerequisites']: prereqs = [] for dep_info in modifier_info.get(prereq_type, []): if not isinstance(dep_info, dict): prereqs.append(dep_info) continue prereq_key = _convert_dep_info_to_data_query(dep_info) prereqs.append(prereq_key) modifier_info[prereq_type] = prereqs def _load_config_modifier(self, modifier_info): modifier_name = modifier_info['name'] loader = self._get_modifier_loader_from_config(modifier_name, modifier_info) self._process_modifier_deps(modifier_info) self.loaded_modifiers[modifier_name] = (loader, modifier_info) def _load_config_modifiers(self, configured_modifiers): for modifier_name, modifier_info in configured_modifiers.items(): modifier_info['name'] = modifier_name self._load_config_modifier(modifier_info) def parse_config(self, configured_modifiers, composite_configs): """Parse modifier configuration dictionary.""" try: self._load_config_modifiers(configured_modifiers) except (ValueError, KeyError): raise RuntimeError("Failed to load modifiers from configs " "'{}'".format(composite_configs)) def _load_config(composite_configs): if not isinstance(composite_configs, (list, tuple)): composite_configs = [composite_configs] conf = {} for composite_config in composite_configs: with open(composite_config, 'r', encoding='utf-8') as conf_file: conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader)) try: sensor_name = conf['sensor_name'] except KeyError: logger.debug('No "sensor_name" tag found in %s, skipping.', composite_configs) return sensor_compositors = {} # DatasetDict() sensor_modifiers = {} dep_id_keys = None sensor_deps = sensor_name.split('/')[:-1] if sensor_deps: # get dependent for sensor_dep in sensor_deps: dep_comps, dep_mods, dep_id_keys = load_compositor_configs_for_sensor(sensor_dep) # the last parent should include all of its parents so only add the last one sensor_compositors.update(dep_comps) sensor_modifiers.update(dep_mods) id_keys = _get_sensor_id_keys(conf, dep_id_keys) mod_config_helper = _ModifierConfigHelper(sensor_modifiers, id_keys) configured_modifiers = conf.get('modifiers', {}) mod_config_helper.parse_config(configured_modifiers, composite_configs) comp_config_helper = _CompositeConfigHelper(sensor_compositors, id_keys) configured_composites = conf.get('composites', {}) comp_config_helper.parse_config(configured_composites, composite_configs) return sensor_compositors, sensor_modifiers, id_keys def _get_sensor_id_keys(conf, parent_id_keys): try: id_keys = conf['composite_identification_keys'] except KeyError: id_keys = parent_id_keys if not id_keys: id_keys = minimal_default_keys_config return id_keys def _lru_cache_with_config_path(func: Callable): """Use lru_cache but include satpy's current config_path.""" @lru_cache() def _call_without_config_path_wrapper(sensor_name, _): return func(sensor_name) def _add_config_path_wrapper(sensor_name: str): config_path = satpy.config.get("config_path") # make sure config_path is hashable, but keep original order since it matters config_path = tuple(config_path) return _call_without_config_path_wrapper(sensor_name, config_path) wrapper = update_wrapper(_add_config_path_wrapper, func) wrapper = _update_cached_wrapper(wrapper, _call_without_config_path_wrapper) return wrapper def _update_cached_wrapper(wrapper, cached_func): for meth_name in ("cache_clear", "cache_parameters", "cache_info"): if 
hasattr(cached_func, meth_name): setattr(wrapper, meth_name, getattr(cached_func, meth_name)) return wrapper @_lru_cache_with_config_path def load_compositor_configs_for_sensor(sensor_name: str) -> tuple[dict[str, dict], dict[str, dict], dict]: """Load compositor, modifier, and DataID key information from configuration files for the specified sensor. Args: sensor_name: Sensor name that has matching ``sensor_name.yaml`` config files. Returns: (comps, mods, data_id_keys): Where `comps` is a dictionary: composite ID -> compositor object And `mods` is a dictionary: modifier name -> (modifier class, modifiers options) Add `data_id_keys` is a dictionary: DataID key -> key properties """ config_filename = sensor_name + ".yaml" logger.debug("Looking for composites config file %s", config_filename) paths = get_entry_points_config_dirs('satpy.composites') composite_configs = config_search_paths( os.path.join("composites", config_filename), search_dirs=paths, check_exists=True) if not composite_configs: logger.debug("No composite config found called %s", config_filename) return {}, {}, minimal_default_keys_config return _load_config(composite_configs) def load_compositor_configs_for_sensors(sensor_names: Iterable[str]) -> tuple[dict[str, dict], dict[str, dict]]: """Load compositor and modifier configuration files for the specified sensors. Args: sensor_names (list of strings): Sensor names that have matching ``sensor_name.yaml`` config files. Returns: (comps, mods): Where `comps` is a dictionary: sensor_name -> composite ID -> compositor object And `mods` is a dictionary: sensor_name -> modifier name -> (modifier class, modifiers options) """ comps = {} mods = {} for sensor_name in sensor_names: sensor_comps, sensor_mods = load_compositor_configs_for_sensor(sensor_name)[:2] comps[sensor_name] = sensor_comps mods[sensor_name] = sensor_mods return comps, mods def all_composite_sensors(): """Get all sensor names from available composite configs.""" paths = get_entry_points_config_dirs('satpy.composites') composite_configs = glob_config( os.path.join("composites", "*.yaml"), search_dirs=paths) yaml_names = set([os.path.splitext(os.path.basename(fn))[0] for fn in composite_configs]) non_sensor_yamls = ('visir',) sensor_names = [x for x in yaml_names if x not in non_sensor_yamls] return sensor_names satpy-0.34.0/satpy/composites/glm.py000066400000000000000000000114251420401153000174050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the GLM instrument.""" import logging import xarray as xr from satpy.composites import GenericCompositor from satpy.writers import get_enhanced_image LOG = logging.getLogger(__name__) class HighlightCompositor(GenericCompositor): """Highlight pixels of a layer by an amount determined by a secondary layer. 
The highlighting is applied per channel to either add or subtract an intensity from the primary image. In the addition case, the code is essentially doing:: highlight_factor = (highlight_data - min_highlight) / (max_highlight - min_highlight) channel_result = primary_data + highlight_factor * max_factor The ``max_factor`` is defined per channel and can be positive for an additive effect, negative for a subtractive effect, or zero for no effect. """ def __init__(self, name, min_highlight=0.0, max_highlight=10.0, max_factor=(0.8, 0.8, -0.8, 0), **kwargs): """Initialize composite with highlight factor options. Args: min_highlight (float): Minimum raw value of the "highlight" data that will be used for linearly scaling the data along with ``max_hightlight``. max_highlight (float): Maximum raw value of the "highlight" data that will be used for linearly scaling the data along with ``min_hightlight``. max_factor (tuple): Maximum effect that the highlight data can have on each channel of the primary image data. This will be multiplied by the linearly scaled highlight data and then added or subtracted from the highlight channels. See class docstring for more information. By default this is set to ``(0.8, 0.8, -0.8, 0)`` meaning the Red and Green channel will be added to by at most 0.8, the Blue channel will be subtracted from by at most 0.8, and the Alpha channel will not be effected. """ self.min_highlight = min_highlight self.max_highlight = max_highlight self.max_factor = max_factor super().__init__(name, **kwargs) @staticmethod def _get_enhanced_background_data(background_layer): img = get_enhanced_image(background_layer) img.data = img.data.clip(0.0, 1.0) img = img.convert('RGBA') return img.data def _get_highlight_factor(self, highlight_data): factor = (highlight_data - self.min_highlight) / (self.max_highlight - self.min_highlight) factor = factor.where(factor.notnull(), 0) return factor def _apply_highlight_effect(self, background_data, factor): new_channels = [] for max_factor, band_name in zip(self.max_factor, "RGBA"): new_channel = background_data.sel(bands=[band_name]) if max_factor != 0 or max_factor is not None: new_channel = new_channel + factor * max_factor new_channels.append(new_channel) return new_channels def _update_attrs(self, new_data, background_layer, highlight_layer): new_data.attrs = background_layer.attrs.copy() new_data.attrs['units'] = 1 new_sensors = self._get_sensors((highlight_layer, background_layer)) new_data.attrs.update({ 'sensor': new_sensors, }) def __call__(self, projectables, optional_datasets=None, **attrs): """Create RGBA image with highlighted pixels.""" highlight_product, background_layer = self.match_data_arrays(projectables) background_data = self._get_enhanced_background_data(background_layer) # Adjust the colors of background by highlight layer factor = self._get_highlight_factor(highlight_product) new_channels = self._apply_highlight_effect(background_data, factor) new_data = xr.concat(new_channels, dim='bands') self._update_attrs(new_data, background_layer, highlight_product) return super(HighlightCompositor, self).__call__((new_data,), **attrs) satpy-0.34.0/satpy/composites/sar.py000066400000000000000000000100121420401153000174020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the VIIRS instrument.""" import logging import numpy as np from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata LOG = logging.getLogger(__name__) def overlay(top, bottom, maxval=None): """Blending two layers. from: https://docs.gimp.org/en/gimp-concepts-layer-modes.html """ if maxval is None: maxval = np.maximum(top.max(), bottom.max()) res = ((2 * top / maxval - 1) * bottom + 2 * top) * bottom / maxval return res.clip(min=0) def soft_light(top, bottom, maxval): """Apply soft light. http://www.pegtop.net/delphi/articles/blendmodes/softlight.htm """ a = top / maxval b = bottom / maxval return (2*a*b + a*a * (1 - 2*b)) * maxval class SARIce(GenericCompositor): """The SAR Ice composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR Ice composite.""" (mhh, mhv) = projectables ch1attrs = mhh.attrs ch2attrs = mhv.attrs mhh = np.sqrt(mhh + 0.002) - 0.04 mhv = np.sqrt(mhv + 0.002) - 0.04 mhh.attrs = ch1attrs mhv.attrs = ch2attrs green = overlay(mhh, mhv, 30) * 1000 green.attrs = combine_metadata(mhh, mhv) return super(SARIce, self).__call__((mhv, green, mhh), *args, **kwargs) def _square_root_channels(*projectables): """Return the square root of the channels, preserving the attributes.""" results = [] for projectable in projectables: attrs = projectable.attrs projectable = np.sqrt(projectable) projectable.attrs = attrs results.append(projectable) return results class SARIceLegacy(GenericCompositor): """The SAR Ice composite, legacy version with dynamic stretching.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR RGB composite.""" mhh, mhv = _square_root_channels(*projectables) green = overlay(mhh, mhv) green.attrs = combine_metadata(mhh, mhv) return super(SARIceLegacy, self).__call__((mhv, green, mhh), *args, **kwargs) class SARIceLog(GenericCompositor): """The SAR Ice composite, using log-scale data.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR Ice Log composite.""" mhh, mhv = projectables mhh = mhh.clip(-40) mhv = mhv.clip(-38) green = soft_light(mhh + 100, mhv + 100, 100) - 100 green.attrs = combine_metadata(mhh, mhv) return super().__call__((mhv, green, mhh), *args, **kwargs) class SARRGB(GenericCompositor): """The SAR RGB composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR RGB composite.""" mhh, mhv = _square_root_channels(*projectables) green = overlay(mhh, mhv) green.attrs = combine_metadata(mhh, mhv) return super(SARRGB, self).__call__((-mhv, -green, -mhh), *args, **kwargs) class SARQuickLook(GenericCompositor): """The SAR QuickLook composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR QuickLook composite.""" mhh, mhv = _square_root_channels(*projectables) blue = mhv / mhh blue.attrs = combine_metadata(mhh, mhv) return super(SARQuickLook, self).__call__((mhh, mhv, blue), *args, **kwargs) 
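# --- Illustrative sketch only (not part of satpy): a quick check of the
# module-level blending helpers defined above. ``overlay`` follows the GIMP
# overlay layer mode and ``soft_light`` the Pegtop soft-light formula, both
# scaled by ``maxval``. The small arrays below are arbitrary example values.
if __name__ == "__main__":
    top = np.array([[10.0, 20.0], [30.0, 40.0]])
    bottom = np.array([[5.0, 15.0], [25.0, 35.0]])

    # Overlay blend scaled to a maximum of 40; negative results are clipped to 0.
    blended = overlay(top, bottom, maxval=40.0)

    # Soft-light blend with the same scaling; SARIceLog applies this to data
    # shifted by +100 to keep the inputs positive.
    softened = soft_light(top, bottom, maxval=40.0)

    print(blended)
    print(softened)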
satpy-0.34.0/satpy/composites/viirs.py000066400000000000000000001256671420401153000200000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the VIIRS instrument.""" import logging import dask import dask.array as da import numpy as np import xarray as xr from satpy.composites import CompositeBase, GenericCompositor from satpy.dataset import combine_metadata LOG = logging.getLogger(__name__) class HistogramDNB(CompositeBase): """Histogram equalized DNB composite. The logic for this code was taken from Polar2Grid and was originally developed by Eva Schiffer (SSEC). This composite separates the DNB data in to 3 main regions: Day, Night, and Mixed. Each region is equalized separately to bring out the most information from the region due to the high dynamic range of the DNB data. Optionally, the mixed region can be separated in to multiple smaller regions by using the `mixed_degree_step` keyword. """ def __init__(self, *args, **kwargs): """Initialize the compositor with values from the user or from the configuration file. :param high_angle_cutoff: solar zenith angle threshold in degrees, values above this are considered "night" :param low_angle_cutoff: solar zenith angle threshold in degrees, values below this are considered "day" :param mixed_degree_step: Step interval to separate "mixed" region in to multiple parts by default does whole mixed region """ self.high_angle_cutoff = int(kwargs.pop("high_angle_cutoff", 100)) self.low_angle_cutoff = int(kwargs.pop("low_angle_cutoff", 88)) self.mixed_degree_step = int(kwargs.pop( "mixed_degree_step")) if "mixed_degree_step" in kwargs else None super(HistogramDNB, self).__init__(*args, **kwargs) def _run_dnb_normalization(self, dnb_data, sza_data): """Scale the DNB data using a histogram equalization method. 
Args: dnb_data (ndarray): Day/Night Band data array sza_data (ndarray): Solar Zenith Angle data array """ # convert dask arrays to DataArray objects dnb_data = xr.DataArray(dnb_data, dims=('y', 'x')) sza_data = xr.DataArray(sza_data, dims=('y', 'x')) good_mask = ~(dnb_data.isnull() | sza_data.isnull()) output_dataset = dnb_data.where(good_mask) # we only need the numpy array output_dataset = output_dataset.values.copy() dnb_data = dnb_data.values sza_data = sza_data.values day_mask, mixed_mask, night_mask = make_day_night_masks( sza_data, good_mask.values, self.high_angle_cutoff, self.low_angle_cutoff, stepsDegrees=self.mixed_degree_step) did_equalize = False if day_mask.any(): LOG.debug("Histogram equalizing DNB day data...") histogram_equalization(dnb_data, day_mask, out=output_dataset) did_equalize = True if mixed_mask: for mask in mixed_mask: if mask.any(): LOG.debug("Histogram equalizing DNB mixed data...") histogram_equalization(dnb_data, mask, out=output_dataset) did_equalize = True if night_mask.any(): LOG.debug("Histogram equalizing DNB night data...") histogram_equalization(dnb_data, night_mask, out=output_dataset) did_equalize = True if not did_equalize: raise RuntimeError("No valid data found to histogram equalize") return output_dataset def __call__(self, datasets, **info): """Create the composite by scaling the DNB data using a histogram equalization method. :param datasets: 2-element tuple (Day/Night Band data, Solar Zenith Angle data) :param **info: Miscellaneous metadata for the newly produced composite """ if len(datasets) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(datasets), )) dnb_data = datasets[0] sza_data = datasets[1] delayed = dask.delayed(self._run_dnb_normalization)(dnb_data.data, sza_data.data) output_dataset = dnb_data.copy() output_data = da.from_delayed(delayed, dnb_data.shape, dnb_data.dtype) output_dataset.data = output_data.rechunk(dnb_data.data.chunks) info = dnb_data.attrs.copy() info.update(self.attrs) info["standard_name"] = "equalized_radiance" info["mode"] = "L" output_dataset.attrs = info return output_dataset class AdaptiveDNB(HistogramDNB): """Adaptive histogram equalized DNB composite. The logic for this code was taken from Polar2Grid and was originally developed by Eva Schiffer (SSEC). This composite separates the DNB data in to 3 main regions: Day, Night, and Mixed. Each region is equalized separately to bring out the most information from the region due to the high dynamic range of the DNB data. Optionally, the mixed region can be separated in to multiple smaller regions by using the `mixed_degree_step` keyword. """ def __init__(self, *args, **kwargs): """Initialize the compositor with values from the user or from the configuration file. Adaptive histogram equalization and regular histogram equalization can be configured independently for each region: day, night, or mixed. A region can be set to use adaptive equalization "always", or "never", or only when there are multiple regions in a single scene "multiple" via the `adaptive_X` keyword arguments (see below). :param adaptive_day: one of ("always", "multiple", "never") meaning when adaptive equalization is used. :param adaptive_mixed: one of ("always", "multiple", "never") meaning when adaptive equalization is used. :param adaptive_night: one of ("always", "multiple", "never") meaning when adaptive equalization is used. 
""" self.adaptive_day = kwargs.pop("adaptive_day", "always") self.adaptive_mixed = kwargs.pop("adaptive_mixed", "always") self.adaptive_night = kwargs.pop("adaptive_night", "always") self.day_radius_pixels = int(kwargs.pop("day_radius_pixels", 400)) self.mixed_radius_pixels = int(kwargs.pop("mixed_radius_pixels", 100)) self.night_radius_pixels = int(kwargs.pop("night_radius_pixels", 400)) super(AdaptiveDNB, self).__init__(*args, **kwargs) def _run_dnb_normalization(self, dnb_data, sza_data): """Scale the DNB data using a adaptive histogram equalization method. Args: dnb_data (ndarray): Day/Night Band data array sza_data (ndarray): Solar Zenith Angle data array """ # convert dask arrays to DataArray objects dnb_data = xr.DataArray(dnb_data, dims=('y', 'x')) sza_data = xr.DataArray(sza_data, dims=('y', 'x')) good_mask = ~(dnb_data.isnull() | sza_data.isnull()) # good_mask = ~(dnb_data.mask | sza_data.mask) output_dataset = dnb_data.where(good_mask) # we only need the numpy array output_dataset = output_dataset.values.copy() dnb_data = dnb_data.values sza_data = sza_data.values day_mask, mixed_mask, night_mask = make_day_night_masks( sza_data, good_mask.values, self.high_angle_cutoff, self.low_angle_cutoff, stepsDegrees=self.mixed_degree_step) did_equalize = False has_multi_times = len(mixed_mask) > 0 if day_mask.any(): did_equalize = True if self.adaptive_day == "always" or ( has_multi_times and self.adaptive_day == "multiple"): LOG.debug("Adaptive histogram equalizing DNB day data...") local_histogram_equalization( dnb_data, day_mask, valid_data_mask=good_mask.values, local_radius_px=self.day_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB day data...") histogram_equalization(dnb_data, day_mask, out=output_dataset) if mixed_mask: for mask in mixed_mask: if mask.any(): did_equalize = True if self.adaptive_mixed == "always" or ( has_multi_times and self.adaptive_mixed == "multiple"): LOG.debug( "Adaptive histogram equalizing DNB mixed data...") local_histogram_equalization( dnb_data, mask, valid_data_mask=good_mask.values, local_radius_px=self.mixed_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB mixed data...") histogram_equalization(dnb_data, day_mask, out=output_dataset) if night_mask.any(): did_equalize = True if self.adaptive_night == "always" or ( has_multi_times and self.adaptive_night == "multiple"): LOG.debug("Adaptive histogram equalizing DNB night data...") local_histogram_equalization( dnb_data, night_mask, valid_data_mask=good_mask.values, local_radius_px=self.night_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB night data...") histogram_equalization(dnb_data, night_mask, out=output_dataset) if not did_equalize: raise RuntimeError("No valid data found to histogram equalize") return output_dataset class ERFDNB(CompositeBase): """Equalized DNB composite using the error function (erf). The logic for this code was taken from Polar2Grid and was originally developed by Curtis Seaman and Steve Miller. The original code was written in IDL and is included as comments in the code below. 
""" def __init__(self, *args, **kwargs): """Initialize ERFDNB specific keyword arguments.""" self.saturation_correction = kwargs.pop("saturation_correction", False) super(ERFDNB, self).__init__(*args, **kwargs) def _saturation_correction(self, dnb_data, unit_factor, min_val, max_val): saturation_pct = float(np.count_nonzero(dnb_data > max_val)) / dnb_data.size LOG.debug("Dynamic DNB saturation percentage: %f", saturation_pct) while saturation_pct > 0.005: max_val *= 1.1 * unit_factor saturation_pct = float(np.count_nonzero( dnb_data > max_val)) / dnb_data.size LOG.debug("Dynamic DNB saturation percentage: %f", saturation_pct) inner_sqrt = (dnb_data - min_val) / (max_val - min_val) # clip negative values to 0 before the sqrt inner_sqrt[inner_sqrt < 0] = 0 return np.sqrt(inner_sqrt) def __call__(self, datasets, **info): """Create the composite DataArray object for ERFDNB.""" if len(datasets) != 4: raise ValueError("Expected 4 datasets, got %d" % (len(datasets), )) from scipy.special import erf dnb_data = datasets[0] sza_data = datasets[1] lza_data = datasets[2] output_dataset = dnb_data.where(~(dnb_data.isnull() | sza_data.isnull())) # this algorithm assumes units of "W cm-2 sr-1" so if there are other # units we need to adjust for that if dnb_data.attrs.get("units", "W m-2 sr-1") == "W m-2 sr-1": unit_factor = 10000. else: unit_factor = 1. # convert to decimal instead of % moon_illum_fraction = da.mean(datasets[3].data) * 0.01 # From Steve Miller and Curtis Seaman # maxval = 10.^(-1.7 - (((2.65+moon_factor1+moon_factor2))*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # minval = 10.^(-4. - ((2.95+moon_factor2)*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # scaled_radiance = (radiance - minval) / (maxval - minval) # radiance = sqrt(scaled_radiance) # Version 2: Update from Curtis Seaman # maxval = 10.^(-1.7 - (((2.65+moon_factor1+moon_factor2))*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # minval = 10.^(-4. 
- ((2.95+moon_factor2)*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # saturated_pixels = where(radiance gt maxval, nsatpx) # saturation_pct = float(nsatpx)/float(n_elements(radiance)) # print, 'Saturation (%) = ', saturation_pct # # while saturation_pct gt 0.005 do begin # maxval = maxval*1.1 # saturated_pixels = where(radiance gt maxval, nsatpx) # saturation_pct = float(nsatpx)/float(n_elements(radiance)) # print, saturation_pct # endwhile # # scaled_radiance = (radiance - minval) / (maxval - minval) # radiance = sqrt(scaled_radiance) moon_factor1 = 0.7 * (1.0 - moon_illum_fraction) moon_factor2 = 0.0022 * lza_data.data erf_portion = 1 + erf((sza_data.data - 95.0) / (5.0 * np.sqrt(2.0))) max_val = da.power( 10, -1.7 - (2.65 + moon_factor1 + moon_factor2) * erf_portion) * unit_factor min_val = da.power(10, -4.0 - (2.95 + moon_factor2) * erf_portion) * unit_factor # Update from Curtis Seaman, increase max radiance curve until less # than 0.5% is saturated if self.saturation_correction: delayed = dask.delayed(self._saturation_correction)(output_dataset.data, unit_factor, min_val, max_val) output_dataset.data = da.from_delayed(delayed, output_dataset.shape, output_dataset.dtype) output_dataset.data = output_dataset.data.rechunk(dnb_data.data.chunks) else: inner_sqrt = (output_dataset - min_val) / (max_val - min_val) # clip negative values to 0 before the sqrt inner_sqrt.data = np.clip(inner_sqrt.data, 0, None) output_dataset.data = np.sqrt(inner_sqrt).data info = dnb_data.attrs.copy() info.update(self.attrs) info["standard_name"] = "equalized_radiance" info["mode"] = "L" output_dataset.attrs = info return output_dataset def make_day_night_masks(solarZenithAngle, good_mask, highAngleCutoff, lowAngleCutoff, stepsDegrees=None): """Generate masks for day, night, and twilight regions. Masks are created from the provided solar zenith angle data. Optionally provide the highAngleCutoff and lowAngleCutoff that define the limits of the terminator region (if no cutoffs are given the DEFAULT_HIGH_ANGLE and DEFAULT_LOW_ANGLE will be used). Optionally provide the stepsDegrees that define how many degrees each "mixed" mask in the terminator region should be (if no stepsDegrees is given, the whole terminator region will be one mask). """ # if the caller passes None, we're only doing one step stepsDegrees = highAngleCutoff - lowAngleCutoff if stepsDegrees is None else stepsDegrees night_mask = (solarZenithAngle > highAngleCutoff) & good_mask day_mask = (solarZenithAngle <= lowAngleCutoff) & good_mask mixed_mask = [] steps = list(range(lowAngleCutoff, highAngleCutoff + 1, stepsDegrees)) if steps[-1] >= highAngleCutoff: steps[-1] = highAngleCutoff steps = zip(steps, steps[1:]) for i, j in steps: LOG.debug("Processing step %d to %d" % (i, j)) tmp = (solarZenithAngle > i) & (solarZenithAngle <= j) & good_mask if tmp.any(): LOG.debug("Adding step %d to %d" % (i, j)) # log.debug("Points to process in this range: " + str(np.sum(tmp))) mixed_mask.append(tmp) del tmp return day_mask, mixed_mask, night_mask def histogram_equalization( data, mask_to_equalize, number_of_bins=1000, std_mult_cutoff=4.0, do_zerotoone_normalization=True, valid_data_mask=None, # these are theoretically hooked up, but not useful with only one # equalization clip_limit=None, slope_limit=None, # these parameters don't do anything, they're just here to mirror those # in the other call do_log_scale=False, log_offset=None, local_radius_px=None, out=None): """Perform a histogram equalization on the data. 
Data is selected by the mask_to_equalize mask. The data will be separated into number_of_bins levels for equalization and outliers beyond +/- std_mult_cutoff*std will be ignored. If do_zerotoone_normalization is True the data selected by mask_to_equalize will be returned in the 0 to 1 range. Otherwise the data selected by mask_to_equalize will be returned in the 0 to number_of_bins range. Note: the data will be changed in place. """ out = out if out is not None else data.copy() mask_to_use = mask_to_equalize if valid_data_mask is None else valid_data_mask LOG.debug("determining DNB data range for histogram equalization") avg = np.mean(data[mask_to_use]) std = np.std(data[mask_to_use]) # limit our range to +/- std_mult_cutoff*std; e.g. the default # std_mult_cutoff is 4.0 so about 99.8% of the data concervative_mask = (data < (avg + std * std_mult_cutoff)) & ( data > (avg - std * std_mult_cutoff)) & mask_to_use LOG.debug("running histogram equalization") cumulative_dist_function, temp_bins = _histogram_equalization_helper( data[concervative_mask], number_of_bins, clip_limit=clip_limit, slope_limit=slope_limit) # linearly interpolate using the distribution function to get the new # values out[mask_to_equalize] = np.interp(data[mask_to_equalize], temp_bins[:-1], cumulative_dist_function) # if we were asked to, normalize our data to be between zero and one, # rather than zero and number_of_bins if do_zerotoone_normalization: _linear_normalization_from_0to1(out, mask_to_equalize, number_of_bins) return out def local_histogram_equalization(data, mask_to_equalize, valid_data_mask=None, number_of_bins=1000, std_mult_cutoff=3.0, do_zerotoone_normalization=True, local_radius_px=300, clip_limit=60.0, # 20.0, slope_limit=3.0, # 0.5, do_log_scale=True, # can't take the log of zero, so the offset # may be needed; pass 0.0 if your data doesn't # need it log_offset=0.00001, out=None ): """Equalize the provided data (in the mask_to_equalize) using adaptive histogram equalization. Tiles of width/height (2 * local_radius_px + 1) will be calculated and results for each pixel will be bilinearly interpolated from the nearest 4 tiles when pixels fall near the edge of the image (there is no adjacent tile) the resultant interpolated sum from the available tiles will be multiplied to account for the weight of any missing tiles:: pixel total interpolated value = pixel available interpolated value / (1 - missing interpolation weight) If ``do_zerotoone_normalization`` is True the data will be scaled so that all data in the mask_to_equalize falls between 0 and 1; otherwise the data in mask_to_equalize will all fall between 0 and number_of_bins. 
Returns: The equalized data """ out = out if out is not None else np.zeros_like(data) # if we don't have a valid mask, use the mask of what we should be equalizing if valid_data_mask is None: valid_data_mask = mask_to_equalize # calculate some useful numbers for our tile math total_rows = data.shape[0] total_cols = data.shape[1] tile_size = int((local_radius_px * 2.0) + 1.0) row_tiles = int(total_rows / tile_size) if ( (total_rows % tile_size) == 0) else int(total_rows / tile_size) + 1 col_tiles = int(total_cols / tile_size) if ( (total_cols % tile_size) == 0) else int(total_cols / tile_size) + 1 # an array of our distribution functions for equalization all_cumulative_dist_functions = [[] for _ in range(row_tiles)] # an array of our bin information for equalization all_bin_information = [[] for _ in range(row_tiles)] # loop through our tiles and create the histogram equalizations for each one for num_row_tile in range(row_tiles): for num_col_tile in range(col_tiles): tile_dist_func, tile_bin_info = _histogram_equalize_one_tile( data, valid_data_mask, std_mult_cutoff, do_log_scale, log_offset, clip_limit, slope_limit, number_of_bins, num_row_tile, num_col_tile, tile_size ) all_cumulative_dist_functions[num_row_tile].append(tile_dist_func) all_bin_information[num_row_tile].append(tile_bin_info) # get the tile weight array so we can use it to interpolate our data tile_weights = _calculate_weights(tile_size) # now loop through our tiles and linearly interpolate the equalized # versions of the data for num_row_tile in range(row_tiles): for num_col_tile in range(col_tiles): _interpolate_local_equalized_tiles( data, out, mask_to_equalize, valid_data_mask, do_log_scale, log_offset, tile_weights, all_bin_information, all_cumulative_dist_functions, num_row_tile, num_col_tile, row_tiles, col_tiles, tile_size, ) # if we were asked to, normalize our data to be between zero and one, # rather than zero and number_of_bins if do_zerotoone_normalization: _linear_normalization_from_0to1(out, mask_to_equalize, number_of_bins) return out def _histogram_equalize_one_tile( data, valid_data_mask, std_mult_cutoff, do_log_scale, log_offset, clip_limit, slope_limit, number_of_bins, num_row_tile, num_col_tile, tile_size): # calculate the range for this tile (min is inclusive, max is # exclusive) min_row = num_row_tile * tile_size max_row = min_row + tile_size min_col = num_col_tile * tile_size max_col = min_col + tile_size # for speed of calculation, pull out the mask of pixels that should # be used to calculate the histogram mask_valid_data_in_tile = valid_data_mask[min_row:max_row, min_col:max_col] # if we have any valid data in this tile, calculate a histogram equalization for this tile # (note: even if this tile does no fall in the mask_to_equalize, it's histogram may be used by other tiles) if not mask_valid_data_in_tile.any(): return None, None # use all valid data in the tile, so separate sections will # blend cleanly temp_valid_data = data[min_row:max_row, min_col:max_col][ mask_valid_data_in_tile] temp_valid_data = temp_valid_data[ temp_valid_data >= 0 ] # TEMP, testing to see if negative data is messing everything up # limit the contrast by only considering data within a certain # range of the average if std_mult_cutoff is not None: avg = np.mean(temp_valid_data) std = np.std(temp_valid_data) # limit our range to avg +/- std_mult_cutoff*std; e.g. 
the # default std_mult_cutoff is 4.0 so about 99.8% of the data concervative_mask = ( temp_valid_data < (avg + std * std_mult_cutoff)) & ( temp_valid_data > (avg - std * std_mult_cutoff)) temp_valid_data = temp_valid_data[concervative_mask] # if we are taking the log of our data, do so now if do_log_scale: temp_valid_data = np.log(temp_valid_data + log_offset) # do the histogram equalization and get the resulting # distribution function and bin information if not temp_valid_data.size: return None, None cumulative_dist_function, temp_bins = _histogram_equalization_helper( temp_valid_data, number_of_bins, clip_limit=clip_limit, slope_limit=slope_limit) return cumulative_dist_function, temp_bins def _interpolate_local_equalized_tiles( data, out, mask_to_equalize, valid_data_mask, do_log_scale, log_offset, tile_weights, all_bin_information, all_cumulative_dist_functions, row_idx, col_idx, row_tiles, col_tiles, tile_size): # calculate the range for this tile (min is inclusive, max is # exclusive) num_row_tile = row_idx num_col_tile = col_idx min_row = num_row_tile * tile_size max_row = min_row + tile_size min_col = num_col_tile * tile_size max_col = min_col + tile_size # for convenience, pull some of these tile sized chunks out temp_all_data = data[min_row:max_row, min_col:max_col].copy() temp_mask_to_equalize = mask_to_equalize[min_row:max_row, min_col:max_col] temp_all_valid_data_mask = valid_data_mask[min_row:max_row, min_col:max_col] # if we have any data in this tile, calculate our weighted sum if not temp_mask_to_equalize.any(): return if do_log_scale: temp_all_data[temp_all_valid_data_mask] = np.log( temp_all_data[temp_all_valid_data_mask] + log_offset) temp_data_to_equalize = temp_all_data[temp_mask_to_equalize] temp_all_valid_data = temp_all_data[temp_all_valid_data_mask] # a place to hold our weighted sum that represents the interpolated contributions # of the histogram equalizations from the surrounding tiles temp_sum = np.zeros_like(temp_data_to_equalize) # how much weight were we unable to use because those tiles # fell off the edge of the image? 
unused_weight = np.zeros(temp_data_to_equalize.shape, dtype=tile_weights.dtype) # loop through all the surrounding tiles and process their # contributions to this tile for weight_row in range(3): for weight_col in range(3): # figure out which adjacent tile we're processing (in # overall tile coordinates instead of relative to our # current tile) calculated_row = num_row_tile - 1 + weight_row calculated_col = num_col_tile - 1 + weight_col tmp_tile_weights = tile_weights[ weight_row, weight_col][np.where(temp_mask_to_equalize)] # if we're inside the tile array and the tile we're # processing has a histogram equalization for us to # use, process it if ((calculated_row >= 0) and (calculated_row < row_tiles) and (calculated_col >= 0) and (calculated_col < col_tiles) and ( all_bin_information[calculated_row][ calculated_col] is not None) and (all_cumulative_dist_functions[calculated_row][ calculated_col] is not None)): # equalize our current tile using the histogram # equalization from the tile we're processing temp_equalized_data = np.interp( temp_all_valid_data, all_bin_information[calculated_row][calculated_col][:-1], all_cumulative_dist_functions[calculated_row][ calculated_col]) temp_equalized_data = temp_equalized_data[np.where( temp_mask_to_equalize[temp_all_valid_data_mask])] # add the contribution for the tile we're # processing to our weighted sum temp_sum += temp_equalized_data * tmp_tile_weights # if the tile we're processing doesn't exist, hang onto the weight we # would have used for it so we can correct that later else: unused_weight -= tmp_tile_weights # if we have unused weights, scale our values to correct for that if unused_weight.any(): # TODO: if the mask masks everything out this will be a zero! temp_sum /= unused_weight + 1 # now that we've calculated the weighted sum for this tile, set # it in our data array out[min_row:max_row, min_col:max_col][ temp_mask_to_equalize] = temp_sum # TEMP, test without using weights # data[min_row:max_row, min_col:max_col][temp_mask_to_equalize] = \ # np.interp(temp_data_to_equalize, all_bin_information[num_row_tile][num_col_tile][:-1], # all_cumulative_dist_functions[num_row_tile][num_col_tile]) def _histogram_equalization_helper(valid_data, number_of_bins, clip_limit=None, slope_limit=None): """Calculate the simplest possible histogram equalization, using only valid data. 
Returns: cumulative distribution function and bin information """ # bucket all the selected data using np's histogram function temp_histogram, temp_bins = np.histogram(valid_data, number_of_bins) # if we have a clip limit and we should do our clipping before building # the cumulative distribution function, clip off our histogram if clip_limit is not None: # clip our histogram and remember how much we removed pixels_to_clip_at = int(clip_limit * (valid_data.size / float(number_of_bins))) mask_to_clip = temp_histogram > clip_limit # num_bins_clipped = sum(mask_to_clip) # num_pixels_clipped = sum(temp_histogram[mask_to_clip]) - (num_bins_clipped * pixels_to_clip_at) temp_histogram[mask_to_clip] = pixels_to_clip_at # calculate the cumulative distribution function cumulative_dist_function = temp_histogram.cumsum() # if we have a clip limit and we should do our clipping after building the # cumulative distribution function, clip off our cdf if slope_limit is not None: # clip our cdf and remember how much we removed pixel_height_limit = int(slope_limit * (valid_data.size / float(number_of_bins))) cumulative_excess_height = 0 num_clipped_pixels = 0 weight_metric = np.zeros(cumulative_dist_function.shape, dtype=float) for pixel_index in range(1, cumulative_dist_function.size): current_pixel_count = cumulative_dist_function[pixel_index] diff_from_acceptable = ( current_pixel_count - cumulative_dist_function[pixel_index - 1] - pixel_height_limit - cumulative_excess_height) if diff_from_acceptable < 0: weight_metric[pixel_index] = abs(diff_from_acceptable) cumulative_excess_height += max(diff_from_acceptable, 0) cumulative_dist_function[ pixel_index] = current_pixel_count - cumulative_excess_height num_clipped_pixels = num_clipped_pixels + cumulative_excess_height # now normalize the overall distribution function cumulative_dist_function = (number_of_bins - 1) * cumulative_dist_function / cumulative_dist_function[-1] # return what someone else will need in order to apply the equalization later return cumulative_dist_function, temp_bins def _calculate_weights(tile_size): """Calculate a weight array for bilinear interpolation of histogram tiles. The weight array will be used to quickly bilinearly-interpolate the histogram equalizations tile size should be the width and height of a tile in pixels. Returns: 4D weight array where the first 2 dimensions correspond to the grid of where the tiles are relative to the tile being interpolated. 
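        The last two dimensions index the row and column of each pixel within a
        tile, so the returned array has shape (3, 3, tile_size, tile_size).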
""" # we are essentially making a set of weight masks for an ideal center tile # that has all 8 surrounding tiles available # create our empty template tiles template_tile = np.zeros((3, 3, tile_size, tile_size), dtype=np.float32) # TEMP FOR TESTING, create a weight tile that does no interpolation # template_tile[1,1] = template_tile[1,1] + 1.0 # for ease of calculation, figure out the index of the center pixel in a tile # and how far that pixel is from the edge of the tile (in pixel units) center_index = int(tile_size / 2) center_dist = tile_size / 2.0 # loop through each pixel in the tile and calculate the 9 weights for that pixel # were weights for a pixel are 0.0 they are not set (since the template_tile # starts out as all zeros) for row in range(tile_size): for col in range(tile_size): vertical_dist = abs( center_dist - row ) # the distance from our pixel to the center of our tile, vertically horizontal_dist = abs( center_dist - col ) # the distance from our pixel to the center of our tile, horizontally # pre-calculate which 3 adjacent tiles will affect our tile # (note: these calculations aren't quite right if center_index equals the row or col) horizontal_index = 0 if col < center_index else 2 vertical_index = 0 if row < center_index else 2 # if this is the center pixel, we only need to use it's own tile # for it if (row is center_index) and (col is center_index): # all of the weight for this pixel comes from it's own tile template_tile[1, 1][row, col] = 1.0 # if this pixel is in the center row, but is not the center pixel # we're going to need to linearly interpolate it's tile and the # tile that is horizontally nearest to it elif (row is center_index) and (col is not center_index): # linear interp horizontally beside_weight = horizontal_dist / tile_size # the weight from the adjacent tile local_weight = ( tile_size - horizontal_dist) / tile_size # the weight from this tile # set the weights for the two relevant tiles template_tile[1, 1][row, col] = local_weight template_tile[1, horizontal_index][row, col] = beside_weight # if this pixel is in the center column, but is not the center pixel # we're going to need to linearly interpolate it's tile and the # tile that is vertically nearest to it elif (row is not center_index) and (col is center_index): # linear interp vertical beside_weight = vertical_dist / tile_size # the weight from the adjacent tile local_weight = ( tile_size - vertical_dist) / tile_size # the weight from this tile # set the weights for the two relevant tiles template_tile[1, 1][row, col] = local_weight template_tile[vertical_index, 1][row, col] = beside_weight # if the pixel is in one of the four quadrants that are above or below the center # row and column, we need to bilinearly interpolate it between the # nearest four tiles else: # bilinear interpolation local_weight = ((tile_size - vertical_dist) / tile_size) * ( (tile_size - horizontal_dist) / tile_size) # the weight from this tile vertical_weight = ((vertical_dist) / tile_size) * ( (tile_size - horizontal_dist) / tile_size ) # the weight from the vertically adjacent tile horizontal_weight = ( (tile_size - vertical_dist) / tile_size) * ( (horizontal_dist) / tile_size ) # the weight from the horizontally adjacent tile diagonal_weight = ((vertical_dist) / tile_size) * ( (horizontal_dist) / tile_size ) # the weight from the diagonally adjacent tile # set the weights for the four relevant tiles template_tile[1, 1, row, col] = local_weight template_tile[vertical_index, 1, row, col] = vertical_weight 
template_tile[1, horizontal_index, row, col] = horizontal_weight template_tile[vertical_index, horizontal_index, row, col] = diagonal_weight # return the weights for an ideal center tile return template_tile def _linear_normalization_from_0to1( data, mask, theoretical_max, theoretical_min=0, message="normalizing equalized data to fit in 0 to 1 range"): """Do a linear normalization so all data is in the 0 to 1 range. This is a sloppy but fast calculation that relies on parameters giving it the correct theoretical current max and min so it can scale the data accordingly. """ LOG.debug(message) if theoretical_min != 0: data[mask] = data[mask] - theoretical_min theoretical_max = theoretical_max - theoretical_min data[mask] = data[mask] / theoretical_max class NCCZinke(CompositeBase): """Equalized DNB composite using the Zinke algorithm [#ncc1]_. References: .. [#ncc1] Stephan Zinke (2017), A simplified high and near-constant contrast approach for the display of VIIRS day/night band imagery :doi:`10.1080/01431161.2017.1338838` """ def __call__(self, datasets, **info): """Create HNCC DNB composite.""" if len(datasets) != 4: raise ValueError("Expected 4 datasets, got %d" % (len(datasets),)) dnb_data = datasets[0] sza_data = datasets[1] lza_data = datasets[2] # this algorithm assumes units of "W cm-2 sr-1" so if there are other # units we need to adjust for that if dnb_data.attrs.get("units", "W m-2 sr-1") == "W m-2 sr-1": unit_factor = 10000. else: unit_factor = 1. mda = dnb_data.attrs.copy() dnb_data = dnb_data.copy() / unit_factor # convert to decimal instead of % moon_illum_fraction = da.mean(datasets[3].data) * 0.01 phi = da.rad2deg(da.arccos(2. * moon_illum_fraction - 1)) vfl = 0.026 * phi + 4.0e-9 * (phi ** 4.) m_fullmoon = -12.74 m_sun = -26.74 m_moon = vfl + m_fullmoon gs_ = self.gain_factor(sza_data.data) r_sun_moon = 10.**((m_sun - m_moon) / -2.5) gl_ = r_sun_moon * self.gain_factor(lza_data.data) gtot = 1. / (1. / gs_ + 1. / gl_) dnb_data += 2.6e-10 dnb_data *= gtot mda['name'] = self.attrs['name'] mda['standard_name'] = 'ncc_radiance' dnb_data.attrs = mda return dnb_data def gain_factor(self, theta): """Compute gain factor in a dask-friendly manner.""" return theta.map_blocks(self._gain_factor, dtype=theta.dtype) @staticmethod def _gain_factor(theta): gain = np.empty_like(theta) mask = theta <= 87.541 gain[mask] = (58 + 4 / np.cos(np.deg2rad(theta[mask]))) / 5 mask = np.logical_and(theta <= 96, 87.541 < theta) gain[mask] = (123 * np.exp(1.06 * (theta[mask] - 89.589)) * ((theta[mask] - 93)**2 / 18 + 0.5)) mask = np.logical_and(96 < theta, theta <= 101) gain[mask] = 123 * np.exp(1.06 * (theta[mask] - 89.589)) mask = np.logical_and(101 < theta, theta <= 103.49) gain[mask] = (123 * np.exp(1.06 * (101 - 89.589)) * np.log(theta[mask] - (101 - np.e)) ** 2) gain[theta > 103.49] = 6.0e7 return gain class SnowAge(GenericCompositor): """Create RGB snow product. 
Product is based on method presented at the second CSPP/IMAPP users' meeting at Eumetsat in Darmstadt on 14-16 April 2015 # Bernard Bellec snow Look-Up Tables V 1.0 (c) Meteo-France # These Look-up Tables allow you to create the RGB snow product # for SUOMI-NPP VIIRS Imager according to the algorithm # presented at the second CSPP/IMAPP users' meeting at Eumetsat # in Darmstadt on 14-16 April 2015 # The algorithm and the product are described in this # presentation : # http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf # For further information you may contact # Bernard Bellec at Bernard.Bellec@meteo.fr # or # Pascale Roquet at Pascale.Roquet@meteo.fr """ def __call__(self, projectables, nonprojectables=None, **info): """Generate a SnowAge RGB composite. The algorithm and the product are described in this presentation : http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf For further information you may contact Bernard Bellec at Bernard.Bellec@meteo.fr or Pascale Roquet at Pascale.Roquet@meteo.fr """ if len(projectables) != 5: raise ValueError("Expected 5 datasets, got %d" % (len(projectables), )) # Collect information that is the same between the projectables info = combine_metadata(*projectables) # Update that information with configured information (including name) info.update(self.attrs) # Force certain pieces of metadata that we *know* to be true info["wavelength"] = None m07 = projectables[0] * 255. / 160. m08 = projectables[1] * 255. / 160. m09 = projectables[2] * 255. / 160. m10 = projectables[3] * 255. / 160. m11 = projectables[4] * 255. / 160. refcu = m11 - m10 refcu = refcu.clip(min=0) ch1 = m07 - refcu / 2. - m09 / 4. ch2 = m08 + refcu / 4. + m09 / 4. ch3 = m11 + m09 # GenericCompositor needs valid DataArrays with 'area' metadata ch1.attrs = info ch2.attrs = info ch3.attrs = info return super(SnowAge, self).__call__([ch1, ch2, ch3], **info) satpy-0.34.0/satpy/conftest.py000066400000000000000000000020501420401153000162600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Pytest configuration and setup functions.""" def pytest_configure(config): """Set test configuration.""" from satpy import aux_download aux_download.RUNNING_TESTS = True def pytest_unconfigure(config): """Undo previous configurations.""" from satpy import aux_download aux_download.RUNNING_TESTS = False satpy-0.34.0/satpy/dataset/000077500000000000000000000000001420401153000155115ustar00rootroot00000000000000satpy-0.34.0/satpy/dataset/__init__.py000066400000000000000000000020311420401153000176160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Classes and functions related to data identification and querying.""" from .anc_vars import dataset_walker, replace_anc # noqa from .data_dict import DatasetDict, get_key # noqa from .dataid import DataID, DataQuery, ModifierTuple, create_filtered_query # noqa from .metadata import combine_metadata # noqa satpy-0.34.0/satpy/dataset/anc_vars.py000066400000000000000000000033421420401153000176610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for dealing with ancillary variables.""" from .dataid import DataID def dataset_walker(datasets): """Walk through *datasets* and their ancillary data. Yields datasets and their parent. """ for dataset in datasets: yield dataset, None for anc_ds in dataset.attrs.get('ancillary_variables', []): try: anc_ds.attrs yield anc_ds, dataset except AttributeError: continue def replace_anc(dataset, parent_dataset): """Replace *dataset* the *parent_dataset*'s `ancillary_variables` field.""" if parent_dataset is None: return id_keys = parent_dataset.attrs.get('_satpy_id_keys', dataset.attrs.get('_satpy_id_keys')) current_dataid = DataID(id_keys, **dataset.attrs) for idx, ds in enumerate(parent_dataset.attrs['ancillary_variables']): if current_dataid == DataID(id_keys, **ds.attrs): parent_dataset.attrs['ancillary_variables'][idx] = dataset return satpy-0.34.0/satpy/dataset/data_dict.py000066400000000000000000000241701420401153000200030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
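# Note added for orientation (summary of the code below): string, number and
# DataQuery keys are expanded by ``get_key`` into a filtered ``DataQuery``,
# that query selects the matching ``DataID`` keys, and ``get_best_dataset_key``
# picks the closest match when more than one candidate remains.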
"""Classes and functions related to a dictionary with DataID keys.""" import numpy as np from .dataid import DataID, create_filtered_query, minimal_default_keys_config class TooManyResults(KeyError): """Special exception when one key maps to multiple items in the container.""" def get_best_dataset_key(key, choices): """Choose the "best" `DataID` from `choices` based on `key`. To see how the keys are sorted, refer to `:meth:satpy.datasets.DataQuery.sort_dataids`. This function assumes `choices` has already been filtered to only include datasets that match the provided `key`. Args: key (DataQuery): Query parameters to sort `choices` by. choices (iterable): `DataID` objects to sort through to determine the best dataset. Returns: List of best `DataID`s from `choices`. If there is more than one element this function could not choose between the available datasets. """ sorted_choices, distances = key.sort_dataids(choices) if len(sorted_choices) == 0 or distances[0] is np.inf: return [] else: return [choice for choice, distance in zip(sorted_choices, distances) if distance == distances[0]] def get_key(key, key_container, num_results=1, best=True, query=None, **kwargs): """Get the fully-specified key best matching the provided key. Only the best match is returned if `best` is `True` (default). See `get_best_dataset_key` for more information on how this is determined. `query` is provided as a convenience to filter by multiple parameters at once without having to filter by multiple `key` inputs. Args: key (DataID): DataID of query parameters to use for searching. Any parameter that is `None` is considered a wild card and any match is accepted. key_container (dict or set): Container of DataID objects that uses hashing to quickly access items. num_results (int): Number of results to return. Use `0` for all matching results. If `1` then the single matching key is returned instead of a list of length 1. (default: 1) best (bool): Sort results to get "best" result first (default: True). See `get_best_dataset_key` for details. query (DataQuery): filter for the key which can contain for example: resolution (float, int, or list): Resolution of the dataset in dataset units (typically meters). This can also be a list of these numbers. calibration (str or list): Dataset calibration (ex.'reflectance'). This can also be a list of these strings. polarization (str or list): Dataset polarization (ex.'V'). This can also be a list of these strings. level (number or list): Dataset level (ex. 100). This can also be a list of these numbers. modifiers (list): Modifiers applied to the dataset. Unlike resolution and calibration this is the exact desired list of modifiers for one dataset, not a list of possible modifiers. Returns: list or DataID: Matching key(s) Raises: KeyError if no matching results or if more than one result is found when `num_results` is `1`. """ key = create_filtered_query(key, query) res = key.filter_dataids(key_container) if not res: raise KeyError("No dataset matching '{}' found".format(str(key))) if best: res = get_best_dataset_key(key, res) if num_results == 1 and not res: raise KeyError("No dataset matching '{}' found".format(str(key))) if num_results == 1 and len(res) != 1: raise TooManyResults("No unique dataset matching {}".format(str(key))) if num_results == 1: return res[0] if num_results == 0: return res return res[:num_results] class DatasetDict(dict): """Special dictionary object that can handle dict operations based on dataset name, wavelength, or DataID. 
Note: Internal dictionary keys are `DataID` objects. """ def keys(self, names=False, wavelengths=False): """Give currently contained keys.""" # sort keys so things are a little more deterministic (.keys() is not) keys = sorted(super(DatasetDict, self).keys()) if names: return (k.get('name') for k in keys) elif wavelengths: return (k.get('wavelength') for k in keys) else: return keys def get_key(self, match_key, num_results=1, best=True, **dfilter): """Get multiple fully-specified keys that match the provided query. Args: key (DataID): DataID of query parameters to use for searching. Any parameter that is `None` is considered a wild card and any match is accepted. Can also be a string representing the dataset name or a number representing the dataset wavelength. num_results (int): Number of results to return. If `0` return all, if `1` return only that element, otherwise return a list of matching keys. **dfilter (dict): See `get_key` function for more information. """ return get_key(match_key, self.keys(), num_results=num_results, best=best, **dfilter) def getitem(self, item): """Get Node when we know the *exact* DataID.""" return super(DatasetDict, self).__getitem__(item) def __getitem__(self, item): """Get item from container.""" try: # short circuit - try to get the object without more work return super(DatasetDict, self).__getitem__(item) except KeyError: key = self.get_key(item) return super(DatasetDict, self).__getitem__(key) def get(self, key, default=None): """Get value with optional default.""" try: key = self.get_key(key) except KeyError: return default return super(DatasetDict, self).get(key, default) def __setitem__(self, key, value): """Support assigning 'Dataset' objects or dictionaries of metadata.""" if hasattr(value, 'attrs'): # xarray.DataArray objects value_info = value.attrs else: value_info = value # use value information to make a more complete DataID if not isinstance(key, DataID): key = self._create_dataid_key(key, value_info) # update the 'value' with the information contained in the key try: new_info = key.to_dict() except AttributeError: new_info = key if isinstance(value_info, dict): value_info.update(new_info) if isinstance(key, DataID): value_info['_satpy_id'] = key return super(DatasetDict, self).__setitem__(key, value) def _create_dataid_key(self, key, value_info): """Create a DataID key from dictionary.""" if not isinstance(value_info, dict): raise ValueError("Key must be a DataID when value is not an xarray DataArray or dict") old_key = key try: key = self.get_key(key) except KeyError: if isinstance(old_key, str): new_name = old_key else: new_name = value_info.get("name") # this is a new key and it's not a full DataID tuple if new_name is None and value_info.get('wavelength') is None: raise ValueError("One of 'name' or 'wavelength' attrs " "values should be set.") id_keys = self._create_id_keys_from_dict(value_info) value_info['name'] = new_name key = DataID(id_keys, **value_info) return key def _create_id_keys_from_dict(self, value_info_dict): """Create id_keys from dict.""" try: id_keys = value_info_dict['_satpy_id'].id_keys except KeyError: try: id_keys = value_info_dict['_satpy_id_keys'] except KeyError: id_keys = minimal_default_keys_config return id_keys def contains(self, item): """Check contains when we know the *exact* DataID.""" return super(DatasetDict, self).__contains__(item) def __contains__(self, item): """Check if item exists in container.""" try: key = self.get_key(item) except KeyError: return False return super(DatasetDict, 
self).__contains__(key) def __delitem__(self, key): """Delete item from container.""" try: # short circuit - try to get the object without more work return super(DatasetDict, self).__delitem__(key) except KeyError: key = self.get_key(key) return super(DatasetDict, self).__delitem__(key) satpy-0.34.0/satpy/dataset/dataid.py000066400000000000000000000630771420401153000173260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Dataset identifying objects.""" import logging import numbers import warnings from collections import namedtuple from contextlib import suppress from copy import copy, deepcopy from enum import Enum, IntEnum from typing import NoReturn import numpy as np logger = logging.getLogger(__name__) def get_keys_from_config(common_id_keys, config): """Gather keys for a new DataID from the ones available in configured dataset.""" id_keys = {} for key, val in common_id_keys.items(): if key in config: id_keys[key] = val elif val is not None and (val.get('required') is True or val.get('default') is not None): id_keys[key] = val if not id_keys: raise ValueError('Metadata does not contain enough information to create a DataID.') return id_keys class ValueList(IntEnum): """A static value list. This class is meant to be used for dynamically created Enums. Due to this it should not be used as a normal Enum class or there may be some unexpected behavior. For example, this class contains custom pickling and unpickling handling that may break in subclasses. """ @classmethod def convert(cls, value): """Convert value to an instance of this class.""" try: return cls[value] except KeyError: raise ValueError('{} invalid value for {}'.format(value, cls)) @classmethod def _unpickle(cls, enum_name, enum_members, enum_member): """Create dynamic class that was previously pickled. See :meth:`__reduce_ex__` for implementation details. """ enum_cls = cls(enum_name, enum_members) return enum_cls[enum_member] def __reduce_ex__(self, proto): """Reduce the object for pickling.""" return (ValueList._unpickle, (self.__class__.__name__, list(self.__class__.__members__.keys()), self.name)) def __eq__(self, other): """Check equality.""" return self.name == other def __ne__(self, other): """Check non-equality.""" return self.name != other def __hash__(self): """Hash the object.""" return hash(self.name) def __repr__(self): """Represent the values.""" return '<' + str(self) + '>' wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=('µm',)) # type: ignore class WavelengthRange(wlklass): """A named tuple for wavelength ranges. The elements of the range are min, central and max values, and optionally a unit (defaults to µm). No clever unit conversion is done here, it's just used for checking that two ranges are comparable. """ def __eq__(self, other): """Return if two wavelengths are equal. 
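        For example, ``WavelengthRange(0.5, 0.6, 0.7) == 0.55`` is True because
        the scalar lies inside the range, while comparing with ``0.75`` is False.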
Args: other (tuple or scalar): (min wl, nominal wl, max wl) or scalar wl Return: True if other is a scalar and min <= other <= max, or if other is a tuple equal to self, False otherwise. """ if other is None: return False if isinstance(other, numbers.Number): return other in self if isinstance(other, (tuple, list)) and len(other) == 3: return self[:3] == other return super().__eq__(other) def __ne__(self, other): """Return the opposite of `__eq__`.""" return not self == other def __lt__(self, other): """Compare to another wavelength.""" if other is None: return False return super().__lt__(other) def __gt__(self, other): """Compare to another wavelength.""" if other is None: return True return super().__gt__(other) def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) def __str__(self): """Format for print out.""" return "{0.central} {0.unit} ({0.min}-{0.max} {0.unit})".format(self) def __contains__(self, other): """Check if this range contains *other*.""" if other is None: return False if isinstance(other, numbers.Number): return self.min <= other <= self.max with suppress(AttributeError): if self.unit != other.unit: raise NotImplementedError("Can't compare wavelength ranges with different units.") return self.min <= other.min and self.max >= other.max return False def distance(self, value): """Get the distance from value.""" if self == value: try: return abs(value.central - self.central) except AttributeError: if isinstance(value, (tuple, list)): return abs(value[1] - self.central) return abs(value - self.central) else: return np.inf @classmethod def convert(cls, wl): """Convert `wl` to this type if possible.""" if isinstance(wl, (tuple, list)): return cls(*wl) return wl def to_cf(self): """Serialize for cf export.""" return str(self) @classmethod def from_cf(cls, blob): """Return a WavelengthRange from a cf blob.""" try: obj = cls._read_cf_from_string_export(blob) except TypeError: obj = cls._read_cf_from_string_list(blob) return obj @classmethod def _read_cf_from_string_export(cls, blob): """Read blob as a string created by `to_cf`.""" pattern = "{central:f} {unit:s} ({min:f}-{max:f} {unit2:s})" from trollsift import Parser parser = Parser(pattern) res_dict = parser.parse(blob) res_dict.pop('unit2') obj = cls(**res_dict) return obj @classmethod def _read_cf_from_string_list(cls, blob): """Read blob as a list of strings (legacy formatting).""" min_wl, central_wl, max_wl, unit = blob obj = cls(float(min_wl), float(central_wl), float(max_wl), unit) return obj class ModifierTuple(tuple): """A tuple holder for modifiers.""" @classmethod def convert(cls, modifiers): """Convert `modifiers` to this type if possible.""" if modifiers is None: return None if not isinstance(modifiers, (cls, tuple, list)): raise TypeError("'DataID' modifiers must be a tuple or None, " "not {}".format(type(modifiers))) return cls(modifiers) def __eq__(self, other): """Check equality.""" if isinstance(other, list): other = tuple(other) return super().__eq__(other) def __ne__(self, other): """Check non-equality.""" if isinstance(other, list): other = tuple(other) return super().__ne__(other) def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) #: Default ID keys DataArrays. 
default_id_keys_config = {'name': { 'required': True, }, 'wavelength': { 'type': WavelengthRange, }, 'resolution': { 'transitive': False, }, 'calibration': { 'enum': [ 'reflectance', 'brightness_temperature', 'radiance', 'counts' ], 'transitive': True, }, 'modifiers': { 'default': ModifierTuple(), 'type': ModifierTuple, }, } #: Default ID keys for coordinate DataArrays. default_co_keys_config = {'name': { 'required': True, }, 'resolution': { 'transitive': True, } } #: Minimal ID keys for DataArrays, for example composites. minimal_default_keys_config = {'name': { 'required': True, }, 'resolution': { 'transitive': True, } } class DataID(dict): """Identifier for all `DataArray` objects. DataID is a dict that holds identifying and classifying information about a DataArray. """ def __init__(self, id_keys, **keyval_dict): """Init the DataID. The *id_keys* dictionary has to be formed as described in :doc:`../dev_guide/satpy_internals`. The other keyword arguments are values to be assigned to the keys. Note that `None` isn't a valid value and will simply be ignored. """ self._hash = None self._orig_id_keys = id_keys self._id_keys = self.fix_id_keys(id_keys or {}) if keyval_dict: curated = self.convert_dict(keyval_dict) else: curated = {} super(DataID, self).__init__(curated) @staticmethod def fix_id_keys(id_keys): """Flesh out enums in the id keys as gotten from a config.""" new_id_keys = id_keys.copy() for key, val in id_keys.items(): if not val: continue if 'enum' in val and 'type' in val: raise ValueError('Cannot have both type and enum for the same id key.') new_val = copy(val) if 'enum' in val: new_val['type'] = ValueList(key, ' '.join(new_val.pop('enum'))) new_id_keys[key] = new_val return new_id_keys def convert_dict(self, keyvals): """Convert a dictionary's values to the types defined in this object's id_keys.""" curated = {} if not keyvals: return curated for key, val in self._id_keys.items(): if val is None: val = {} if key in keyvals or val.get('default') is not None or val.get('required'): curated_val = keyvals.get(key, val.get('default')) if 'required' in val and curated_val is None: raise ValueError('Required field {} missing.'.format(key)) if 'type' in val: curated[key] = val['type'].convert(curated_val) elif curated_val is not None: curated[key] = curated_val return curated @classmethod def _unpickle(cls, id_keys, keyval): """Create a new instance of the DataID after pickling.""" return cls(id_keys, **keyval) def __reduce__(self): """Reduce the object for pickling.""" return (self._unpickle, (self._orig_id_keys, self.to_dict())) def from_dict(self, keyvals): """Create a DataID from a dictionary.""" return self.__class__(self._id_keys, **keyvals) @classmethod def from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Get the DataID using the dataarray attributes.""" if '_satpy_id' in array.attrs: return array.attrs['_satpy_id'] return cls.new_id_from_dataarray(array, default_keys) @classmethod def new_id_from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Create a new DataID from a dataarray's attributes.""" try: id_keys = array.attrs['_satpy_id'].id_keys except KeyError: id_keys = array.attrs.get('_satpy_id_keys', default_keys) return cls(id_keys, **array.attrs) @property def id_keys(self): """Get the id_keys.""" return deepcopy(self._id_keys) def create_filter_query_without_required_fields(self, query): """Remove the required fields from *query*.""" try: new_query = query.to_dict() except AttributeError: new_query = query.copy() for key, val 
in self._id_keys.items(): if val and (val.get('transitive') is not True): new_query.pop(key, None) return DataQuery.from_dict(new_query) def _asdict(self): return dict(self.items()) def to_dict(self): """Convert the ID to a dict.""" res_dict = dict() for key, value in self._asdict().items(): if isinstance(value, Enum): res_dict[key] = value.name else: res_dict[key] = value return res_dict def __getattr__(self, key): """Support old syntax for getting items.""" if key in self._id_keys: warnings.warn('Attribute access to DataIDs is deprecated, use key access instead.', stacklevel=2) return self[key] else: return super().__getattr__(key) def __deepcopy__(self, memo=None): """Copy this object. Returns self as it's immutable. """ return self def __copy__(self): """Copy this object. Returns self as it's immutable. """ return self def __repr__(self): """Represent the id.""" items = ("{}={}".format(key, repr(val)) for key, val in self.items()) return self.__class__.__name__ + "(" + ", ".join(items) + ")" def _replace(self, **kwargs): """Make a new instance with replaced items.""" info = dict(self.items()) info.update(kwargs) return self.from_dict(info) def __hash__(self): """Hash the object.""" if self._hash is None: self._hash = hash(tuple(sorted(self.items()))) return self._hash def _immutable(self, *args, **kws) -> NoReturn: """Raise and error.""" raise TypeError('Cannot change a DataID') def __lt__(self, other): """Check lesser than.""" list_self, list_other = [], [] for key in self._id_keys: if key not in self and key not in other: continue elif key in self and key in other: list_self.append(self[key]) list_other.append(other[key]) elif key in self: val = self[key] list_self.append(val) list_other.append(_generalize_value_for_comparison(val)) elif key in other: val = other[key] list_other.append(val) list_self.append(_generalize_value_for_comparison(val)) return tuple(list_self) < tuple(list_other) __setitem__ = _immutable __delitem__ = _immutable pop = _immutable # type: ignore popitem = _immutable clear = _immutable update = _immutable # type: ignore setdefault = _immutable def _find_modifiers_key(self): for key, val in self.items(): if isinstance(val, ModifierTuple): return key raise KeyError def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): """Check if this is modified.""" try: key = self._find_modifiers_key() except KeyError: return False return bool(self[key]) def _generalize_value_for_comparison(val): """Get a generalize value for comparisons.""" if isinstance(val, numbers.Number): return 0 if isinstance(val, str): return "" if isinstance(val, tuple): return tuple() raise NotImplementedError("Don't know how to generalize " + str(type(val))) class DataQuery: """The data query object. A DataQuery can be used in Satpy to query for a Dataset. This way a fully qualified DataID can be found even if some of the DataID elements are unknown. In this case a `*` signifies something that is unknown or not applicable to the requested Dataset. """ def __init__(self, **kwargs): """Initialize the query.""" self._dict = kwargs.copy() self._fields = tuple(self._dict.keys()) self._values = tuple(self._dict.values()) def __getitem__(self, key): """Get an item.""" return self._dict[key] def __eq__(self, other): """Compare the DataQuerys. 
A DataQuery is considered equal to another DataQuery or DataID if they have common keys that have equal values. """ sdict = self._asdict() try: odict = other._asdict() except AttributeError: return False common_keys = False for key, val in sdict.items(): if key in odict: common_keys = True if odict[key] != val and val is not None: return False return common_keys def __hash__(self): """Hash.""" fields = [] values = [] for field, value in sorted(self._dict.items()): if value != '*': fields.append(field) if isinstance(value, (list, set)): value = tuple(value) values.append(value) return hash(tuple(zip(fields, values))) def get(self, key, default=None): """Get an item.""" return self._dict.get(key, default) @classmethod def from_dict(cls, the_dict): """Convert a dict to an ID.""" return cls(**the_dict) def items(self): """Get the items of this query.""" return self._dict.items() def _asdict(self): return self._dict.copy() def to_dict(self, trim=True): """Convert the ID to a dict.""" if trim: return self._to_trimmed_dict() else: return self._asdict() def _to_trimmed_dict(self): return {key: val for key, val in self._dict.items() if val != '*'} def __repr__(self): """Represent the query.""" items = ("{}={}".format(key, repr(val)) for key, val in zip(self._fields, self._values)) return self.__class__.__name__ + "(" + ", ".join(items) + ")" def filter_dataids(self, dataid_container): """Filter DataIDs based on this query.""" keys = list(filter(self._match_dataid, dataid_container)) return keys def _match_dataid(self, dataid): """Match the dataid with the current query.""" if self._shares_required_keys(dataid): keys_to_check = set(dataid.keys()) & set(self._fields) else: keys_to_check = set(dataid._id_keys.keys()) & set(self._fields) if not keys_to_check: return False return all(self._match_query_value(key, dataid.get(key)) for key in keys_to_check) def _shares_required_keys(self, dataid): """Check if dataid shares required keys with the current query.""" for key, val in dataid._id_keys.items(): try: if val.get('required', False): if key in self._fields: return True except AttributeError: continue return False def _match_query_value(self, key, id_val): val = self._dict[key] if val == '*': return True if isinstance(id_val, tuple) and isinstance(val, (tuple, list)): return tuple(val) == id_val if not isinstance(val, list): val = [val] return id_val in val def sort_dataids_with_preference(self, all_ids, preference): """Sort `all_ids` given a sorting `preference` (DataQuery or None).""" try: res = preference.to_dict() except AttributeError: res = dict() res.update(self.to_dict()) optimistic_query = DataQuery.from_dict(res) sorted_ids, distances = optimistic_query.sort_dataids(all_ids) if distances[0] == np.inf: # nothing matches the optimistic query sorted_ids, distances = self.sort_dataids(all_ids) return sorted_ids, distances def sort_dataids(self, dataids): """Sort the DataIDs based on this query. Returns the sorted dataids and the list of distances. The sorting is performed based on the types of the keys to search on (as they are defined in the DataIDs from `dataids`). If that type defines a `distance` method, then it is used to find how 'far' the DataID is from the current query. If the type is a number, a simple subtraction is performed. For other types, the distance is 0 if the values are identical, np.inf otherwise. For example, with the default DataID, we use the following criteria: 1. Central wavelength is nearest to the `key` wavelength if specified. 2. 
Least modified dataset if `modifiers` is `None` in `key`. Otherwise, the modifiers are ignored. 3. Highest calibration if `calibration` is `None` in `key`. Calibration priority is the order of the calibration list defined as reflectance, brightness temperature, radiance counts if not overridden in the reader configuration. 4. Best resolution (smallest number) if `resolution` is `None` in `key`. Otherwise, the resolution is ignored. """ distances = [] sorted_dataids = [] big_distance = 100000 keys = set(self._dict.keys()) for dataid in dataids: keys |= set(dataid.keys()) for dataid in sorted(dataids): sorted_dataids.append(dataid) distance = 0 for key in keys: if distance == np.inf: break val = self._dict.get(key, '*') if val == '*': distance = self._add_absolute_distance(dataid, key, distance) else: try: dataid_val = dataid[key] except KeyError: distance += big_distance continue distance = self._add_distance_from_query(dataid_val, val, distance) distances.append(distance) distances, dataids = zip(*sorted(zip(distances, sorted_dataids))) return dataids, distances @staticmethod def _add_absolute_distance(dataid, key, distance): try: # for enums distance += dataid.get(key).value except AttributeError: if isinstance(dataid.get(key), numbers.Number): distance += dataid.get(key) elif isinstance(dataid.get(key), tuple): distance += len(dataid.get(key)) return distance @staticmethod def _add_distance_from_query(dataid_val, requested_val, distance): try: distance += dataid_val.distance(requested_val) except AttributeError: if not isinstance(requested_val, list): requested_val = [requested_val] if dataid_val not in requested_val: distance = np.inf elif isinstance(dataid_val, numbers.Number): # so as to get the highest resolution first # FIXME: this ought to be clarified, not sure that # higher resolution is preferable is all cases. # Moreover this might break with other numerical # values. distance += dataid_val return distance def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): """Check if this is modified.""" return bool(self._dict.get('modifiers')) def create_filtered_query(dataset_key, filter_query): """Create a DataQuery matching *dataset_key* and *filter_query*. If a property is specified in both *dataset_key* and *filter_query*, the former has priority. """ ds_dict = _create_id_dict_from_any_key(dataset_key) _update_dict_with_filter_query(ds_dict, filter_query) return DataQuery.from_dict(ds_dict) def _update_dict_with_filter_query(ds_dict, filter_query): if filter_query is not None: for key, value in filter_query.items(): if value != '*': ds_dict.setdefault(key, value) def _create_id_dict_from_any_key(dataset_key): try: ds_dict = dataset_key.to_dict() except AttributeError: if isinstance(dataset_key, str): ds_dict = {'name': dataset_key} elif isinstance(dataset_key, numbers.Number): ds_dict = {'wavelength': dataset_key} else: raise TypeError("Don't know how to interpret a dataset_key of type {}".format(type(dataset_key))) return ds_dict satpy-0.34.0/satpy/dataset/metadata.py000066400000000000000000000156311420401153000176510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for merging metadata from various sources.""" from collections.abc import Collection from datetime import datetime from functools import partial, reduce from operator import eq, is_ import numpy as np from satpy.writers.utils import flatten_dict def combine_metadata(*metadata_objects, average_times=True): """Combine the metadata of two or more Datasets. If the values corresponding to any keys are not equal or do not exist in all provided dictionaries then they are not included in the returned dictionary. By default any keys with the word 'time' in them and consisting of datetime objects will be averaged. This is to handle cases where data were observed at almost the same time but not exactly. In the interest of time, lazy arrays are compared by object identity rather than by their contents. Args: *metadata_objects: MetadataObject or dict objects to combine average_times (bool): Average any keys with 'time' in the name Returns: dict: the combined metadata """ info_dicts = _get_valid_dicts(metadata_objects) if len(info_dicts) == 1: return info_dicts[0].copy() shared_keys = _shared_keys(info_dicts) return _combine_shared_info(shared_keys, info_dicts, average_times) def _get_valid_dicts(metadata_objects): """Get the valid dictionaries matching the metadata_objects.""" info_dicts = [] for metadata_object in metadata_objects: if isinstance(metadata_object, dict): metadata_dict = metadata_object elif hasattr(metadata_object, "attrs"): metadata_dict = metadata_object.attrs else: continue info_dicts.append(metadata_dict) return info_dicts def _shared_keys(info_dicts): key_sets = (set(metadata_dict.keys()) for metadata_dict in info_dicts) return reduce(set.intersection, key_sets) def _combine_shared_info(shared_keys, info_dicts, average_times): shared_info = {} for key in shared_keys: values = [info[key] for info in info_dicts] if 'time' in key and isinstance(values[0], datetime) and average_times: shared_info[key] = average_datetimes(values) elif _are_values_combinable(values): shared_info[key] = values[0] return shared_info def average_datetimes(datetime_list): """Average a series of datetime objects. .. note:: This function assumes all datetime objects are naive and in the same time zone (UTC). Args: datetime_list (iterable): Datetime objects to average Returns: Average datetime as a datetime object """ total = [datetime.timestamp(dt) for dt in datetime_list] return datetime.fromtimestamp(sum(total) / len(total)) def _are_values_combinable(values): """Check if the *values* can be combined.""" if _contain_dicts(values): return _all_dicts_equal(values) return _all_non_dicts_equal(values) def _all_non_dicts_equal(values): if _contain_arrays(values): return _all_arrays_equal(values) if _contain_collections_of_arrays(values): # in the real world, the `ancillary_variables` attribute may be # List[xarray.DataArray], this means our values are now # List[List[xarray.DataArray]]. 
# note that this list_of_arrays check is also true for any # higher-dimensional ndarray, but we only use this check after we have # checked any_arrays so this false positive should have no impact return _all_list_of_arrays_equal(values) return _all_values_equal(values) def _contain_arrays(values): return any([_is_array(value) for value in values]) def _is_array(val): """Check if val is an array.""" return hasattr(val, "__array__") and not np.isscalar(val) def _contain_dicts(values): return any(isinstance(value, dict) for value in values) nan_allclose = partial(np.allclose, equal_nan=True) def _all_arrays_equal(arrays): """Check if the arrays are equal. If the arrays are lazy, just check if they have the same identity. """ if hasattr(arrays[0], 'compute'): return _all_identical(arrays) return _all_values_equal(arrays) def _all_values_equal(values): try: return _all_close(values) except (ValueError, TypeError): # In case of object type arrays (e.g. datetime) _all_close fails, # but _all_equal succeeds. return _all_equal(values) def _all_dicts_equal(dicts): try: return _pairwise_all(_dict_equal, dicts) except AttributeError: # There is something else than a dictionary in the list return False def _dict_equal(d1, d2): """Check that two dictionaries are equal. Nested dictionaries are flattened to facilitate comparison. """ d1_flat = flatten_dict(d1) d2_flat = flatten_dict(d2) if not _dict_keys_equal(d1_flat, d2_flat): return False for key in d1_flat.keys(): value_pair = [d1_flat[key], d2_flat[key]] if not _all_non_dicts_equal(value_pair): return False return True def _dict_keys_equal(d1, d2): return d1.keys() == d2.keys() def _pairwise_all(func, values): for value in values[1:]: if not _is_equal(values[0], value, func): return False return True def _is_equal(a, b, comp_func): res = comp_func(a, b) if _is_array(res): return res.all() return res def _all_identical(values): """Check that the identities of all values are the same.""" return _pairwise_all(is_, values) def _all_close(values): return _pairwise_all(nan_allclose, values) def _all_equal(values): return _pairwise_all(eq, values) def _contain_collections_of_arrays(values): return any( [_is_non_empty_collection(value) and _is_all_arrays(value) for value in values]) def _is_non_empty_collection(value): return isinstance(value, Collection) and len(value) > 0 def _is_all_arrays(value): return all([_is_array(sub_value) for sub_value in value]) def _all_list_of_arrays_equal(array_lists): """Check that the lists of arrays are equal.""" for array_list in zip(*array_lists): if not _all_arrays_equal(array_list): return False return True satpy-0.34.0/satpy/demo/000077500000000000000000000000001420401153000150105ustar00rootroot00000000000000satpy-0.34.0/satpy/demo/__init__.py000066400000000000000000000046011420401153000171220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Demo data download helper functions. Each ``get_*`` function below downloads files to a local directory and returns a list of paths to those files. Some (not all) functions have multiple options for how the data is downloaded (via the ``method`` keyword argument) including: - gcsfs: Download data from a public google cloud storage bucket using the ``gcsfs`` package. - unidata_thredds: Access data using OpenDAP or similar method from Unidata's public THREDDS server (https://thredds.unidata.ucar.edu/thredds/catalog.html). - uwaos_thredds: Access data using OpenDAP or similar method from the University of Wisconsin - Madison's AOS department's THREDDS server. - http: A last resort download method when nothing else is available of a tarball or zip file from one or more servers available to the Satpy project. - uw_arcdata: A network mount available on many servers at the Space Science and Engineering Center (SSEC) at the University of Wisconsin - Madison. This is method is mainly meant when tutorials are taught at the SSEC using a Jupyter Hub server. To use these functions, do: >>> from satpy import Scene, demo >>> filenames = demo.get_us_midlatitude_cyclone_abi() >>> scn = Scene(reader='abi_l1b', filenames=filenames) """ from .abi_l1b import get_hurricane_florence_abi # noqa: F401 from .abi_l1b import get_us_midlatitude_cyclone_abi # noqa: F401 from .ahi_hsd import download_typhoon_surigae_ahi # noqa: F401 from .fci import download_fci_test_data # noqa: F401 from .seviri_hrit import download_seviri_hrit_20180228_1500 # noqa: F401 from .viirs_sdr import get_viirs_sdr_20170128_1229 # noqa: F401 satpy-0.34.0/satpy/demo/_google_cloud_platform.py000066400000000000000000000073511420401153000220750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . import logging import os from urllib.error import URLError from urllib.request import urlopen try: import gcsfs except ImportError: gcsfs = None LOG = logging.getLogger(__name__) def is_google_cloud_instance(): """Check if we are on a GCP virtual machine.""" try: return urlopen('http://metadata.google.internal').headers.get('Metadata-Flavor') == 'Google' except URLError: return False def get_bucket_files(glob_pattern, base_dir, force=False, pattern_slice=None): """Download files from Google Cloud Storage. Args: glob_pattern (str or list): Glob pattern string or series of patterns used to search for on Google Cloud Storage. The pattern should include the "gs://" protocol prefix. If a list of lists, then the results of each sublist pattern are concatenated and the result is treated as one pattern result. This is important for things like ``pattern_slice`` and complicated glob patterns not supported by GCP. base_dir (str): Root directory to place downloaded files on the local system. force (bool): Force re-download of data regardless of its existence on the local system. 
Warning: May delete non-demo files stored in download directory. pattern_slice (slice): Slice object to limit the number of files returned by each glob pattern. """ if pattern_slice is None: pattern_slice = slice(None) if gcsfs is None: raise RuntimeError("Missing 'gcsfs' dependency for GCS download.") if not os.path.isdir(base_dir): # it is the caller's responsibility to make this raise OSError("Directory does not exist: {}".format(base_dir)) if isinstance(glob_pattern, str): glob_pattern = [glob_pattern] fs = gcsfs.GCSFileSystem(token='anon') filenames = [] for gp in glob_pattern: # handle multiple glob patterns being treated as one pattern # for complicated patterns that GCP can't handle if isinstance(gp, str): glob_results = list(fs.glob(gp)) else: # flat list of results glob_results = [fn for pat in gp for fn in fs.glob(pat)] filenames.extend(_download_gcs_files(glob_results[pattern_slice], fs, base_dir, force)) if not filenames: raise OSError("No files could be found or downloaded.") return filenames def _download_gcs_files(globbed_files, fs, base_dir, force): filenames = [] for fn in globbed_files: ondisk_fn = os.path.basename(fn) ondisk_pathname = os.path.join(base_dir, ondisk_fn) filenames.append(ondisk_pathname) if force and os.path.isfile(ondisk_pathname): os.remove(ondisk_pathname) elif os.path.isfile(ondisk_pathname): LOG.info("Found existing: {}".format(ondisk_pathname)) continue LOG.info("Downloading: {}".format(ondisk_pathname)) fs.get('gs://' + fn, ondisk_pathname) return filenames satpy-0.34.0/satpy/demo/abi_l1b.py000066400000000000000000000115041420401153000166540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo data download helper functions for ABI L1b data.""" import os from satpy import config def get_us_midlatitude_cyclone_abi(base_dir=None, method=None, force=False): """Get GOES-16 ABI (CONUS sector) data from 2019-03-14 00:00Z. Args: base_dir (str): Base directory for downloaded files. method (str): Force download method for the data if not already cached. Allowed options are: 'gcsfs'. Default of ``None`` will choose the best method based on environment settings. force (bool): Force re-download of data regardless of its existence on the local system. Warning: May delete non-demo files stored in download directory. 
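    Example (mirrors the usage shown in the :mod:`satpy.demo` module docstring):

        >>> from satpy import Scene, demo
        >>> filenames = demo.get_us_midlatitude_cyclone_abi()
        >>> scn = Scene(reader='abi_l1b', filenames=filenames)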
Total size: ~110MB """ base_dir = base_dir or config.get('demo_data_dir', '.') if method is None: method = 'gcsfs' if method not in ['gcsfs']: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) from ._google_cloud_platform import get_bucket_files patterns = ['gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc'] subdir = os.path.join(base_dir, 'abi_l1b', '20190314_us_midlatitude_cyclone') os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force) assert len(filenames) == 16, "Not all files could be downloaded" return filenames def get_hurricane_florence_abi(base_dir=None, method=None, force=False, channels=None, num_frames=10): """Get GOES-16 ABI (Meso sector) data from 2018-09-11 13:00Z to 17:00Z. Args: base_dir (str): Base directory for downloaded files. method (str): Force download method for the data if not already cached. Allowed options are: 'gcsfs'. Default of ``None`` will choose the best method based on environment settings. force (bool): Force re-download of data regardless of its existence on the local system. Warning: May delete non-demo files stored in download directory. channels (list): Channels to include in download. Defaults to all 16 channels. num_frames (int or slice): Number of frames to download. Maximum 240 frames. Default 10 frames. Size per frame (all channels): ~15MB Total size (default 10 frames, all channels): ~124MB Total size (240 frames, all channels): ~3.5GB """ base_dir = base_dir or config.get('demo_data_dir', '.') if channels is None: channels = range(1, 17) if method is None: method = 'gcsfs' if method not in ['gcsfs']: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) if isinstance(num_frames, (int, float)): frame_slice = slice(0, num_frames) else: frame_slice = num_frames from ._google_cloud_platform import get_bucket_files patterns = [] for channel in channels: # patterns += ['gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/1[3456]/' # '*C{:02d}*s20182541[3456]*.nc'.format(channel)] patterns += [( 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc'.format(channel), 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc'.format(channel), 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc'.format(channel), 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc'.format(channel), )] subdir = os.path.join(base_dir, 'abi_l1b', '20180911_hurricane_florence_abi_l1b') os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force, pattern_slice=frame_slice) actual_slice = frame_slice.indices(240) # 240 max frames num_frames = int((actual_slice[1] - actual_slice[0]) / actual_slice[2]) assert len(filenames) == len(channels) * num_frames, "Not all files could be downloaded" return filenames satpy-0.34.0/satpy/demo/ahi_hsd.py000066400000000000000000000041151420401153000167620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
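# --- Illustrative usage sketch for the two ABI demo helpers above (not part of the original satpy source; the base_dir path is a placeholder) ---
#     from satpy import Scene, demo
#     # CONUS sector, all 16 channels (~110MB total)
#     conus_files = demo.get_us_midlatitude_cyclone_abi(base_dir="/tmp/satpy_demo")
#     # Meso sector: restrict to channels 2 and 13 and the first 5 frames instead of the full 240-frame archive
#     florence_files = demo.get_hurricane_florence_abi(base_dir="/tmp/satpy_demo", channels=[2, 13], num_frames=5)
#     # the returned file lists feed straight into a Scene, as shown in the demo module docstring
#     scn = Scene(reader='abi_l1b', filenames=conus_files)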
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo data download helper functions for AHI HSD data.""" import os from satpy import config def download_typhoon_surigae_ahi(base_dir=None, channels=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), segments=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)): """Download Himawari 8 data. This scene shows the Typhoon Surigae. """ import s3fs base_dir = base_dir or config.get('demo_data_dir', '.') channel_resolution = {1: 10, 2: 10, 3: 5, 4: 10} data_files = [] for channel in channels: resolution = channel_resolution.get(channel, 20) for segment in segments: data_files.append(f"HS_H08_20210417_0500_B{channel:02d}_FLDK_R{resolution:02d}_S{segment:02d}10.DAT.bz2") subdir = os.path.join(base_dir, 'ahi_hsd', '20210417_0500_typhoon_surigae') os.makedirs(subdir, exist_ok=True) fs = s3fs.S3FileSystem(anon=True) result = [] for filename in data_files: destination_filename = os.path.join(subdir, filename) result.append(destination_filename) if os.path.exists(destination_filename): continue to_get = 'noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/' + filename fs.get_file(to_get, destination_filename) return result satpy-0.34.0/satpy/demo/fci.py000066400000000000000000000035761420401153000161360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo FCI data download.""" import pathlib import tarfile import tempfile from satpy import config from . import utils _fci_uncompressed_nominal = ( "https://sftp.eumetsat.int/public/folder/UsCVknVOOkSyCdgpMimJNQ/" "User-Materials/Test-Data/MTG/MTG_FCI_L1C_Enhanced-NonN_TD-272_May2020/" "FCI_1C_UNCOMPRESSED_NOMINAL.tar.gz") def download_fci_test_data(base_dir=None): """Download FCI test data. Download the nominal FCI test data from July 2020. 
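    Example (an illustrative sketch added here, not from the original docstring; the target directory is a placeholder)::

        from satpy.demo import download_fci_test_data
        # downloads the tarball, unpacks it under <demo_data_dir>/fci/test_data,
        # and returns the names of the unpacked archive members
        members = download_fci_test_data(base_dir="/tmp/satpy_demo")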
""" subdir = get_fci_test_data_dir(base_dir=base_dir) with tempfile.TemporaryDirectory() as td: nm = pathlib.Path(td) / "fci-test-data.tar.gz" utils.download_url(_fci_uncompressed_nominal, nm) return _unpack_tarfile_to(nm, subdir) def get_fci_test_data_dir(base_dir=None): """Get directory for FCI test data.""" base_dir = base_dir or config.get("demo_data_dir", ".") return pathlib.Path(base_dir) / "fci" / "test_data" def _unpack_tarfile_to(filename, subdir): """Unpack content of tarfile in filename to subdir.""" with tarfile.open(filename, mode="r:gz") as tf: contents = tf.getnames() tf.extractall(path=subdir) return contents satpy-0.34.0/satpy/demo/seviri_hrit.py000066400000000000000000000056641420401153000177240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo data download for SEVIRI HRIT files.""" import logging import os.path from satpy import config from satpy.demo.utils import download_url logger = logging.getLogger(__name__) ZENODO_BASE_URL = "https://zenodo.org/api/files/dcc5ab29-d8a3-4fb5-ab2b-adc405d18c23/" FILENAME = "H-000-MSG4__-MSG4________-{channel:_<9s}-{segment:_<9s}-201802281500-__" def download_seviri_hrit_20180228_1500(base_dir=None, subset=None): """Download the SEVIRI HRIT files for 2018-02-28T15:00. 
*subset* is a dictionary with the channels as keys and granules to download as values, eg:: {"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None} """ files = generate_subset_of_filenames(subset) base_dir = base_dir or config.get("demo_data_dir", ".") subdir = os.path.join(base_dir, "seviri_hrit", "20180228_1500") os.makedirs(subdir, exist_ok=True) targets = [] for the_file in files: target = os.path.join(subdir, the_file) targets.append(target) if os.path.isfile(target): continue download_url(ZENODO_BASE_URL + the_file, target) return targets def generate_subset_of_filenames(subset=None, base_dir=""): """Generate SEVIRI HRIT filenames.""" if subset is None: subset = _create_full_set() pattern = os.path.join(base_dir, FILENAME) files = [] for channel, segments in subset.items(): new_files = _generate_filenames(pattern, channel, segments) files.extend(new_files) return files def _generate_filenames(pattern, channel, segments): """Generate the filenames for *channel* and *segments*.""" if channel in ["PRO", "EPI"]: new_files = [pattern.format(channel="", segment=channel)] else: new_files = (pattern.format(channel=channel, segment=f"{segment:06d}") for segment in segments) return new_files def _create_full_set(): """Create the full set dictionary.""" subset = {"HRV": range(1, 25), "EPI": None, "PRO": None} channels = ["IR_016", "IR_039", "IR_087", "IR_097", "IR_108", "IR_120", "IR_134", "VIS006", "VIS008", "WV_062", "WV_073"] for channel in channels: subset[channel] = range(1, 9) return subset satpy-0.34.0/satpy/demo/utils.py000066400000000000000000000020541420401153000165230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for demo data download.""" import requests def download_url(source, target): """Download a url in stream mode.""" with requests.get(source, stream=True) as r: r.raise_for_status() with open(target, "wb") as f: for chunk in r.iter_content(chunk_size=8192): f.write(chunk) satpy-0.34.0/satpy/demo/viirs_sdr.py000066400000000000000000000631761420401153000174030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
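# --- Illustrative usage sketch for the SEVIRI HRIT helper above (not part of the original satpy source; the base_dir path is a placeholder) ---
# 'subset' maps channel names to granule numbers, with None for the PRO/EPI prologue/epilogue files, as documented in download_seviri_hrit_20180228_1500:
#     from satpy.demo import download_seviri_hrit_20180228_1500
#     subset = {"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None, "PRO": None}
#     files = download_seviri_hrit_20180228_1500(base_dir="/tmp/satpy_demo", subset=subset)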
"""Demo data download for VIIRS SDR HDF5 files.""" import logging import os from satpy import config from satpy.demo.utils import download_url logger = logging.getLogger(__name__) ZENODO_BASE_URL = "https://zenodo.org/api/files/6aae2ac7-5e8e-4a42-96d0-393ad6a620ea/" GDNBO_URLS = [ "GDNBO_npp_d20170128_t1230144_e1231386_b27228_c20170128123806232923_cspp_dev.h5", "GDNBO_npp_d20170128_t1231398_e1233040_b27228_c20170128123931141440_cspp_dev.h5", "GDNBO_npp_d20170128_t1233052_e1234294_b27228_c20170128124058766619_cspp_dev.h5", "GDNBO_npp_d20170128_t1234306_e1235548_b27228_c20170128124307612305_cspp_dev.h5", "GDNBO_npp_d20170128_t1235560_e1237184_b27228_c20170128124429250510_cspp_dev.h5", "GDNBO_npp_d20170128_t1237197_e1238439_b27228_c20170128124604860922_cspp_dev.h5", "GDNBO_npp_d20170128_t1238451_e1240093_b27228_c20170128124804684300_cspp_dev.h5", "GDNBO_npp_d20170128_t1240105_e1241347_b27228_c20170128124931597063_cspp_dev.h5", "GDNBO_npp_d20170128_t1241359_e1243001_b27228_c20170128125104219695_cspp_dev.h5", "GDNBO_npp_d20170128_t1243013_e1244238_b27228_c20170128125239512908_cspp_dev.h5", ] GITCO_URLS = [ "GITCO_npp_d20170128_t1230144_e1231386_b27228_c20170128123806844060_cspp_dev.h5", "GITCO_npp_d20170128_t1231398_e1233040_b27228_c20170128123931757165_cspp_dev.h5", "GITCO_npp_d20170128_t1233052_e1234294_b27228_c20170128124059393347_cspp_dev.h5", "GITCO_npp_d20170128_t1234306_e1235548_b27228_c20170128124308254991_cspp_dev.h5", "GITCO_npp_d20170128_t1235560_e1237184_b27228_c20170128124429909006_cspp_dev.h5", "GITCO_npp_d20170128_t1237197_e1238439_b27228_c20170128124605535586_cspp_dev.h5", "GITCO_npp_d20170128_t1238451_e1240093_b27228_c20170128124805310389_cspp_dev.h5", "GITCO_npp_d20170128_t1240105_e1241347_b27228_c20170128124932240716_cspp_dev.h5", "GITCO_npp_d20170128_t1241359_e1243001_b27228_c20170128125104876016_cspp_dev.h5", "GITCO_npp_d20170128_t1243013_e1244238_b27228_c20170128125240141821_cspp_dev.h5", ] GMTCO_URLS = [ "GMTCO_npp_d20170128_t1230144_e1231386_b27228_c20170128123807370375_cspp_dev.h5", "GMTCO_npp_d20170128_t1231398_e1233040_b27228_c20170128123932277110_cspp_dev.h5", "GMTCO_npp_d20170128_t1233052_e1234294_b27228_c20170128124059920205_cspp_dev.h5", "GMTCO_npp_d20170128_t1234306_e1235548_b27228_c20170128124308776985_cspp_dev.h5", "GMTCO_npp_d20170128_t1235560_e1237184_b27228_c20170128124430441905_cspp_dev.h5", "GMTCO_npp_d20170128_t1237197_e1238439_b27228_c20170128124606068231_cspp_dev.h5", "GMTCO_npp_d20170128_t1238451_e1240093_b27228_c20170128124805827641_cspp_dev.h5", "GMTCO_npp_d20170128_t1240105_e1241347_b27228_c20170128124932760643_cspp_dev.h5", "GMTCO_npp_d20170128_t1241359_e1243001_b27228_c20170128125105397710_cspp_dev.h5", "GMTCO_npp_d20170128_t1243013_e1244238_b27228_c20170128125240670869_cspp_dev.h5", ] SVDNB_FILES = [ "SVDNB_npp_d20170128_t1230144_e1231386_b27228_c20170128123806052274_cspp_dev.h5", "SVDNB_npp_d20170128_t1231398_e1233040_b27228_c20170128123930950786_cspp_dev.h5", "SVDNB_npp_d20170128_t1233052_e1234294_b27228_c20170128124058573341_cspp_dev.h5", "SVDNB_npp_d20170128_t1234306_e1235548_b27228_c20170128124307412059_cspp_dev.h5", "SVDNB_npp_d20170128_t1235560_e1237184_b27228_c20170128124429036820_cspp_dev.h5", "SVDNB_npp_d20170128_t1237197_e1238439_b27228_c20170128124604651619_cspp_dev.h5", "SVDNB_npp_d20170128_t1238451_e1240093_b27228_c20170128124804485537_cspp_dev.h5", "SVDNB_npp_d20170128_t1240105_e1241347_b27228_c20170128124931392535_cspp_dev.h5", "SVDNB_npp_d20170128_t1241359_e1243001_b27228_c20170128125104024324_cspp_dev.h5", 
"SVDNB_npp_d20170128_t1243013_e1244238_b27228_c20170128125239325940_cspp_dev.h5", ] SVI01_FILES = [ "SVI01_npp_d20170128_t1230144_e1231386_b27228_c20170128123807637119_cspp_dev.h5", "SVI01_npp_d20170128_t1231398_e1233040_b27228_c20170128123932561605_cspp_dev.h5", "SVI01_npp_d20170128_t1233052_e1234294_b27228_c20170128124100227434_cspp_dev.h5", "SVI01_npp_d20170128_t1234306_e1235548_b27228_c20170128124309038634_cspp_dev.h5", "SVI01_npp_d20170128_t1235560_e1237184_b27228_c20170128124430720302_cspp_dev.h5", "SVI01_npp_d20170128_t1237197_e1238439_b27228_c20170128124606429178_cspp_dev.h5", "SVI01_npp_d20170128_t1238451_e1240093_b27228_c20170128124806092384_cspp_dev.h5", "SVI01_npp_d20170128_t1240105_e1241347_b27228_c20170128124933022697_cspp_dev.h5", "SVI01_npp_d20170128_t1241359_e1243001_b27228_c20170128125105683986_cspp_dev.h5", "SVI01_npp_d20170128_t1243013_e1244238_b27228_c20170128125240927888_cspp_dev.h5", ] SVI02_FILES = [ "SVI02_npp_d20170128_t1230144_e1231386_b27228_c20170128123807711244_cspp_dev.h5", "SVI02_npp_d20170128_t1231398_e1233040_b27228_c20170128123932632807_cspp_dev.h5", "SVI02_npp_d20170128_t1233052_e1234294_b27228_c20170128124100316117_cspp_dev.h5", "SVI02_npp_d20170128_t1234306_e1235548_b27228_c20170128124309108964_cspp_dev.h5", "SVI02_npp_d20170128_t1235560_e1237184_b27228_c20170128124430789039_cspp_dev.h5", "SVI02_npp_d20170128_t1237197_e1238439_b27228_c20170128124606564398_cspp_dev.h5", "SVI02_npp_d20170128_t1238451_e1240093_b27228_c20170128124806162998_cspp_dev.h5", "SVI02_npp_d20170128_t1240105_e1241347_b27228_c20170128124933090354_cspp_dev.h5", "SVI02_npp_d20170128_t1241359_e1243001_b27228_c20170128125105758438_cspp_dev.h5", "SVI02_npp_d20170128_t1243013_e1244238_b27228_c20170128125240934475_cspp_dev.h5", ] SVI03_FILES = [ "SVI03_npp_d20170128_t1230144_e1231386_b27228_c20170128123807790854_cspp_dev.h5", "SVI03_npp_d20170128_t1231398_e1233040_b27228_c20170128123932703535_cspp_dev.h5", "SVI03_npp_d20170128_t1233052_e1234294_b27228_c20170128124100406626_cspp_dev.h5", "SVI03_npp_d20170128_t1234306_e1235548_b27228_c20170128124309179885_cspp_dev.h5", "SVI03_npp_d20170128_t1235560_e1237184_b27228_c20170128124430858868_cspp_dev.h5", "SVI03_npp_d20170128_t1237197_e1238439_b27228_c20170128124606750872_cspp_dev.h5", "SVI03_npp_d20170128_t1238451_e1240093_b27228_c20170128124806231759_cspp_dev.h5", "SVI03_npp_d20170128_t1240105_e1241347_b27228_c20170128124933157871_cspp_dev.h5", "SVI03_npp_d20170128_t1241359_e1243001_b27228_c20170128125105832479_cspp_dev.h5", "SVI03_npp_d20170128_t1243013_e1244238_b27228_c20170128125240940464_cspp_dev.h5", ] SVI04_FILES = [ "SVI04_npp_d20170128_t1230144_e1231386_b27228_c20170128123807879916_cspp_dev.h5", "SVI04_npp_d20170128_t1231398_e1233040_b27228_c20170128123932774251_cspp_dev.h5", "SVI04_npp_d20170128_t1233052_e1234294_b27228_c20170128124100502220_cspp_dev.h5", "SVI04_npp_d20170128_t1234306_e1235548_b27228_c20170128124309251788_cspp_dev.h5", "SVI04_npp_d20170128_t1235560_e1237184_b27228_c20170128124430928643_cspp_dev.h5", "SVI04_npp_d20170128_t1237197_e1238439_b27228_c20170128124606941637_cspp_dev.h5", "SVI04_npp_d20170128_t1238451_e1240093_b27228_c20170128124806300867_cspp_dev.h5", "SVI04_npp_d20170128_t1240105_e1241347_b27228_c20170128124933224276_cspp_dev.h5", "SVI04_npp_d20170128_t1241359_e1243001_b27228_c20170128125105908005_cspp_dev.h5", "SVI04_npp_d20170128_t1243013_e1244238_b27228_c20170128125240946462_cspp_dev.h5", ] SVI05_FILES = [ "SVI05_npp_d20170128_t1230144_e1231386_b27228_c20170128123807965352_cspp_dev.h5", 
"SVI05_npp_d20170128_t1231398_e1233040_b27228_c20170128123932843985_cspp_dev.h5", "SVI05_npp_d20170128_t1233052_e1234294_b27228_c20170128124100619023_cspp_dev.h5", "SVI05_npp_d20170128_t1234306_e1235548_b27228_c20170128124309321883_cspp_dev.h5", "SVI05_npp_d20170128_t1235560_e1237184_b27228_c20170128124430998015_cspp_dev.h5", "SVI05_npp_d20170128_t1237197_e1238439_b27228_c20170128124607124779_cspp_dev.h5", "SVI05_npp_d20170128_t1238451_e1240093_b27228_c20170128124806370721_cspp_dev.h5", "SVI05_npp_d20170128_t1240105_e1241347_b27228_c20170128124933292345_cspp_dev.h5", "SVI05_npp_d20170128_t1241359_e1243001_b27228_c20170128125105983240_cspp_dev.h5", "SVI05_npp_d20170128_t1243013_e1244238_b27228_c20170128125241011931_cspp_dev.h5", ] SVM01_FILES = [ "SVM01_npp_d20170128_t1230144_e1231386_b27228_c20170128123808056273_cspp_dev.h5", "SVM01_npp_d20170128_t1231398_e1233040_b27228_c20170128123932914817_cspp_dev.h5", "SVM01_npp_d20170128_t1233052_e1234294_b27228_c20170128124100687072_cspp_dev.h5", "SVM01_npp_d20170128_t1234306_e1235548_b27228_c20170128124309391583_cspp_dev.h5", "SVM01_npp_d20170128_t1235560_e1237184_b27228_c20170128124431068152_cspp_dev.h5", "SVM01_npp_d20170128_t1237197_e1238439_b27228_c20170128124607341439_cspp_dev.h5", "SVM01_npp_d20170128_t1238451_e1240093_b27228_c20170128124806439930_cspp_dev.h5", "SVM01_npp_d20170128_t1240105_e1241347_b27228_c20170128124933359550_cspp_dev.h5", "SVM01_npp_d20170128_t1241359_e1243001_b27228_c20170128125106057121_cspp_dev.h5", "SVM01_npp_d20170128_t1243013_e1244238_b27228_c20170128125241079274_cspp_dev.h5", ] SVM02_FILES = [ "SVM02_npp_d20170128_t1230144_e1231386_b27228_c20170128123808083056_cspp_dev.h5", "SVM02_npp_d20170128_t1231398_e1233040_b27228_c20170128123932936791_cspp_dev.h5", "SVM02_npp_d20170128_t1233052_e1234294_b27228_c20170128124100708303_cspp_dev.h5", "SVM02_npp_d20170128_t1234306_e1235548_b27228_c20170128124309411322_cspp_dev.h5", "SVM02_npp_d20170128_t1235560_e1237184_b27228_c20170128124431089436_cspp_dev.h5", "SVM02_npp_d20170128_t1237197_e1238439_b27228_c20170128124607386792_cspp_dev.h5", "SVM02_npp_d20170128_t1238451_e1240093_b27228_c20170128124806460870_cspp_dev.h5", "SVM02_npp_d20170128_t1240105_e1241347_b27228_c20170128124933381053_cspp_dev.h5", "SVM02_npp_d20170128_t1241359_e1243001_b27228_c20170128125106080807_cspp_dev.h5", "SVM02_npp_d20170128_t1243013_e1244238_b27228_c20170128125241085636_cspp_dev.h5", ] SVM03_FILES = [ "SVM03_npp_d20170128_t1230144_e1231386_b27228_c20170128123808110482_cspp_dev.h5", "SVM03_npp_d20170128_t1231398_e1233040_b27228_c20170128123932959109_cspp_dev.h5", "SVM03_npp_d20170128_t1233052_e1234294_b27228_c20170128124100729893_cspp_dev.h5", "SVM03_npp_d20170128_t1234306_e1235548_b27228_c20170128124309431166_cspp_dev.h5", "SVM03_npp_d20170128_t1235560_e1237184_b27228_c20170128124431111317_cspp_dev.h5", "SVM03_npp_d20170128_t1237197_e1238439_b27228_c20170128124607452947_cspp_dev.h5", "SVM03_npp_d20170128_t1238451_e1240093_b27228_c20170128124806482313_cspp_dev.h5", "SVM03_npp_d20170128_t1240105_e1241347_b27228_c20170128124933402956_cspp_dev.h5", "SVM03_npp_d20170128_t1241359_e1243001_b27228_c20170128125106104416_cspp_dev.h5", "SVM03_npp_d20170128_t1243013_e1244238_b27228_c20170128125241091894_cspp_dev.h5", ] SVM04_FILES = [ "SVM04_npp_d20170128_t1230144_e1231386_b27228_c20170128123808144258_cspp_dev.h5", "SVM04_npp_d20170128_t1231398_e1233040_b27228_c20170128123932987116_cspp_dev.h5", "SVM04_npp_d20170128_t1233052_e1234294_b27228_c20170128124100757998_cspp_dev.h5", 
"SVM04_npp_d20170128_t1234306_e1235548_b27228_c20170128124309456779_cspp_dev.h5", "SVM04_npp_d20170128_t1235560_e1237184_b27228_c20170128124431139074_cspp_dev.h5", "SVM04_npp_d20170128_t1237197_e1238439_b27228_c20170128124607542297_cspp_dev.h5", "SVM04_npp_d20170128_t1238451_e1240093_b27228_c20170128124806582119_cspp_dev.h5", "SVM04_npp_d20170128_t1240105_e1241347_b27228_c20170128124933430115_cspp_dev.h5", "SVM04_npp_d20170128_t1241359_e1243001_b27228_c20170128125106135317_cspp_dev.h5", "SVM04_npp_d20170128_t1243013_e1244238_b27228_c20170128125241097854_cspp_dev.h5", ] SVM05_FILES = [ "SVM05_npp_d20170128_t1230144_e1231386_b27228_c20170128123808174909_cspp_dev.h5", "SVM05_npp_d20170128_t1231398_e1233040_b27228_c20170128123933013965_cspp_dev.h5", "SVM05_npp_d20170128_t1233052_e1234294_b27228_c20170128124100786454_cspp_dev.h5", "SVM05_npp_d20170128_t1234306_e1235548_b27228_c20170128124309482588_cspp_dev.h5", "SVM05_npp_d20170128_t1235560_e1237184_b27228_c20170128124431167292_cspp_dev.h5", "SVM05_npp_d20170128_t1237197_e1238439_b27228_c20170128124607571141_cspp_dev.h5", "SVM05_npp_d20170128_t1238451_e1240093_b27228_c20170128124806609136_cspp_dev.h5", "SVM05_npp_d20170128_t1240105_e1241347_b27228_c20170128124933456985_cspp_dev.h5", "SVM05_npp_d20170128_t1241359_e1243001_b27228_c20170128125106166701_cspp_dev.h5", "SVM05_npp_d20170128_t1243013_e1244238_b27228_c20170128125241103776_cspp_dev.h5", ] SVM06_FILES = [ "SVM06_npp_d20170128_t1230144_e1231386_b27228_c20170128123808209437_cspp_dev.h5", "SVM06_npp_d20170128_t1231398_e1233040_b27228_c20170128123933040415_cspp_dev.h5", "SVM06_npp_d20170128_t1233052_e1234294_b27228_c20170128124100814386_cspp_dev.h5", "SVM06_npp_d20170128_t1234306_e1235548_b27228_c20170128124309508530_cspp_dev.h5", "SVM06_npp_d20170128_t1235560_e1237184_b27228_c20170128124431195933_cspp_dev.h5", "SVM06_npp_d20170128_t1237197_e1238439_b27228_c20170128124607627637_cspp_dev.h5", "SVM06_npp_d20170128_t1238451_e1240093_b27228_c20170128124806636359_cspp_dev.h5", "SVM06_npp_d20170128_t1240105_e1241347_b27228_c20170128124933483996_cspp_dev.h5", "SVM06_npp_d20170128_t1241359_e1243001_b27228_c20170128125106198061_cspp_dev.h5", "SVM06_npp_d20170128_t1243013_e1244238_b27228_c20170128125241109756_cspp_dev.h5", ] SVM07_FILES = [ "SVM07_npp_d20170128_t1230144_e1231386_b27228_c20170128123808817507_cspp_dev.h5", "SVM07_npp_d20170128_t1231398_e1233040_b27228_c20170128123933681441_cspp_dev.h5", "SVM07_npp_d20170128_t1233052_e1234294_b27228_c20170128124101490225_cspp_dev.h5", "SVM07_npp_d20170128_t1234306_e1235548_b27228_c20170128124310169252_cspp_dev.h5", "SVM07_npp_d20170128_t1235560_e1237184_b27228_c20170128124431921741_cspp_dev.h5", "SVM07_npp_d20170128_t1237197_e1238439_b27228_c20170128124608449604_cspp_dev.h5", "SVM07_npp_d20170128_t1238451_e1240093_b27228_c20170128124807323479_cspp_dev.h5", "SVM07_npp_d20170128_t1240105_e1241347_b27228_c20170128124934114857_cspp_dev.h5", "SVM07_npp_d20170128_t1241359_e1243001_b27228_c20170128125106915897_cspp_dev.h5", "SVM07_npp_d20170128_t1243013_e1244238_b27228_c20170128125241115831_cspp_dev.h5", ] SVM08_FILES = [ "SVM08_npp_d20170128_t1230144_e1231386_b27228_c20170128123808263071_cspp_dev.h5", "SVM08_npp_d20170128_t1231398_e1233040_b27228_c20170128123933088148_cspp_dev.h5", "SVM08_npp_d20170128_t1233052_e1234294_b27228_c20170128124100871070_cspp_dev.h5", "SVM08_npp_d20170128_t1234306_e1235548_b27228_c20170128124309555838_cspp_dev.h5", "SVM08_npp_d20170128_t1235560_e1237184_b27228_c20170128124431248317_cspp_dev.h5", 
"SVM08_npp_d20170128_t1237197_e1238439_b27228_c20170128124607703167_cspp_dev.h5", "SVM08_npp_d20170128_t1238451_e1240093_b27228_c20170128124806684245_cspp_dev.h5", "SVM08_npp_d20170128_t1240105_e1241347_b27228_c20170128124933531899_cspp_dev.h5", "SVM08_npp_d20170128_t1241359_e1243001_b27228_c20170128125106322404_cspp_dev.h5", "SVM08_npp_d20170128_t1243013_e1244238_b27228_c20170128125241141517_cspp_dev.h5", ] SVM09_FILES = [ "SVM09_npp_d20170128_t1230144_e1231386_b27228_c20170128123808287273_cspp_dev.h5", "SVM09_npp_d20170128_t1231398_e1233040_b27228_c20170128123933108818_cspp_dev.h5", "SVM09_npp_d20170128_t1233052_e1234294_b27228_c20170128124100892937_cspp_dev.h5", "SVM09_npp_d20170128_t1234306_e1235548_b27228_c20170128124309576967_cspp_dev.h5", "SVM09_npp_d20170128_t1235560_e1237184_b27228_c20170128124431271226_cspp_dev.h5", "SVM09_npp_d20170128_t1237197_e1238439_b27228_c20170128124607724822_cspp_dev.h5", "SVM09_npp_d20170128_t1238451_e1240093_b27228_c20170128124806704840_cspp_dev.h5", "SVM09_npp_d20170128_t1240105_e1241347_b27228_c20170128124933552828_cspp_dev.h5", "SVM09_npp_d20170128_t1241359_e1243001_b27228_c20170128125106345774_cspp_dev.h5", "SVM09_npp_d20170128_t1243013_e1244238_b27228_c20170128125241161505_cspp_dev.h5", ] SVM10_FILES = [ "SVM10_npp_d20170128_t1230144_e1231386_b27228_c20170128123808310591_cspp_dev.h5", "SVM10_npp_d20170128_t1231398_e1233040_b27228_c20170128123933130017_cspp_dev.h5", "SVM10_npp_d20170128_t1233052_e1234294_b27228_c20170128124100914429_cspp_dev.h5", "SVM10_npp_d20170128_t1234306_e1235548_b27228_c20170128124309597409_cspp_dev.h5", "SVM10_npp_d20170128_t1235560_e1237184_b27228_c20170128124431293295_cspp_dev.h5", "SVM10_npp_d20170128_t1237197_e1238439_b27228_c20170128124607775262_cspp_dev.h5", "SVM10_npp_d20170128_t1238451_e1240093_b27228_c20170128124806725948_cspp_dev.h5", "SVM10_npp_d20170128_t1240105_e1241347_b27228_c20170128124933573645_cspp_dev.h5", "SVM10_npp_d20170128_t1241359_e1243001_b27228_c20170128125106368109_cspp_dev.h5", "SVM10_npp_d20170128_t1243013_e1244238_b27228_c20170128125241167901_cspp_dev.h5", ] SVM11_FILES = [ "SVM11_npp_d20170128_t1230144_e1231386_b27228_c20170128123808334604_cspp_dev.h5", "SVM11_npp_d20170128_t1231398_e1233040_b27228_c20170128123933151513_cspp_dev.h5", "SVM11_npp_d20170128_t1233052_e1234294_b27228_c20170128124100935872_cspp_dev.h5", "SVM11_npp_d20170128_t1234306_e1235548_b27228_c20170128124309618913_cspp_dev.h5", "SVM11_npp_d20170128_t1235560_e1237184_b27228_c20170128124431315343_cspp_dev.h5", "SVM11_npp_d20170128_t1237197_e1238439_b27228_c20170128124607795773_cspp_dev.h5", "SVM11_npp_d20170128_t1238451_e1240093_b27228_c20170128124806746702_cspp_dev.h5", "SVM11_npp_d20170128_t1240105_e1241347_b27228_c20170128124933594619_cspp_dev.h5", "SVM11_npp_d20170128_t1241359_e1243001_b27228_c20170128125106390787_cspp_dev.h5", "SVM11_npp_d20170128_t1243013_e1244238_b27228_c20170128125241187089_cspp_dev.h5", ] SVM12_FILES = [ "SVM12_npp_d20170128_t1230144_e1231386_b27228_c20170128123808354907_cspp_dev.h5", "SVM12_npp_d20170128_t1231398_e1233040_b27228_c20170128123933172698_cspp_dev.h5", "SVM12_npp_d20170128_t1233052_e1234294_b27228_c20170128124100958185_cspp_dev.h5", "SVM12_npp_d20170128_t1234306_e1235548_b27228_c20170128124309641720_cspp_dev.h5", "SVM12_npp_d20170128_t1235560_e1237184_b27228_c20170128124431337449_cspp_dev.h5", "SVM12_npp_d20170128_t1237197_e1238439_b27228_c20170128124607849336_cspp_dev.h5", "SVM12_npp_d20170128_t1238451_e1240093_b27228_c20170128124806767820_cspp_dev.h5", 
"SVM12_npp_d20170128_t1240105_e1241347_b27228_c20170128124933615858_cspp_dev.h5", "SVM12_npp_d20170128_t1241359_e1243001_b27228_c20170128125106413369_cspp_dev.h5", "SVM12_npp_d20170128_t1243013_e1244238_b27228_c20170128125241193417_cspp_dev.h5", ] SVM13_FILES = [ "SVM13_npp_d20170128_t1230144_e1231386_b27228_c20170128123808374740_cspp_dev.h5", "SVM13_npp_d20170128_t1231398_e1233040_b27228_c20170128123933194069_cspp_dev.h5", "SVM13_npp_d20170128_t1233052_e1234294_b27228_c20170128124100980119_cspp_dev.h5", "SVM13_npp_d20170128_t1234306_e1235548_b27228_c20170128124309664100_cspp_dev.h5", "SVM13_npp_d20170128_t1235560_e1237184_b27228_c20170128124431359731_cspp_dev.h5", "SVM13_npp_d20170128_t1237197_e1238439_b27228_c20170128124607874078_cspp_dev.h5", "SVM13_npp_d20170128_t1238451_e1240093_b27228_c20170128124806788761_cspp_dev.h5", "SVM13_npp_d20170128_t1240105_e1241347_b27228_c20170128124933637079_cspp_dev.h5", "SVM13_npp_d20170128_t1241359_e1243001_b27228_c20170128125106435940_cspp_dev.h5", "SVM13_npp_d20170128_t1243013_e1244238_b27228_c20170128125241212475_cspp_dev.h5", ] SVM14_FILES = [ "SVM14_npp_d20170128_t1230144_e1231386_b27228_c20170128123808406951_cspp_dev.h5", "SVM14_npp_d20170128_t1231398_e1233040_b27228_c20170128123933225740_cspp_dev.h5", "SVM14_npp_d20170128_t1233052_e1234294_b27228_c20170128124101014245_cspp_dev.h5", "SVM14_npp_d20170128_t1234306_e1235548_b27228_c20170128124309701221_cspp_dev.h5", "SVM14_npp_d20170128_t1235560_e1237184_b27228_c20170128124431396452_cspp_dev.h5", "SVM14_npp_d20170128_t1237197_e1238439_b27228_c20170128124607945197_cspp_dev.h5", "SVM14_npp_d20170128_t1238451_e1240093_b27228_c20170128124806821782_cspp_dev.h5", "SVM14_npp_d20170128_t1240105_e1241347_b27228_c20170128124933671536_cspp_dev.h5", "SVM14_npp_d20170128_t1241359_e1243001_b27228_c20170128125106472259_cspp_dev.h5", "SVM14_npp_d20170128_t1243013_e1244238_b27228_c20170128125241244180_cspp_dev.h5", ] SVM15_FILES = [ "SVM15_npp_d20170128_t1230144_e1231386_b27228_c20170128123808427359_cspp_dev.h5", "SVM15_npp_d20170128_t1231398_e1233040_b27228_c20170128123933246722_cspp_dev.h5", "SVM15_npp_d20170128_t1233052_e1234294_b27228_c20170128124101036439_cspp_dev.h5", "SVM15_npp_d20170128_t1234306_e1235548_b27228_c20170128124309725283_cspp_dev.h5", "SVM15_npp_d20170128_t1235560_e1237184_b27228_c20170128124431418392_cspp_dev.h5", "SVM15_npp_d20170128_t1237197_e1238439_b27228_c20170128124607965779_cspp_dev.h5", "SVM15_npp_d20170128_t1238451_e1240093_b27228_c20170128124806948533_cspp_dev.h5", "SVM15_npp_d20170128_t1240105_e1241347_b27228_c20170128124933693703_cspp_dev.h5", "SVM15_npp_d20170128_t1241359_e1243001_b27228_c20170128125106494806_cspp_dev.h5", "SVM15_npp_d20170128_t1243013_e1244238_b27228_c20170128125241264993_cspp_dev.h5", ] SVM16_FILES = [ "SVM16_npp_d20170128_t1230144_e1231386_b27228_c20170128123808447333_cspp_dev.h5", "SVM16_npp_d20170128_t1231398_e1233040_b27228_c20170128123933268965_cspp_dev.h5", "SVM16_npp_d20170128_t1233052_e1234294_b27228_c20170128124101058805_cspp_dev.h5", "SVM16_npp_d20170128_t1234306_e1235548_b27228_c20170128124309747830_cspp_dev.h5", "SVM16_npp_d20170128_t1235560_e1237184_b27228_c20170128124431440604_cspp_dev.h5", "SVM16_npp_d20170128_t1237197_e1238439_b27228_c20170128124608015196_cspp_dev.h5", "SVM16_npp_d20170128_t1238451_e1240093_b27228_c20170128124806970479_cspp_dev.h5", "SVM16_npp_d20170128_t1240105_e1241347_b27228_c20170128124933715705_cspp_dev.h5", "SVM16_npp_d20170128_t1241359_e1243001_b27228_c20170128125106518023_cspp_dev.h5", 
"SVM16_npp_d20170128_t1243013_e1244238_b27228_c20170128125241285533_cspp_dev.h5", ] FILES_20170128_1229 = { "DNB": SVDNB_FILES, "I01": SVI01_FILES, "I02": SVI02_FILES, "I03": SVI03_FILES, "I04": SVI04_FILES, "I05": SVI05_FILES, "M01": SVM01_FILES, "M02": SVM02_FILES, "M03": SVM03_FILES, "M04": SVM04_FILES, "M05": SVM05_FILES, "M06": SVM06_FILES, "M07": SVM07_FILES, "M08": SVM08_FILES, "M09": SVM09_FILES, "M10": SVM10_FILES, "M11": SVM11_FILES, "M12": SVM12_FILES, "M13": SVM13_FILES, "M14": SVM14_FILES, "M15": SVM15_FILES, "M16": SVM16_FILES, } def get_viirs_sdr_20170128_1229( base_dir=None, channels=("I01", "I02", "I03", "I04", "I05", "M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11", "M12", "M13", "M14", "M15", "M16", "DNB"), granules=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)): r"""Get VIIRS SDR files for 2017-01-28 12:29 to 12:43. These files are downloaded from Zenodo. You can see the full file listing here: https://zenodo.org/record/263296 Specific channels can be specified with the ``channels`` keyword argument. By default, all channels (all I bands, M bands, and DNB bands) will be downloaded. Channels are referred to by their band type and channel number (ex. "I01" or "M16" or "DNB"). Terrain-corrected geolocation files are always downloaded when the corresponding band data is specified. The ``granules`` argument will control which granules ("time steps") are downloaded. There are 10 available and the keyword argument can be specified as a tuple of integers from 1 to 10. This full dataset is ~10.1GB. Notes: File list was retrieved using the zenodo API. .. code-block:: python import requests viirs_listing = requests.get("https://zenodo.org/api/records/263296") viirs_dict = json.loads(viirs_listing.content) print("\n".join(sorted(x['links']['self'] for x in viirs_dict['files']))) """ base_dir = base_dir or config.get("demo_data_dir", ".") subdir = os.path.join(base_dir, "viirs_sdr", "20170128_1229") os.makedirs(subdir, exist_ok=True) urls = (ZENODO_BASE_URL + fn for fn in _get_filenames_to_download(channels, granules)) files = [] for url in urls: target = os.path.join(subdir, os.path.basename(url)) files.append(target) if os.path.isfile(target): logger.info(f"File {target} already exists, skipping...") continue logger.info(f"Downloading file to {target}...") download_url(url, target) return files def _get_filenames_to_download(channels, granules): if any("DNB" in chan for chan in channels): yield from _yield_specific_granules(GDNBO_URLS, granules) if any("I" in chan for chan in channels): yield from _yield_specific_granules(GITCO_URLS, granules) if any("M" in chan for chan in channels): yield from _yield_specific_granules(GMTCO_URLS, granules) for channel in channels: yield from _yield_specific_granules(FILES_20170128_1229[channel], granules) def _yield_specific_granules(filenames, granules): for gran_num in granules: yield filenames[gran_num - 1] satpy-0.34.0/satpy/dependency_tree.py000066400000000000000000000613261420401153000176030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Implementation of a dependency tree.""" from __future__ import annotations from typing import Container, Iterable, Optional import numpy as np from satpy import DataID, DatasetDict from satpy.dataset import ModifierTuple, create_filtered_query from satpy.dataset.data_dict import TooManyResults, get_key from satpy.node import EMPTY_LEAF_NAME, LOG, CompositorNode, MissingDependencies, Node, ReaderNode class Tree: """A tree implementation.""" # simplify future logic by only having one "sentinel" empty node # making it a class attribute ensures it is the same across instances empty_node = Node(EMPTY_LEAF_NAME) def __init__(self): """Set up the tree.""" self._root = Node(None) # keep a flat dictionary of nodes contained in the tree for better # __contains__ self._all_nodes = _DataIDContainer() def leaves(self, limit_nodes_to: Optional[Iterable[DataID]] = None, unique: bool = True ) -> list[Node]: """Get the leaves of the tree starting at the root. Args: limit_nodes_to: Limit leaves to Nodes with the names (DataIDs) specified. unique: Only include individual leaf nodes once. Returns: list of leaf nodes """ if limit_nodes_to is None: return self._root.leaves(unique=unique) res = list() for child_id in limit_nodes_to: for sub_child in self._all_nodes[child_id].leaves(unique=unique): if not unique or sub_child not in res: res.append(sub_child) return res def trunk(self, limit_nodes_to: Optional[Iterable[DataID]] = None, unique: bool = True, limit_children_to: Optional[Container[DataID]] = None, ) -> list[Node]: """Get the trunk nodes of the tree starting at this root. Args: limit_nodes_to: Limit searching to trunk nodes with the names (DataIDs) specified and the children of these nodes. unique: Only include individual trunk nodes once limit_children_to: Limit searching to the children with the specified names. These child nodes will be included in the result, but not their children. Returns: list of trunk nodes """ if limit_nodes_to is None: return self._root.trunk(unique=unique, limit_children_to=limit_children_to) res = list() for child_id in limit_nodes_to: child_node = self._all_nodes[child_id] for sub_child in child_node.trunk(unique=unique, limit_children_to=limit_children_to): if not unique or sub_child not in res: res.append(sub_child) return res def add_child(self, parent, child): """Add a child to the tree.""" Node.add_child(parent, child) # Sanity check: Node objects should be unique. They can be added # multiple times if more than one Node depends on them # but they should all map to the same Node object. 
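        # (Hedged aside, not in the original source: ``add_leaf`` below wraps this method,
        # e.g. ``node = tree.add_leaf(data_id)`` creates or reuses the Node for ``data_id``
        # and attaches it under the root, or under a given ``parent``, passing through
        # this uniqueness check.)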
if self.contains(child.name): if self._all_nodes[child.name] is not child: raise RuntimeError if child is self.empty_node: # No need to store "empty" nodes return self._all_nodes[child.name] = child def add_leaf(self, ds_id, parent=None): """Add a leaf to the tree.""" if parent is None: parent = self._root try: node = self[ds_id] except KeyError: node = Node(ds_id) self.add_child(parent, node) return node def __contains__(self, item): """Check if a item is in the tree.""" return item in self._all_nodes def __getitem__(self, item): """Get an item of the tree.""" return self._all_nodes[item] def contains(self, item): """Check contains when we know the *exact* DataID or DataQuery.""" return super(_DataIDContainer, self._all_nodes).__contains__(item) def getitem(self, item): """Get Node when we know the *exact* DataID or DataQuery.""" return super(_DataIDContainer, self._all_nodes).__getitem__(item) def __str__(self): """Render the dependency tree as a string.""" return self._root.display() class DependencyTree(Tree): """Structure to discover and store `Dataset` dependencies. Used primarily by the `Scene` object to organize dependency finding. Dependencies are stored used a series of `Node` objects which this class is a subclass of. """ def __init__(self, readers, compositors=None, modifiers=None, available_only=False): """Collect Dataset generating information. Collect the objects that generate and have information about Datasets including objects that may depend on certain Datasets being generated. This includes readers, compositors, and modifiers. Composites and modifiers are defined per-sensor. If multiple sensors are available, compositors and modifiers are searched for in sensor alphabetical order. Args: readers (dict): Reader name -> Reader Object compositors (dict): Sensor name -> Composite ID -> Composite Object. Empty dictionary by default. modifiers (dict): Sensor name -> Modifier name -> (Modifier Class, modifier options). Empty dictionary by default. available_only (bool): Whether only reader's available/loadable datasets should be used when searching for dependencies (True) or use all known/configured datasets regardless of whether the necessary files were provided to the reader (False). Note that when ``False`` loadable variations of a dataset will have priority over other known variations. Default is ``False``. """ super().__init__() self.readers = readers self.compositors = {} self.modifiers = {} self._available_only = available_only self.update_compositors_and_modifiers(compositors or {}, modifiers or {}) def update_compositors_and_modifiers(self, compositors: dict, modifiers: dict) -> None: """Add additional compositors and modifiers to the tree. Provided dictionaries and the first sub-level dictionaries are copied to avoid modifying the input. Args: compositors (dict): Sensor name -> composite ID -> Composite Object modifiers (dict): Sensor name -> Modifier name -> (Modifier Class, modifier options) """ for sensor_name, sensor_comps in compositors.items(): self.compositors.setdefault(sensor_name, DatasetDict()).update(sensor_comps) for sensor_name, sensor_mods in modifiers.items(): self.modifiers.setdefault(sensor_name, {}).update(sensor_mods) def copy(self): """Copy this node tree. Note all references to readers are removed. This is meant to avoid tree copies accessing readers that would return incompatible (Area) data. 
Theoretically it should be possible for tree copies to request compositor or modifier information as long as they don't depend on any datasets not already existing in the dependency tree. """ new_tree = DependencyTree({}, self.compositors, self.modifiers) for c in self._root.children: c = c.copy(node_cache=new_tree._all_nodes) new_tree.add_child(new_tree._root, c) return new_tree def update_node_name(self, node, new_name): """Update 'name' property of a node and any related metadata.""" old_name = node.name if old_name not in self._all_nodes: raise RuntimeError del self._all_nodes[old_name] node.update_name(new_name) self._all_nodes[new_name] = node def populate_with_keys(self, dataset_keys: set, query=None): """Populate the dependency tree. Args: dataset_keys (set): Strings, DataIDs, DataQuerys to find dependencies for query (DataQuery): Additional filter parameters. See `satpy.readers.get_key` for more details. Returns: (Node, set): Root node of the dependency tree and a set of unknown datasets """ unknown_datasets = list() known_nodes = list() for key in dataset_keys.copy(): try: dsq = create_filtered_query(key, query) node = self._create_subtree_for_key(dsq, query) except MissingDependencies as unknown: unknown_datasets.append(unknown.missing_dependencies) else: known_nodes.append(node) self.add_child(self._root, node) for key in dataset_keys.copy(): dataset_keys.discard(key) for node in known_nodes: dataset_keys.add(node.name) if unknown_datasets: raise MissingDependencies(unknown_datasets, "Unknown datasets:") def _create_subtree_for_key(self, dataset_key, query=None): """Find the dependencies for *dataset_key*. Args: dataset_key (str, float, DataID, DataQuery): Dataset identifier to locate and find any additional dependencies for. query (DataQuery): Additional filter parameters. See `satpy.readers.get_key` for more details. """ # 0 check if the *exact* dataset is already loaded try: node = self._get_subtree_for_existing_key(dataset_key) except MissingDependencies: # exact dataset isn't loaded, let's load it below pass else: return node # 1 try to get *best* dataset from reader try: node = self._create_subtree_from_reader(dataset_key, query) except TooManyResults: LOG.warning("Too many possible datasets to load for {}".format(dataset_key)) raise MissingDependencies({dataset_key}) except MissingDependencies: pass else: return node # 2 try to find a composite by name (any version of it is good enough) try: node = self._get_subtree_for_existing_name(dataset_key) except MissingDependencies: pass else: return node # 3 try to find a composite that matches try: node = self._create_subtree_from_compositors(dataset_key, query) except MissingDependencies: raise else: return node def _get_subtree_for_existing_key(self, dsq): try: node = self.getitem(dsq) LOG.trace("Found exact dataset already loaded: {}".format(node.name)) return node except KeyError: LOG.trace("Exact dataset {} isn't loaded, will try reader...".format(dsq)) raise MissingDependencies({dsq}) def _create_subtree_from_reader(self, dataset_key, query): try: node = self._find_reader_node(dataset_key, query) except MissingDependencies: LOG.trace("Could not find dataset in reader: {}".format(dataset_key)) raise else: LOG.trace("Found reader provided dataset:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name)) return node def _find_reader_node(self, dataset_key, query): """Attempt to find a `DataID` in the available readers. 
Args: dataset_key (str, float, DataID, DataQuery): Dataset name, wavelength, `DataID` or `DataQuery` to use in searching for the dataset from the available readers. """ matching_ids = self._find_matching_ids_in_readers(dataset_key) unique_id = self._get_unique_matching_id(matching_ids, dataset_key, query) for reader_name, ids in matching_ids.items(): if unique_id in ids: return self._get_unique_reader_node_from_id(unique_id, reader_name) raise RuntimeError("Data ID disappeared.") def _find_matching_ids_in_readers(self, dataset_key): matching_ids = {} for reader_name, reader_instance in self.readers.items(): matching_ids[reader_name] = [] try: ds_ids = reader_instance.get_dataset_key(dataset_key, available_only=self._available_only, num_results=0, best=False) except KeyError: LOG.trace("Can't find dataset %s in reader %s", str(dataset_key), reader_name) continue matching_ids[reader_name].extend(ds_ids) return matching_ids def _get_unique_matching_id(self, matching_ids, dataset_key, query): """Get unique matching id from `matching_ids`, for a given `dataset_key` and some optional `query`.""" all_ids = sum(matching_ids.values(), []) if len(all_ids) == 0: raise MissingDependencies({dataset_key}) elif len(all_ids) == 1: result = all_ids[0] else: sorted_ids, distances = dataset_key.sort_dataids_with_preference(all_ids, query) try: result = self._get_unique_id_from_sorted_ids(sorted_ids, distances) except TooManyResults: LOG.trace("Too many datasets matching key {} in readers {}".format(dataset_key, matching_ids.keys())) raise TooManyResults("Too many keys matching: {}".format(dataset_key)) except MissingDependencies: raise MissingDependencies({dataset_key}) return result @staticmethod def _get_unique_id_from_sorted_ids(sorted_ids, distances): if distances[0] != np.inf: if distances[0] != distances[1]: result = sorted_ids[0] else: raise TooManyResults else: raise MissingDependencies return result def _get_unique_reader_node_from_id(self, data_id, reader_name): try: # now that we know we have the exact DataID see if we have already created a Node for it return self.getitem(data_id) except KeyError: # we haven't created a node yet, create it now return ReaderNode(data_id, reader_name) def _get_subtree_for_existing_name(self, dsq): try: # assume that there is no such thing as a "better" composite # version so if we find any DataIDs already loaded then # we want to use them node = self[dsq] LOG.trace("Composite already loaded:\n\tRequested: {}\n\tFound: {}".format(dsq, node.name)) return node except KeyError: # composite hasn't been loaded yet, let's load it below LOG.trace("Composite hasn't been loaded yet, will load: {}".format(dsq)) raise MissingDependencies({dsq}) def _create_subtree_from_compositors(self, dataset_key, query): try: node = self._find_compositor(dataset_key, query) LOG.trace("Found composite:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node and node.name)) except KeyError: LOG.trace("Composite not found: {}".format(dataset_key)) raise MissingDependencies({dataset_key}) return node def _find_compositor(self, dataset_key, query): """Find the compositor object for the given dataset_key.""" # NOTE: This function can not find a modifier that performs # one or more modifications if it has modifiers see if we can find # the unmodified version first if dataset_key.is_modified(): implicit_dependency_node = self._create_implicit_dependency_subtree(dataset_key, query) dataset_key = self._promote_query_to_modified_dataid(dataset_key, implicit_dependency_node.name) try: compositor = 
self.get_modifier(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format(str(dataset_key))) compositor.attrs['prerequisites'] = [implicit_dependency_node] + list(compositor.attrs['prerequisites']) else: try: compositor = self.get_compositor(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format(str(dataset_key))) root = CompositorNode(compositor) composite_id = root.name prerequisite_filter = composite_id.create_filter_query_without_required_fields(dataset_key) # Get the prerequisites LOG.trace("Looking for composite prerequisites for: {}".format(dataset_key)) prereqs = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq for prereq in compositor.attrs['prerequisites']] prereqs = self._create_required_subtrees(root, prereqs, query=query) root.add_required_nodes(prereqs) # Get the optionals LOG.trace("Looking for optional prerequisites for: {}".format(dataset_key)) optionals = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq for prereq in compositor.attrs['optional_prerequisites']] optionals = self._create_optional_subtrees(root, optionals, query=query) root.add_optional_nodes(optionals) return root def _create_implicit_dependency_subtree(self, dataset_key, query): new_prereq = dataset_key.create_less_modified_query() src_node = self._create_subtree_for_key(new_prereq, query) return src_node def _promote_query_to_modified_dataid(self, query, dep_key): """Promote a query to an id based on the dataset it will modify (dep). Typical use case is requesting a modified dataset (query). This modified dataset most likely depends on a less-modified dataset (dep_key). The less-modified dataset must come from a reader (at least for now) or will eventually depend on a reader dataset. The original request key may be limited like (wavelength=0.67, modifiers=('a', 'b')) while the reader-based key should have all of its properties specified. This method updates the original request key so it is fully specified and should reduce the chance of Node's not being unique. """ orig_dict = query._asdict() dep_dict = dep_key._asdict() for key, dep_val in dep_dict.items(): # don't change the modifiers, just cast them to the right class if isinstance(dep_val, ModifierTuple): orig_dict[key] = dep_val.__class__(orig_dict[key]) else: orig_dict[key] = dep_val return dep_key.from_dict(orig_dict) def get_compositor(self, key): """Get a compositor.""" for sensor_name in sorted(self.compositors): try: return self.compositors[sensor_name][key] except KeyError: continue raise KeyError("Could not find compositor '{}'".format(key)) def get_modifier(self, comp_id): """Get a modifer.""" # create a DataID for the compositor we are generating modifier = comp_id['modifiers'][-1] for sensor_name in sorted(self.modifiers): modifiers = self.modifiers[sensor_name] compositors = self.compositors[sensor_name] if modifier not in modifiers: continue mloader, moptions = modifiers[modifier] moptions = moptions.copy() moptions.update(comp_id.to_dict()) moptions['sensor'] = sensor_name compositors[comp_id] = mloader(_satpy_id=comp_id, **moptions) return compositors[comp_id] raise KeyError("Could not find modifier '{}'".format(modifier)) def _create_required_subtrees(self, parent, prereqs, query=None): """Determine required prerequisite Nodes for a composite. 
Args: parent (Node): Compositor node to add these prerequisites under prereqs (sequence): Strings (names), floats (wavelengths), DataQuerys or Nodes to analyze. """ prereq_nodes, unknown_datasets = self._create_prerequisite_subtrees(parent, prereqs, query) if unknown_datasets: raise MissingDependencies(unknown_datasets) return prereq_nodes def _create_optional_subtrees(self, parent, prereqs, query=None): """Determine optional prerequisite Nodes for a composite. Args: parent (Node): Compositor node to add these prerequisites under prereqs (sequence): Strings (names), floats (wavelengths), or DataQuerys to analyze. """ prereq_nodes, unknown_datasets = self._create_prerequisite_subtrees(parent, prereqs, query) for prereq, unknowns in unknown_datasets.items(): u_str = ", ".join([str(x) for x in unknowns]) LOG.debug('Skipping optional %s: Unknown dataset %s', str(prereq), u_str) return prereq_nodes def _create_prerequisite_subtrees(self, parent, prereqs, query=None): """Determine prerequisite Nodes for a composite. Args: parent (Node): Compositor node to add these prerequisites under prereqs (sequence): Strings (names), floats (wavelengths), DataQuerys or Nodes to analyze. """ prereq_nodes = [] unknown_datasets = dict() if not prereqs: # this composite has no required prerequisites prereq_nodes.append(self.empty_node) self.add_child(parent, self.empty_node) return prereq_nodes, unknown_datasets for prereq in prereqs: try: if isinstance(prereq, Node): node = prereq else: node = self._create_subtree_for_key(prereq, query=query) except MissingDependencies as unknown: unknown_datasets[prereq] = unknown.missing_dependencies else: prereq_nodes.append(node) self.add_child(parent, node) return prereq_nodes, unknown_datasets class _DataIDContainer(dict): """Special dictionary object that can handle dict operations based on dataset name, wavelength, or DataID. Note: Internal dictionary keys are `DataID` objects. """ def keys(self): """Give currently contained keys.""" # sort keys so things are a little more deterministic (.keys() is not) return sorted(super(_DataIDContainer, self).keys()) def get_key(self, match_key): """Get multiple fully-specified keys that match the provided query. Args: match_key (DataID): DataID or DataQuery of query parameters to use for searching. Can also be a string representing the dataset name or a number representing the dataset wavelength. """ return get_key(match_key, self.keys()) def __getitem__(self, item): """Get item from container.""" try: # short circuit - try to get the object without more work return super(_DataIDContainer, self).__getitem__(item) except KeyError: key = self.get_key(item) return super(_DataIDContainer, self).__getitem__(key) def __contains__(self, item): """Check if item exists in container.""" try: key = self.get_key(item) except KeyError: return False return super(_DataIDContainer, self).__contains__(key) satpy-0.34.0/satpy/enhancements/000077500000000000000000000000001420401153000165345ustar00rootroot00000000000000satpy-0.34.0/satpy/enhancements/__init__.py000066400000000000000000000521431420401153000206520ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
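# --- Illustrative usage sketch for _DataIDContainer above (not part of the original satpy source; variable names are placeholders) ---
# The container stores Node objects under fully-specified DataID keys but resolves
# "fuzzy" lookups (a dataset name, a wavelength number, or a DataQuery) through get_key():
#     nodes = _DataIDContainer()
#     nodes[some_data_id] = some_node        # internal keys are DataID objects
#     node_by_name = nodes["natural_color"]  # look up by dataset name ...
#     node_by_wl = nodes[0.65]               # ... or by wavelength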
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancements.""" import logging import os import warnings from functools import partial from numbers import Number import dask import dask.array as da import numpy as np import xarray as xr from trollimage.xrimage import XRImage from satpy._compat import ArrayLike from satpy._config import get_config_path LOG = logging.getLogger(__name__) def stretch(img, **kwargs): """Perform stretch.""" return img.stretch(**kwargs) def gamma(img, **kwargs): """Perform gamma correction.""" return img.gamma(**kwargs) def invert(img, *args): """Perform inversion.""" return img.invert(*args) def apply_enhancement(data, func, exclude=None, separate=False, pass_dask=False): """Apply `func` to the provided data. Args: data (xarray.DataArray): Data to be modified inplace. func (callable): Function to be applied to an xarray exclude (iterable): Bands in the 'bands' dimension to not include in the calculations. separate (bool): Apply `func` one band at a time. Default is False. pass_dask (bool): Pass the underlying dask array instead of the xarray.DataArray. """ attrs = data.attrs bands = data.coords['bands'].values if exclude is None: exclude = ['A'] if 'A' in bands else [] if separate: data_arrs = [] for idx, band_name in enumerate(bands): band_data = data.sel(bands=[band_name]) if band_name in exclude: # don't modify alpha data_arrs.append(band_data) continue if pass_dask: dims = band_data.dims coords = band_data.coords d_arr = func(band_data.data, index=idx) band_data = xr.DataArray(d_arr, dims=dims, coords=coords) else: band_data = func(band_data, index=idx) data_arrs.append(band_data) # we assume that the func can add attrs attrs.update(band_data.attrs) data.data = xr.concat(data_arrs, dim='bands').data data.attrs = attrs return data band_data = data.sel(bands=[b for b in bands if b not in exclude]) if pass_dask: dims = band_data.dims coords = band_data.coords d_arr = func(band_data.data) band_data = xr.DataArray(d_arr, dims=dims, coords=coords) else: band_data = func(band_data) attrs.update(band_data.attrs) # combine the new data with the excluded data new_data = xr.concat([band_data, data.sel(bands=exclude)], dim='bands') data.data = new_data.sel(bands=bands).data data.attrs = attrs return data def crefl_scaling(img, **kwargs): """Apply non-linear stretch used by CREFL-based RGBs.""" LOG.debug("Applying the crefl_scaling") warnings.warn("'crefl_scaling' is deprecated, use 'piecewise_linear_stretch' instead.", DeprecationWarning) img.data.data = img.data.data / 100 return piecewise_linear_stretch(img, xp=kwargs['idx'], fp=kwargs['sc'], reference_scale_factor=255) def piecewise_linear_stretch( img: XRImage, xp: ArrayLike, fp: ArrayLike, reference_scale_factor: Number = None, **kwargs) -> xr.DataArray: """Apply 1D linear interpolation. This uses :func:`numpy.interp` mapped over the provided dask array chunks. Args: img: Image data to be scaled. It is assumed the data is already normalized between 0 and 1. xp: Input reference values of the image data points used for interpolation. This is passed directly to :func:`numpy.interp`. fp: Target reference values of the output image data points used for interpolation. This is passed directly to :func:`numpy.interp`. 
reference_scale_factor: Divide ``xp`` and ``fp`` by this value before using them for interpolation. This is a convenience to make matching normalized image data to interp coordinates or to avoid floating point precision errors in YAML configuration files. If not provided, ``xp`` and ``fp`` will not be modified. Examples: This example YAML uses a 'crude' stretch to pre-scale the RGB data and then uses reference points in a 0-255 range. .. code-block:: yaml true_color_linear_interpolation: sensor: abi standard_name: true_color operations: - name: reflectance_range method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0., max_stretch: 100.} - name: Linear interpolation method: !!python/name:satpy.enhancements.piecewise_linear_stretch kwargs: xp: [0., 25., 55., 100., 255.] fp: [0., 90., 140., 175., 255.] reference_scale_factor: 255 This example YAML does the same as the above on the C02 channel, but the interpolation reference points are already adjusted for the input reflectance (%) data and the output range (0 to 1). .. code-block:: yaml c02_linear_interpolation: sensor: abi standard_name: C02 operations: - name: Linear interpolation method: !!python/name:satpy.enhancements.piecewise_linear_stretch kwargs: xp: [0., 9.8039, 21.5686, 39.2157, 100.] fp: [0., 0.3529, 0.5490, 0.6863, 1.0] """ LOG.debug("Applying the piecewise_linear_stretch") if reference_scale_factor is not None: xp = np.asarray(xp) / reference_scale_factor fp = np.asarray(fp) / reference_scale_factor def func(band_data, xp, fp, index=None): # Interpolate band on [0,1] using "lazy" arrays (put calculations off until the end). band_data = xr.DataArray(da.clip(band_data.data.map_blocks(np.interp, xp=xp, fp=fp), 0, 1), coords=band_data.coords, dims=band_data.dims, name=band_data.name, attrs=band_data.attrs) return band_data func_with_kwargs = partial(func, xp=xp, fp=fp) return apply_enhancement(img.data, func_with_kwargs, separate=True) def cira_stretch(img, **kwargs): """Logarithmic stretch adapted to human vision. Applicable only for visible channels. """ LOG.debug("Applying the cira-stretch") def func(band_data): log_root = np.log10(0.0223) denom = (1.0 - log_root) * 0.75 band_data *= 0.01 band_data = band_data.clip(np.finfo(float).eps) band_data = np.log10(band_data) band_data -= log_root band_data /= denom return band_data return apply_enhancement(img.data, func) def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs): """Stretch method based on the Reinhard algorithm, using luminance. Args: saturation: Saturation enhancement factor. Less is grayer. Neutral is 1. white: the reflectance luminance to set to white (in %). Reinhard, Erik & Stark, Michael & Shirley, Peter & Ferwerda, James. (2002). Photographic Tone Reproduction For Digital Images. ACM Transactions on Graphics. :doi: `21. 
10.1145/566654.566575` """ with xr.set_options(keep_attrs=True): # scale the data to [0, 1] interval rgb = img.data / 100 white /= 100 # extract color components r = rgb.sel(bands='R').data g = rgb.sel(bands='G').data b = rgb.sel(bands='B').data # saturate luma = _compute_luminance_from_rgb(r, g, b) rgb = (luma + (rgb - luma) * saturation).clip(0) # reinhard reinhard_luma = (luma / (1 + luma)) * (1 + luma/(white**2)) coef = reinhard_luma / luma rgb = rgb * coef # srgb gamma rgb.data = _srgb_gamma(rgb.data) img.data = rgb return img.data def _compute_luminance_from_rgb(r, g, b): """Compute the luminance of the image.""" return r * 0.2126 + g * 0.7152 + b * 0.0722 def _srgb_gamma(arr): """Apply the srgb gamma.""" return da.where(arr < 0.0031308, arr * 12.92, 1.055 * arr ** 0.41666 - 0.055) def _lookup_delayed(luts, band_data): # can't use luts.__getitem__ for some reason return luts[band_data] def lookup(img, **kwargs): """Assign values to channels based on a table.""" luts = np.array(kwargs['luts'], dtype=np.float32) / 255.0 def func(band_data, luts=luts, index=-1): # NaN/null values will become 0 lut = luts[:, index] if len(luts.shape) == 2 else luts band_data = band_data.clip(0, lut.size - 1).astype(np.uint8) new_delay = dask.delayed(_lookup_delayed)(lut, band_data) new_data = da.from_delayed(new_delay, shape=band_data.shape, dtype=luts.dtype) return new_data return apply_enhancement(img.data, func, separate=True, pass_dask=True) def colorize(img, **kwargs): """Colorize the given image. Args: img: image to be colorized Kwargs: palettes: colormap(s) to use The `palettes` kwarg can be one of the following: - a trollimage.colormap.Colormap object - list of dictionaries with each of one of the following forms: - {'filename': '/path/to/colors.npy', 'min_value': , 'max_value': , 'reverse': , 'min_value': , 'max_value': , 'reverse': , 'min_value': , 'max_value': , 'reverse': , 'values': , 'min_value': , 'max_value': , 'reverse': = threshold, high_offset - high_factor * band_data, low_offset - low_factor * band_data) return apply_enhancement(img.data, _bt_threshold, pass_dask=True) satpy-0.34.0/satpy/enhancements/abi.py000066400000000000000000000033241420401153000176430ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancement functions specific to the ABI sensor.""" from satpy.enhancements import apply_enhancement def cimss_true_color_contrast(img, **kwargs): """Scale data based on CIMSS True Color recipe for AWIPS.""" def func(img_data): """Perform per-chunk enhancement. Code ported from Kaba Bah's AWIPS python plugin for creating the CIMSS Natural (True) Color image in AWIPS. AWIPS provides that python code the image data on a 0-255 scale. Satpy gives this function the data on a 0-1.0 scale (assuming linear stretching and sqrt enhancements have already been applied). 
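For example, with the constants used below (``acont = 25.5 / 255``, ``amax = 259 / 255``, ``amid = 0.5``) the stretch factor ``afact`` works out to roughly 1.22, so the curve leaves 0.5 unchanged, stretches contrast about that midpoint, and the final clipping on the stretched values sends anything at or below 10/255 to 0 and anything at or above 1.0 to 1.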
""" max_value = 1.0 acont = (255.0 / 10.0) / 255.0 amax = (255.0 + 4.0) / 255.0 amid = 1.0 / 2.0 afact = (amax * (acont + max_value) / (max_value * (amax - acont))) aband = (afact * (img_data - amid) + amid) aband[aband <= 10 / 255.0] = 0 aband[aband >= 1.0] = 1.0 return aband apply_enhancement(img.data, func, pass_dask=True) satpy-0.34.0/satpy/enhancements/ahi.py000066400000000000000000000032011420401153000176430ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancement functions specific to the AHI sensor.""" import dask.array as da import numpy as np from satpy.enhancements import apply_enhancement def jma_true_color_reproduction(img, **kwargs): """Apply CIE XYZ matrix and return True Color Reproduction data. Himawari-8 True Color Reproduction Approach Based on the CIE XYZ Color System Hidehiko MURATA, Kotaro SAITOH, and Yasuhiko SUMIDA Meteorological Satellite Center, Japan Meteorological Agency NOAA National Environmental Satellite, Data, and Information Service Colorado State University—CIRA https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html """ def func(img_data): ccm = np.array([ [1.1759, 0.0561, -0.1322], [-0.0386, 0.9587, 0.0559], [-0.0189, -0.1161, 1.0777] ]) output = da.dot(img_data.T, ccm.T) return output.T apply_enhancement(img.data, func, pass_dask=True) satpy-0.34.0/satpy/enhancements/mimic.py000066400000000000000000000504551420401153000202150ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Mimic TPW Color enhancements.""" from trollimage.colormap import Colormap def nrl_colors(img, **kwargs): """TPW color table based on NRL Color table (0-76 mm).""" nrl_tpw_colors = {"colors": [[0.0, [188, 132, 98]], [0.27472527472527475, [188, 130, 99]], [0.5494505494505495, [187, 128, 100]], [0.8241758241758242, [186, 125, 101]], [1.098901098901099, [185, 124, 102]], [1.3736263736263736, [184, 122, 103]], [1.6483516483516485, [183, 120, 103]], [1.9230769230769234, [182, 119, 104]], [2.197802197802198, [182, 118, 106]], [2.4725274725274726, [181, 116, 107]], [2.7472527472527473, [180, 114, 108]], [3.0219780219780223, [179, 114, 108]], [3.296703296703297, [178, 113, 109]], [3.5714285714285716, [177, 111, 110]], [3.8461538461538467, [177, 110, 111]], [4.120879120879121, [176, 108, 111]], [4.395604395604396, [176, 106, 110]], [4.670329670329671, [175, 104, 110]], [4.945054945054945, [174, 103, 111]], [5.21978021978022, [174, 101, 111]], [5.4945054945054945, [173, 99, 111]], [5.76923076923077, [172, 97, 111]], [6.043956043956045, [171, 95, 112]], [6.318681318681319, [171, 93, 112]], [6.593406593406594, [171, 91, 113]], [6.868131868131869, [170, 90, 113]], [7.142857142857143, [169, 88, 114]], [7.417582417582418, [169, 86, 114]], [7.692307692307693, [168, 85, 115]], [7.967032967032968, [167, 83, 115]], [8.241758241758243, [166, 81, 116]], [8.516483516483516, [166, 80, 118]], [8.791208791208792, [165, 78, 119]], [9.065934065934067, [165, 76, 120]], [9.340659340659341, [164, 75, 120]], [9.615384615384617, [164, 74, 121]], [9.89010989010989, [163, 72, 123]], [10.164835164835166, [162, 70, 124]], [10.43956043956044, [161, 69, 125]], [10.714285714285715, [160, 67, 126]], [10.989010989010989, [160, 66, 128]], [11.263736263736265, [159, 64, 130]], [11.53846153846154, [159, 63, 131]], [11.813186813186814, [158, 61, 132]], [12.08791208791209, [158, 60, 134]], [12.362637362637363, [157, 58, 136]], [12.637362637362639, [156, 57, 137]], [12.912087912087912, [155, 56, 139]], [13.186813186813188, [155, 54, 141]], [13.461538461538463, [154, 52, 142]], [13.736263736263737, [154, 52, 144]], [14.010989010989013, [153, 50, 146]], [14.285714285714286, [153, 49, 148]], [14.560439560439562, [152, 47, 150]], [14.835164835164836, [150, 46, 151]], [15.109890109890111, [147, 45, 150]], [15.384615384615387, [144, 44, 150]], [15.65934065934066, [142, 44, 152]], [15.934065934065936, [138, 48, 156]], [16.20879120879121, [135, 50, 159]], [16.483516483516485, [132, 52, 161]], [16.75824175824176, [131, 56, 164]], [17.032967032967033, [126, 60, 168]], [17.30769230769231, [123, 62, 171]], [17.582417582417584, [121, 65, 173]], [17.857142857142858, [117, 69, 177]], [18.131868131868135, [114, 71, 180]], [18.40659340659341, [111, 74, 182]], [18.681318681318682, [109, 77, 185]], [18.956043956043956, [104, 82, 190]], [19.230769230769234, [101, 84, 193]], [19.505494505494507, [98, 86, 195]], [19.78021978021978, [96, 89, 198]], [20.05494505494506, [93, 92, 200]], [20.329670329670332, [90, 95, 204]], [20.604395604395606, [87, 98, 207]], [20.87912087912088, [83, 103, 211]], [21.153846153846157, [80, 105, 214]], [21.42857142857143, [77, 108, 216]], [21.703296703296704, [74, 110, 220]], [21.978021978021978, [71, 114, 222]], [22.252747252747255, [68, 116, 225]], [22.52747252747253, [65, 120, 228]], [22.802197802197803, [61, 125, 233]], [23.07692307692308, [57, 127, 235]], [23.351648351648354, [55, 130, 239]], [23.626373626373628, [52, 133, 242]], [23.9010989010989, [49, 137, 245]], [24.17582417582418, [47, 139, 247]], [24.450549450549453, 
[44, 142, 250]], [24.725274725274726, [40, 146, 255]], [25.000000000000004, [40, 148, 255]], [25.274725274725277, [42, 150, 255]], [25.54945054945055, [46, 154, 255]], [25.824175824175825, [50, 158, 255]], [26.098901098901102, [52, 159, 255]], [26.373626373626376, [55, 163, 255]], [26.64835164835165, [59, 167, 255]], [26.923076923076927, [61, 169, 255]], [27.1978021978022, [65, 173, 255]], [27.472527472527474, [70, 178, 255]], [27.747252747252748, [73, 182, 255]], [28.021978021978025, [76, 185, 255]], [28.2967032967033, [79, 188, 255]], [28.571428571428573, [82, 192, 255]], [28.84615384615385, [86, 195, 255]], [29.120879120879124, [88, 199, 255]], [29.395604395604398, [91, 201, 255]], [29.67032967032967, [95, 205, 255]], [29.94505494505495, [97, 207, 255]], [30.219780219780223, [101, 210, 255]], [30.494505494505496, [104, 213, 255]], [30.769230769230774, [107, 216, 255]], [31.043956043956047, [110, 218, 255]], [31.31868131868132, [114, 222, 255]], [31.593406593406595, [115, 223, 255]], [31.868131868131872, [119, 227, 255]], [32.142857142857146, [123, 231, 255]], [32.41758241758242, [125, 233, 255]], [32.69230769230769, [127, 236, 255]], [32.96703296703297, [133, 241, 255]], [33.24175824175825, [136, 244, 255]], [33.51648351648352, [139, 247, 255]], [33.791208791208796, [143, 252, 255]], [34.065934065934066, [145, 254, 255]], [34.34065934065934, [148, 255, 254]], [34.61538461538462, [148, 255, 247]], [34.89010989010989, [148, 255, 241]], [35.16483516483517, [148, 255, 235]], [35.439560439560445, [148, 255, 229]], [35.714285714285715, [148, 255, 223]], [35.98901098901099, [148, 255, 217]], [36.26373626373627, [148, 255, 210]], [36.53846153846154, [148, 255, 205]], [36.81318681318682, [148, 255, 199]], [37.08791208791209, [148, 255, 193]], [37.362637362637365, [148, 255, 187]], [37.63736263736264, [148, 255, 181]], [37.91208791208791, [148, 255, 174]], [38.18681318681319, [148, 255, 168]], [38.46153846153847, [148, 255, 162]], [38.73626373626374, [148, 255, 156]], [39.010989010989015, [148, 255, 150]], [39.28571428571429, [151, 255, 148]], [39.56043956043956, [157, 255, 148]], [39.83516483516484, [163, 255, 148]], [40.10989010989012, [169, 255, 148]], [40.38461538461539, [175, 255, 148]], [40.659340659340664, [181, 255, 148]], [40.934065934065934, [188, 255, 148]], [41.20879120879121, [197, 255, 148]], [41.48351648351649, [203, 255, 148]], [41.75824175824176, [209, 255, 148]], [42.032967032967036, [215, 255, 148]], [42.307692307692314, [221, 255, 148]], [42.582417582417584, [227, 255, 148]], [42.85714285714286, [233, 255, 148]], [43.13186813186814, [239, 255, 148]], [43.40659340659341, [244, 255, 148]], [43.681318681318686, [250, 255, 148]], [43.956043956043956, [254, 254, 146]], [44.23076923076923, [255, 251, 143]], [44.50549450549451, [255, 249, 141]], [44.78021978021978, [255, 247, 139]], [45.05494505494506, [255, 242, 134]], [45.329670329670336, [255, 239, 131]], [45.604395604395606, [255, 236, 128]], [45.87912087912088, [255, 233, 125]], [46.15384615384616, [255, 231, 122]], [46.42857142857143, [255, 227, 120]], [46.70329670329671, [255, 225, 117]], [46.978021978021985, [255, 221, 113]], [47.252747252747255, [255, 218, 110]], [47.52747252747253, [255, 216, 108]], [47.8021978021978, [255, 211, 103]], [48.07692307692308, [255, 209, 101]], [48.35164835164836, [255, 206, 98]], [48.62637362637363, [255, 204, 96]], [48.901098901098905, [255, 199, 91]], [49.17582417582418, [255, 196, 87]], [49.45054945054945, [255, 193, 85]], [49.72527472527473, [255, 191, 82]], [50.00000000000001, [255, 188, 
80]], [50.27472527472528, [255, 185, 77]], [50.549450549450555, [255, 182, 74]], [50.82417582417583, [255, 179, 70]], [51.0989010989011, [255, 176, 68]], [51.37362637362638, [255, 173, 64]], [51.64835164835165, [255, 171, 61]], [51.92307692307693, [255, 167, 58]], [52.197802197802204, [255, 164, 55]], [52.472527472527474, [255, 161, 52]], [52.74725274725275, [255, 158, 49]], [53.02197802197803, [255, 154, 46]], [53.2967032967033, [255, 151, 42]], [53.57142857142858, [255, 148, 40]], [53.846153846153854, [252, 144, 39]], [54.120879120879124, [249, 140, 39]], [54.3956043956044, [246, 136, 39]], [54.67032967032967, [243, 132, 39]], [54.94505494505495, [240, 128, 39]], [55.219780219780226, [237, 125, 39]], [55.494505494505496, [234, 121, 39]], [55.769230769230774, [231, 118, 39]], [56.04395604395605, [227, 114, 39]], [56.31868131868132, [225, 111, 39]], [56.5934065934066, [222, 108, 39]], [56.868131868131876, [219, 104, 39]], [57.142857142857146, [216, 101, 39]], [57.41758241758242, [213, 97, 39]], [57.6923076923077, [210, 95, 39]], [57.96703296703297, [206, 91, 39]], [58.24175824175825, [204, 89, 39]], [58.51648351648352, [200, 86, 39]], [58.791208791208796, [198, 83, 39]], [59.06593406593407, [194, 80, 39]], [59.34065934065934, [192, 78, 39]], [59.61538461538462, [188, 75, 39]], [59.8901098901099, [185, 73, 39]], [60.16483516483517, [182, 70, 39]], [60.439560439560445, [179, 68, 39]], [60.71428571428572, [176, 66, 39]], [60.98901098901099, [173, 63, 39]], [61.26373626373627, [171, 62, 39]], [61.53846153846155, [169, 59, 39]], [61.81318681318682, [167, 57, 40]], [62.087912087912095, [165, 56, 40]], [62.362637362637365, [165, 54, 40]], [62.63736263736264, [163, 52, 40]], [62.91208791208792, [161, 50, 41]], [63.18681318681319, [159, 48, 42]], [63.46153846153847, [159, 47, 42]], [63.736263736263744, [157, 46, 43]], [64.01098901098902, [155, 44, 43]], [64.28571428571429, [154, 44, 45]], [64.56043956043956, [156, 45, 48]], [64.83516483516485, [157, 46, 52]], [65.10989010989012, [159, 48, 55]], [65.38461538461539, [160, 50, 58]], [65.65934065934067, [162, 52, 62]], [65.93406593406594, [164, 53, 65]], [66.20879120879121, [165, 55, 69]], [66.4835164835165, [167, 57, 72]], [66.75824175824177, [169, 59, 76]], [67.03296703296704, [171, 61, 80]], [67.3076923076923, [172, 63, 83]], [67.58241758241759, [174, 65, 87]], [67.85714285714286, [176, 67, 91]], [68.13186813186813, [177, 69, 95]], [68.40659340659342, [179, 71, 98]], [68.68131868131869, [181, 73, 102]], [68.95604395604396, [182, 75, 106]], [69.23076923076924, [184, 78, 109]], [69.50549450549451, [186, 80, 114]], [69.78021978021978, [188, 82, 117]], [70.05494505494507, [189, 85, 121]], [70.32967032967034, [191, 87, 125]], [70.6043956043956, [193, 90, 129]], [70.87912087912089, [194, 92, 132]], [71.15384615384616, [196, 95, 137]], [71.42857142857143, [198, 97, 140]], [71.70329670329672, [199, 100, 144]], [71.97802197802199, [201, 103, 148]], [72.25274725274726, [203, 105, 152]], [72.52747252747254, [205, 108, 155]], [72.80219780219781, [206, 110, 159]], [73.07692307692308, [208, 114, 163]], [73.35164835164836, [210, 116, 167]], [73.62637362637363, [211, 120, 171]], [73.9010989010989, [213, 122, 174]], [74.17582417582418, [215, 125, 178]], [74.45054945054946, [216, 128, 182]], [74.72527472527473, [218, 131, 185]], [75.0, [220, 135, 189]], ]} kwargs['palettes'].update(nrl_tpw_colors) palette = kwargs['palettes'] palette['colors'] = tuple(map(tuple, palette['colors'])) cm = Colormap(*palette['colors']) img.palettize(cm) def total_precipitable_water(img, 
**kwargs): """Palettizes images from MIMIC TPW data. This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. """ palette = kwargs['palettes'] palette['colors'] = tuple(map(tuple, palette['colors'])) cm = Colormap(*palette['colors']) img.palettize(cm) satpy-0.34.0/satpy/enhancements/viirs.py000066400000000000000000000027171420401153000202510ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancements specific to the VIIRS instrument.""" import numpy as np from trollimage.colormap import Colormap from satpy.enhancements import apply_enhancement def water_detection(img, **kwargs): """Palettizes images from VIIRS flood data. This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. """ palette = kwargs['palettes'] palette['colors'] = tuple(map(tuple, palette['colors'])) def func(img_data): data = np.asarray(img_data) data[data == 150] = 31 data[data == 199] = 18 data[data >= 200] = data[data >= 200] - 100 return data apply_enhancement(img.data, func, pass_dask=True) cm = Colormap(*palette['colors']) img.palettize(cm) satpy-0.34.0/satpy/etc/000077500000000000000000000000001420401153000146375ustar00rootroot00000000000000satpy-0.34.0/satpy/etc/areas.yaml000066400000000000000000001234461420401153000166300ustar00rootroot00000000000000# This file contains a set of pre-defined areas # to be used for resampling purposes. # ----------------------------------------------------------------------------- # -------------------------- Geostationary Areas ------------------------------ # ----------------------------------------------------------------------------- # This section contains a set of full-disk and regional areas in geostationary # projection. 
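# Each definition below gives a projection, a pixel grid ("shape") and a
# bounding box in projection coordinates ("area_extent"); the nominal
# resolution follows from (extent width) / (number of columns).  Worked
# example for the SEVIRI full-disk grids defined in this section:
#
#   msg_seviri_fes_3km: (5567248.28340708 - -5570248.686685662) / 3712  ≈ 3000.4 m
#   msg_seviri_fes_1km: (5566247.740968115 - -5571248.412732527) / 11136 ≈ 1000.1 m
#
# The area names themselves can be used as resampling targets, for example
# Scene.resample("msg_seviri_fes_3km") or pyresample's area-loading helpers
# (the exact helper names depend on the satpy/pyresample version installed).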
# ---------- Meteosat Second Generation (MSG) / SEVIRI Instrument ------------- # Full disk msg_seviri_fes_3km: description: MSG SEVIRI Full Earth Scanning service area definition with 3 km resolution projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_fes_1km: description: MSG SEVIRI Full Earth Scanning service area definition with 1 km resolution projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 11136 width: 11136 area_extent: lower_left_xy: [-5571248.412732527, -5566247.740968115] upper_right_xy: [5566247.740968115, 5571248.412732527] msg_seviri_rss_3km: description: MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_rss_1km: description: MSG SEVIRI Rapid Scanning Service area definition with 1 km resolution projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 11136 width: 11136 area_extent: lower_left_xy: [-5571248.412732527, -5566247.740968115] upper_right_xy: [5566247.740968115, 5571248.412732527] msg_seviri_iodc_3km: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 3 km resolution projection: proj: geos lon_0: 41.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_iodc_1km: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 1 km resolution projection: proj: geos lon_0: 41.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 11136 width: 11136 area_extent: lower_left_xy: [-5571248.412732527, -5566247.740968115] upper_right_xy: [5566247.740968115, 5571248.412732527] # Full disk - segmented products msg_seviri_fes_9km: description: MSG SEVIRI Full Earth Scanning service area definition with 9 km resolution projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1237 width: 1237 area_extent: lower_left_xy: [-5567248.28351984, -5567248.28340708] upper_right_xy: [5567248.28340708 , 5567248.28351984] msg_seviri_rss_9km: description: MSG SEVIRI Rapid Scanning Service area definition with 9 km resolution projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1237 width: 1237 area_extent: lower_left_xy: [-5567248.28351984, -5567248.28340708] upper_right_xy: [5567248.28340708 , 5567248.28351984] msg_seviri_iodc_9km: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution projection: proj: geos lon_0: 41.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1237 width: 1237 area_extent: lower_left_xy: [-5567248.28351984, -5567248.28340708] upper_right_xy: [5567248.28340708 , 5567248.28351984] msg_seviri_fes_9km_ext: description: MSG SEVIRI Full Earth Scanning service area definition with 9 km resolution (extended outside original 3km grid) projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1238 width: 1238 area_extent: lower_left_xy: [-5571748.888268564, -5571748.888155806] upper_right_xy: [5571748.888155806, 
5571748.888268564] msg_seviri_rss_9km_ext: description: MSG SEVIRI Rapid Scanning Service area definition with 9 km resolution (extended outside original 3km grid) projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1238 width: 1238 area_extent: lower_left_xy: [-5571748.888268564, -5571748.888155806] upper_right_xy: [5571748.888155806, 5571748.888268564] msg_seviri_iodc_9km_ext: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution (extended outside original 3km grid) projection: proj: geos lon_0: 41.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1238 width: 1238 area_extent: lower_left_xy: [-5571748.888268564, -5571748.888155806] upper_right_xy: [5571748.888155806, 5571748.888268564] msg_seviri_fes_48km: description: MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 232 width: 232 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_rss_48km: description: MSG SEVIRI Rapid Scanning Service area definition with 48 km resolution projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 232 width: 232 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_iodc_48km: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution projection: proj: geos lon_0: 41.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 232 width: 232 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] # Regional EuropeCanary: description: Northern disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1200 width: 3000 area_extent: lower_left_xy: [-4823148.089050828, 1969764.6783588605] upper_right_xy: [4178061.408400173, 5570248.477339261] EastEurope: description: Eastern part of Northern disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 800 width: 1300 area_extent: lower_left_xy: [654112.8864287604, 2989901.7547366405] upper_right_xy: [4553111.804127298, 5390224.287390241] AfHorn: description: Eastern disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1600 width: 1100 area_extent: lower_left_xy: [2263804.1886089267, -1327678.4008740226] upper_right_xy: [5564247.671007627, 3472966.6644331776] SouthAmerica: description: Lower West part of Southern disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1200 width: 3000 area_extent: lower_left_xy: [-5570248.477339261, -4263473.561036119] upper_right_xy: [-384719.90821206354, 1339786.2707295895] # ---------- Meteosat Third Generation (MTG) / FCI Instrument ----------------- # Full disk mtg_fci_fdss_1km: description: MTG FCI Full Disk Scanning Service area definition with 1 km resolution projection: proj: geos lon_0: 0 h: 35786400 x_0: 0 y_0: 0 ellps: WGS84 no_defs: null shape: height: 11136 width: 11136 area_extent: lower_left_xy: [-5567999.998577303, -5567999.998577303] upper_right_xy: [5567999.998527619, 5567999.998527619] units: m mtg_fci_fdss_2km: description: MTG FCI Full Disk Scanning Service area definition with 2 km resolution 
projection: proj: geos lon_0: 0 h: 35786400 x_0: 0 y_0: 0 ellps: WGS84 no_defs: null shape: height: 5568 width: 5568 area_extent: lower_left_xy: [-5567999.994200589, -5567999.994200589] upper_right_xy: [5567999.994206558, 5567999.994206558] units: m # Geostationary Operational Environmental Satellite (GOES) / ABI Instrument # Full disk goes_east_abi_f_500m: description: GOES East ABI Full Disk at 500 m SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 21696 width: 21696 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_east_abi_f_1km: description: GOES East ABI Full Disk at 1 km SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 10848 width: 10848 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_east_abi_f_2km: description: GOES East ABI Full Disk at 2 km SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 5424 width: 5424 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_west_abi_f_500m: description: GOES West ABI Full Disk at 500 m SSP resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 21696 width: 21696 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_west_abi_f_1km: description: GOES West ABI Full Disk at 1 km SSP resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 10848 width: 10848 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_west_abi_f_2km: description: GOES West ABI Full Disk at 2 km SSP resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 5424 width: 5424 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m # Regional goes_east_abi_c_500m: description: GOES East ABI CONUS at 500 m SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 6000 width: 10000 area_extent: lower_left_xy: [-3627271.29128, 1583173.65752] upper_right_xy: [1382771.92872, 4589199.58952] units: m goes_east_abi_c_1km: description: GOES East ABI CONUS at 1 km SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 3000 width: 5000 area_extent: lower_left_xy: [-3627271.29128, 1583173.65752] upper_right_xy: [1382771.92872, 4589199.58952] units: m goes_east_abi_c_2km: description: GOES East ABI CONUS at 2 km SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 1500 width: 2500 area_extent: lower_left_xy: [-3627271.29128, 1583173.65752] upper_right_xy: [1382771.92872, 4589199.58952] units: m goes_west_abi_p_500m: description: GOES West ABI PACUS at 500 m resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: 
GRS80 no_defs: null type: crs shape: height: 6000 width: 10000 area_extent: lower_left_xy: [-2505021.61, 1583173.65752] upper_right_xy: [2505021.61, 4589199.58952] units: m goes_west_abi_p_1km: description: GOES West ABI PACUS at 1 km SSP resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 3000 width: 5000 area_extent: lower_left_xy: [-2505021.61, 1583173.65752] upper_right_xy: [2505021.61, 4589199.58952] units: m goes_west_abi_p_2km: description: GOES West ABI PACUS at 2 km resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 1500 width: 2500 area_extent: lower_left_xy: [-2505021.61, 1583173.65752] upper_right_xy: [2505021.61, 4589199.58952] units: m # ----------------------------------------------------------------------------- # ------------------------- Miscellaneous Areas ------------------------------- # ----------------------------------------------------------------------------- # This section contains a set of areas, local and global, # in different projections. omerc_bb: description: Oblique Mercator Bounding Box for Polar Overpasses projection: # The omerc projection does not work well with non-spherical ellipsoids. ellps: sphere proj: omerc optimize_projection: True laea_bb: description: Lambert Azimuthal Equal-Area Bounding Box for Polar Overpasses projection: ellps: WGS84 proj: laea optimize_projection: True australia: description: australia projection: proj: merc lat_0: -27.5 lon_0: 132.5 ellps: WGS84 shape: height: 895 width: 1001 area_extent: lower_left_xy: [-2504688.5428486555, -5591295.9185533915] upper_right_xy: [2504688.5428486555, -1111475.102852225] mali: description: mali projection: proj: merc lat_0: 19.0 lon_0: -1.0 ellps: WGS84 shape: height: 705 width: 816 area_extent: lower_left_xy: [-1224514.3987260093, 1111475.1028522244] upper_right_xy: [1224514.3987260093, 3228918.5790461157] mali_eqc: description: mali projection: proj: eqc lat_0: 19.0 lon_0: -1.0 ellps: WGS84 shape: height: 667 width: 816 area_extent: lower_left_xy: [-1224514.3987260093, -1001875.4171394627] upper_right_xy: [1224514.3987260093, 1001875.4171394617] sve: description: Sweden and baltic sea projection: EPSG: 3006 shape: height: 2000 width: 2000 area_extent: lower_left_xy: [-342379.698, 6032580.06] upper_right_xy: [1423701.52, 8029648.75] brazil2: description: brazil, platecarree projection: proj: eqc ellps: WGS84 shape: height: 768 width: 768 area_extent: lower_left_xy: [-7792364.355529149, -4452779.631730943] upper_right_xy: [-2226389.8158654715, 1669792.3618991035] units: m sudeste: description: sudeste, platecarree projection: proj: eqc ellps: WGS84 shape: height: 959 width: 959 area_extent: lower_left_xy: [-6122571.993630046, -3005626.251418386] upper_right_xy: [-4230140.650144396, -1447153.3803125564] units: m SouthAmerica_flat: description: South America flat projection: proj: eqc a: 6378137.0 b: 6378137.0 shape: height: 1213 width: 1442 area_extent: lower_left_xy: [-8326322.82790897, -4609377.085697311] upper_right_xy: [-556597.4539663679, 1535833.8895192828] units: m south_america: description: south_america, platecarree projection: proj: eqc ellps: WGS84 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-8126322.82790897, -5009377.085697311] upper_right_xy: [-556597.4539663679, 1335833.8895192828] units: m brazil: description: brazil, platecarree projection: proj: eqc ellps: WGS84 shape: height: 768 width: 768 
area_extent: lower_left_xy: [-8348961.809495518, -3896182.1777645745] upper_right_xy: [-3784862.6869713017, 1001875.4171394621] units: m worldeqc3km70: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 4096 width: 8192 area_extent: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m worldeqc30km70: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 410 width: 819 area_extent: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m worldeqc3km73: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-20037508.3428, -8181982.573309999] upper_right_xy: [20037508.3428, 8181982.573309999] units: m worldeqc3km: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m worldeqc30km: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 410 width: 820 area_extent: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m libya: description: libya area projection: proj: merc lat_ts: 31.625 lon_0: 17.875 ellps: WGS84 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-1921632.0902750609, 1725320.2028891125] upper_right_xy: [1918367.9097249391, 4797320.202889113] units: m phil: description: kuwait area projection: proj: merc lat_0: 10.0 lat_ts: 10.0 lon_0: 125.0 ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-2200000.0, 0.0] upper_right_xy: [2200000.0, 2200000.0] units: m phil_small: description: kuwait area projection: proj: merc lat_0: 10.0 lat_ts: 10.0 lon_0: 125.0 ellps: WGS84 shape: height: 512 width: 512 area_extent: lower_left_xy: [-600000.0, 0.0] upper_right_xy: [1600000.0, 2200000.0] units: m kuwait: description: kuwait area projection: proj: merc lat_ts: 30.0 lon_0: 44.75 ellps: WGS84 shape: height: 512 width: 512 area_extent: lower_left_xy: [-1280000.0, 1820000.0] upper_right_xy: [1280000.0, 4380000.0] units: m afghanistan: description: Afghanistan projection: proj: merc lat_ts: 35.0 a: 6370997.0 b: 6370997.0 lon_0: 67.5 lat_0: 35.0 shape: height: 1600 width: 1600 area_extent: lower_left_xy: [-1600000.0, 1600000.0] upper_right_xy: [1600000.0, 4800000.0] maspalomas: description: Western Africa and Canary Islands projection: proj: merc ellps: bessel lon_0: -10.0 shape: height: 1100 width: 2100 area_extent: lower_left_xy: [-1200000.0, 2900000.0] upper_right_xy: [900000.0, 4000000.0] afhorn: description: Africa horn 3km resolution projection: proj: merc ellps: bessel lon_0: 50.0 shape: height: 1622 width: 1622 area_extent: lower_left_xy: [-2432000.0, -1130348.139543] upper_right_xy: [2432000.0, 3733651.860457] spain: description: Spain projection: proj: stere ellps: bessel lat_0: 40.0 lon_0: -3.0 lat_ts: 40.0 a: 6378144.0 b: 6356759.0 shape: height: 2048 width: 2048 area_extent: lower_left_xy: [-500000.0, -500000.0] upper_right_xy: [500000.0, 500000.0] germ: description: Germany projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 5.0 lat_ts: 50.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-155100.436345, -4441495.37946] upper_right_xy: [868899.563655, -3417495.37946] germ2: description: Germany 
projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 5.0 lat_ts: 50.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-165100.436345, -4441495.37946] upper_right_xy: [878899.563655, -3417495.37946] euro4: description: Euro 4km area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] euro1: description: Euro 4km area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] scan: description: Scandinavia projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 512 width: 512 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] scan2: description: Scandinavia - 2km area projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] scan1: description: Scandinavia - 1km area projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 2088 width: 2048 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] scan500m: description: Scandinavia - 500m area projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 4176 width: 4096 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] mesanX: description: Mesan-X rotated lon/lat 1.8km projection: proj: ob_tran o_proj: eqc o_lat_p: 30.0 o_lon_p: 10.0 lon_0: -10.0 a: 6371000.0 b: 6371000.0 wktext: True shape: height: 1608 width: 1476 area_extent: lower_left_xy: [1067435.7598983962, -1278764.890341909] upper_right_xy: [3791765.9965939857, 1690140.6680267097] mesanE: description: Europe Mesan rotated lon/lat 1.8km projection: proj: ob_tran o_proj: eqc o_lat_p: 30.0 o_lon_p: 10.0 lon_0: -10.0 a: 6371000.0 b: 6371000.0 wktext: True shape: height: 6294 width: 5093 area_extent: lower_left_xy: [289083.0005619671, -2957836.6467769896] upper_right_xy: [5381881.121371055, 3335826.68502126] baws: description: BAWS projection: proj: aea ellps: bessel lon_0: 14.0 lat_1: 60.0 lat_2: 60.0 shape: height: 1400 width: 1400 area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] eurotv: description: Europe TV - 6.2x5.0km projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-3503748.8201907813, -6589593.134058789] upper_right_xy: [2842567.6359087573, -1499856.5846593212] eurotv4n: description: Europe TV4 - 4.1x4.1km projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 a: 6378144.0 b: 6356759.0 shape: height: 1152 width: 2048 area_extent: lower_left_xy: [-5103428.678666952, -6772478.60053407] upper_right_xy: [3293371.321333048, -2049278.6005340703] eurol: description: Euro 3.0km area - Europe projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 2048 width: 2560 area_extent: lower_left_xy: 
[-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] eurol1: description: Euro 3.0km area - Europe projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 8000 width: 10000 area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] scanl: description: Scandinavia - Large projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 2900 width: 2900 area_extent: lower_left_xy: [-900000.0, -4500000.0] upper_right_xy: [2000000.0, -1600000.0] euron1: description: Northern Europe - 1km projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 3072 width: 3072 area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] euron0250: description: Northern Europe - 1km projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 12288 width: 12288 area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] nsea: description: North Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] ssea: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] nsea250: description: North Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] ssea250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] bsea250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 5632 width: 4752 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] test250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 5632 width: 4752 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] bsea1000: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 1408 width: 1188 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] euro: description: Euro area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 512 width: 512 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] baltrad_lambert: description: Baltrad Lambert projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 20.0 lat_0: 60.0 shape: height: 1195 width: 815 area_extent: lower_left_xy: [-994211.85388, -1291605.15396] upper_right_xy: [635788.14612, 1098394.84604] eport: description: eport projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 1792 width: 1792 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport1: description: eport projection: proj: stere lat_0: 90.0 lon_0: 
0.0 ellps: WGS84 shape: height: 10567 width: 10567 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport10: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 1057 width: 1057 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport4: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 2642 width: 2642 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport2: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 5285 width: 5285 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m npp_sample_m: description: North America - NPP sample data - M-bands projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 60.0 lon_0: -120.0 shape: height: 1500 width: 1500 area_extent: lower_left_xy: [-1700000.0, -1400000.0] upper_right_xy: [1100000.0, 1400000.0] arctic_europe_1km: description: Arctic and Europe projection: proj: laea a: 6371228.0 b: 6371228.0 lon_0: 0.0 lat_0: 90.0 shape: height: 9100 width: 9100 area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] arctic_europe_9km: description: Arctic and Europe projection: proj: laea a: 6371228.0 b: 6371228.0 lon_0: 0.0 lat_0: 90.0 shape: height: 910 width: 910 area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] sswe: description: Southern Sweden projection: proj: stere ellps: bessel a: 6378144.0 b: 6356759.0 lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-400884.23045, -3946631.71387] upper_right_xy: [623115.76955, -2922631.71387] nswe: description: Northern Sweden projection: proj: stere ellps: bessel a: 6378144.0 b: 6356759.0 lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-392288.010506, -3105279.35252] upper_right_xy: [631711.989494, -2081279.35252] sval: description: Svalbard projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 20.0 lat_ts: 75.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-287554.9489620461, -1630805.15418955] upper_right_xy: [736445.0510379539, -606805.1541895501] ease_sh: description: Antarctic EASE grid projection: proj: laea lat_0: -90.0 lon_0: 0.0 a: 6371228.0 b: 6371228.0 shape: height: 425 width: 425 area_extent: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m ease_nh: description: Arctic EASE grid projection: proj: laea lat_0: 90.0 lon_0: 0.0 a: 6371228.0 b: 6371228.0 shape: height: 425 width: 425 area_extent: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m barents_sea: description: Barents and Greenland seas projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 40.0 lat_ts: 75.0 shape: height: 1700 width: 3000 area_extent: lower_left_xy: [-1600000.0, -2000000.0] upper_right_xy: [1400000.0, -300000.0] antarctica: description: Antarctica - 1km projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 0.0 lat_0: -90.0 shape: height: 5718 width: 5718 area_extent: lower_left_xy: [-2858899.2042342643, -2858899.204234264] 
upper_right_xy: [2858899.204234264, 2858899.2042342643] arctica: description: arctica - 1km projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 0.0 lat_0: 90.0 shape: height: 5718 width: 5718 area_extent: lower_left_xy: [-1458899.2042342643, -1458899.2042342639] upper_right_xy: [1458899.2042342639, 1458899.2042342643] euroasia: description: Euroasia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 55.0 lon_0: 20.0 shape: height: 13000 width: 13000 area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] euroasia_10km: description: Euroasia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 55.0 lon_0: 20.0 shape: height: 1300 width: 1300 area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] euroasia_asia: description: Euroasia - optimised for Asia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 45.0 lon_0: 100.0 shape: height: 12000 width: 13000 area_extent: lower_left_xy: [-8000000.0, -5499000.0] upper_right_xy: [4999000.0, 6500000.0] euroasia_asia_10km: description: Euroasia - optimised for Asia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 45.0 lon_0: 100.0 shape: height: 1200 width: 1300 area_extent: lower_left_xy: [-8000000.0, -5499000.0] upper_right_xy: [4999000.0, 6500000.0] australia_pacific: description: Austalia/Pacific - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: 135.0 shape: height: 8000 width: 9300 area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] australia_pacific_10km: description: Austalia/Pacific - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: 135.0 shape: height: 800 width: 930 area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] africa: description: Africa - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 5.0 lon_0: 20.0 shape: height: 9276 width: 8350 area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] africa_10km: description: Africa - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 5.0 lon_0: 20.0 shape: height: 928 width: 835 area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] southamerica: description: South America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: -60.0 shape: height: 8000 width: 6000 area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] southamerica_10km: description: South America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: -60.0 shape: height: 800 width: 600 area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] northamerica: description: North America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 50.0 lon_0: -100.0 shape: height: 8996 width: 9223 area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] northamerica_10km: description: North America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 50.0 lon_0: -100.0 shape: height: 900 width: 922 
area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] romania: description: Romania - 3km projection: proj: stere ellps: WGS84 lat_0: 50.0 lon_0: 15.0 lat_ts: 60.0 shape: height: 855 width: 1509 area_extent: lower_left_xy: [-2226837.662574135, -1684219.2829063328] upper_right_xy: [2299196.337425865, 881436.7170936672] stere_asia_test: description: stere projection: proj: stere lon_0: 121.5 lat_0: 25.0 shape: height: 7162 width: 7200 area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] bocheng_test: description: stere projection: proj: stere lon_0: 121.5 lat_0: 25.0 shape: height: 1989 width: 2000 area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] nsper_swe: description: nsper_swe projection: proj: nsper lon_0: 16.0 lat_0: 58.0 h: 360000000.0 wktext: True shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-5000000.0, -5000000.0] upper_right_xy: [5000000.0, 5000000.0] new_bsea250: description: new_bsea250 projection: proj: stere lat_0: 59.5 lon_0: 19.5 ellps: WGS84 shape: height: 5750 width: 5104 area_extent: lower_left_xy: [-638072.2772287376, -680339.8397175331] upper_right_xy: [638072.277228737, 757253.9342263378] scanice: description: Scandinavia and Iceland projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 0.0 lat_0: 64.0 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-1920000.0, -1536000.0] upper_right_xy: [1920000.0, 1536000.0] baws250: description: BAWS, 250m resolution projection: proj: aea ellps: WGS84 lon_0: 14.0 lat_1: 60.0 lat_2: 60.0 shape: height: 5600 width: 5600 area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] moll: description: moll projection: proj: moll lat_0: 0.0 lon_0: 0.0 ellps: WGS84 shape: height: 1800 width: 3600 area_extent: lower_left_xy: [-18040095.696147293, -9020047.848073646] upper_right_xy: [18040095.696147293, 9020047.848073646] robinson: description: robinson projection: proj: robin lat_0: 70.0 lon_0: -45.0 ellps: WGS84 shape: height: 3296 width: 4096 area_extent: lower_left_xy: [-2049911.5256036147, 5326895.725982913] upper_right_xy: [2049911.5256036168, 8625155.12857459] # ----------------------------------------------------------------------------- # -------------------- Areas to be deprecated --------------------------------- # ----------------------------------------------------------------------------- # This section contains areas that are obsolete. 
met07globe: # obsolete platform number description: Full disk IODC image 57 degrees projection: proj: geos lon_0: 57.0 a: 6378140.0 b: 6356755.0 h: 35785831.0 shape: height: 2500 width: 2500 area_extent: lower_left_xy: [-5621225.237846375, -5621225.237846375] upper_right_xy: [5621225.237846375, 5621225.237846375] met09globe: # obsolete platform number description: Cropped disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3620 width: 3620 area_extent: lower_left_xy: [-5432229.931711678, -5429229.528545862] upper_right_xy: [5429229.528545862, 5432229.931711678] met09globeFull: # superseded by msg_seviri_fes_3km description: Full disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] seviri_0deg: # superseded by msg_seviri_fes_3km description: Full disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] seviri_iodc: # superseded by msg_seviri_iodc_3km description: Full disk MSG image 41.5 degrees projection: proj: geos lon_0: 41.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_resample_area: description: Full disk MSG image 20.75 degrees projection: proj: geos lon_0: 20.75 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] satpy-0.34.0/satpy/etc/composites/000077500000000000000000000000001420401153000170245ustar00rootroot00000000000000satpy-0.34.0/satpy/etc/composites/abi.yaml000066400000000000000000000557341420401153000204610ustar00rootroot00000000000000sensor_name: visir/abi modifiers: rayleigh_corrected_crefl: modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector url: "https://www.ssec.wisc.edu/~davidh/polar2grid/viirs_crefl/CMGDEM.hdf" known_hash: "sha256:f33f1f867d79fff4fafe128f61c154236dd74fcc97bf418ea1437977a38d0604" optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_500m: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: midlatitude summer aerosol_type: marine_tropical_aerosol prerequisites: - name: C02 modifiers: [effective_solar_pathlength_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: green_crefl: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: C03 modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: toa_bidirectional_reflectance green_raw: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. 
Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green_nocorr: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 - name: C02 - name: C03 standard_name: toa_reflectance true_color_crefl: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: green_crefl - name: C01 modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 modifiers: [sunz_corrected] - name: green_raw - name: C01 modifiers: [sunz_corrected] standard_name: true_color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 - name: green_nocorr - name: C01 standard_name: true_color natural_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C05 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] high_resolution_band: blue standard_name: natural_color natural_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C05 - name: C03 - name: C02 high_resolution_band: blue standard_name: natural_color natural_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C03 - name: C02 standard_name: natural_color overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C02 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] - C14 standard_name: overview overview_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - C02 - C03 - C14 standard_name: overview colorized_ir_clouds: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: C13 standard_name: colorized_ir_clouds airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C08 - name: C10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C12 - name: C13 - name: C08 standard_name: airmass # CIMSS True Color Composites cimss_green_sunz_rayleigh: compositor: !!python/name:satpy.composites.abi.SimulatedGreen fractions: [0.45, 0.45, 0.1] prerequisites: # should we be 
using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance cimss_green_sunz: compositor: !!python/name:satpy.composites.abi.SimulatedGreen fractions: [0.45, 0.45, 0.1] prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance cimss_green: compositor: !!python/name:satpy.composites.abi.SimulatedGreen fractions: [0.45, 0.45, 0.1] prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 - name: C02 - name: C03 standard_name: toa_bidirectional_reflectance cimss_true_color_sunz_rayleigh: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > CIMSS Natural (True) Color RGB. This version has been adjusted for the cosine of the solar zenith angle and has had rayleigh correction applied. This RGB is ratio sharpened by comparing a high resolution C02 (red) band with a lower/averaged version of itself and applying that ratio to the green and blue channels. references: Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379 prerequisites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: cimss_green_sunz_rayleigh - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: cimss_true_color cimss_true_color_sunz: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > CIMSS Natural (True) Color RGB. This version has been adjusted for the cosine of the solar zenith angle. This RGB is ratio sharpened by comparing a high resolution C02 (red) band with a lower/averaged version of itself and applying that ratio to the green and blue channels. references: Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379 prerequisites: - name: C02 modifiers: [sunz_corrected] - name: cimss_green_sunz - name: C01 modifiers: [sunz_corrected] standard_name: cimss_true_color cimss_true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > CIMSS Natural (True) Color RGB. No solar zenith angle or atmospheric corrections are applied to this composite. This RGB is ratio sharpened by comparing a high resolution C02 (red) band with a lower/averaged version of itself and applying that ratio to the green and blue channels. 
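# Note (illustrative): "ratio sharpened" above means the high-resolution red band is compared to a
# coarser, averaged copy of itself and that ratio is applied to the green and blue bands. A rough
# numpy sketch of the idea -- not the actual satpy.composites.SelfSharpenedRGB code, which also
# handles dask arrays, masking and metadata:
#
#     import numpy as np
#
#     def ratio_sharpen(red_hi, red_lo, green, blue):
#         """Scale green/blue by the detail red_hi adds over its coarse version red_lo."""
#         ratio = red_hi / np.clip(red_lo, 1e-6, None)
#         return red_hi, green * ratio, blue * ratio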
references: Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379 prerequisites: - name: C02 - name: cimss_green - name: C01 standard_name: cimss_true_color true_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_ir lim_low: 90.0 lim_high: 100.0 prerequisites: - true_color - night_ir_with_background true_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_ir_hires lim_low: 90.0 lim_high: 100.0 prerequisites: - true_color - night_ir_with_background_hires night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - 3.90 - 10.3 - 12.3 - 10.3 night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background_hires prerequisites: - night_ir_alpha - _night_background_hires cira_fire_temperature: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: cira_fire_temperature description: > The fire temperature RGB highlights intense fires and differentiates them from low temperature fires. Small low temperature fires will only show up at 3.9 μm and appear red. With increasing intensity and temperature, the fires will also be detected by the 2.2 μm and 1.6 μm bands, so very intense fires appear white. references: Research Article: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf prerequisites: - name: C07 - name: C06 - name: C05 cira_day_convection: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: cira_day_convection description: > The Day Convection RGB emphasizes convection with strong updrafts and small ice particles indicative of severe storms. Bright yellow in the RGB indicates strong updrafts prior to the mature storm stage. references: Research Article: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C08 - name: C10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C05 - name: C02 cimss_cloud_type: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: cimss_cloud_type description: > The Cloud Type RGB discriminates very well between high clouds and low clouds and can be used to estimate cloud type. Thin cirrus clouds appear red, opaque ice clouds are yellow, low water clouds are cyan, and lofted water clouds are white.
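# Note (illustrative): the recipes in this file are requested by name through the Scene API; the
# reader name 'abi_l1b' is the ABI L1b reader and the file pattern below is only a placeholder:
#
#     from glob import glob
#     from satpy import Scene
#
#     scn = Scene(reader="abi_l1b", filenames=glob("/path/to/OR_ABI-L1b-RadF*.nc"))
#     scn.load(["cimss_true_color_sunz_rayleigh"])
#     scn.save_dataset("cimss_true_color_sunz_rayleigh", filename="cimss_true_color.png")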
references: Research Article: https://www.wmo.int/pages/prog/sat/meetings/documents/RGB-WS-2017_Doc_02g_Kerkmann-new-rgbs.pdf prerequisites: - name: C04 - name: C02 - name: C05 ash: description: > Ash RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C15 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C14 - name: C11 - name: C13 standard_name: ash dust: description: > Dust RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Dust_RGB_Quick_Guide.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C15 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C14 - name: C11 - name: C13 standard_name: dust cloud_phase_distinction: description: > Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659 ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C13 - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C05 modifiers: [sunz_corrected] standard_name: cloud_phase_distinction cloud_phase_distinction_raw: description: > same as cloud_phase_distinction compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C13 - name: C02 - name: C05 standard_name: cloud_phase_distinction water_vapors1: description: > Simple Water Vapor RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Simple_Water_Vapor_RGB.pdf ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C13 - name: C08 - name: C10 standard_name: water_vapors1 water_vapors2: description: > Differential Water Vapor RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DifferentialWaterVaporRGB_final.pdf ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C10 - name: C08 - name: C10 - name: C08 standard_name: water_vapors2 convection: description: > Day Convection RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C08 - name: C10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor 
prerequisites: - name: C05 - name: C02 standard_name: convection so2: description: > SO2 RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Quick_Guide_SO2_RGB.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C09 - name: C10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C13 - name: C11 - name: C13 standard_name: so2 snow_fog: description: > Day Snow-Fog RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFogRGB_final_v2.pdf ## it uses the default used in etc/enhancements/generic.yaml of snow_defaul compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C05 modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C13 standard_name: snow night_microphysics_abi: description: > Nighttime Microphysics RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_NtMicroRGB_Final_20191206.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C15 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C13 - name: C07 - name: C13 standard_name: night_microphysics fire_temperature_awips: description: > Fire Temperature RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf ## adapted from etc/composites/viirs.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C07 - name: C06 - name: C05 standard_name: fire_temperature name: fire_temperature_awips land_cloud_fire: description: > Day Land Cloud Fire RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C06 - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] standard_name: land_cloud_fire land_cloud: description: > Day Land Cloud RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] standard_name: land_cloud snow: description: > SEVIRI Snow RGB, for EUMETSAT references: EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/SnowRGB.pdf ## adapted from etc/composites/visir.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C05 modifiers: [sunz_corrected] - name: C07 modifiers: [nir_reflectance] standard_name: snow day_microphysics_eum: description: > SEVIRI Day Microphysics RGB, for EUMETSAT references: EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/DaymicroRGB.pdf ## adapted from etc/composites/ahi.yaml compositor: 
!!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C07 modifiers: [nir_reflectance] - name: C14 standard_name: day_microphysics day_microphysics_abi: description: > Variations to the Day Microphysics RGB product using the 2.3 micron channel, see the pages 12 and 13 in the references references: Quick Guide: http://www.virtuallab.bom.gov.au/files/3114/7884/4308/NewRGBProductsNov2016RFGmeeting.pdf ## adapted from etc/composites/ahi.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 - name: C06 - name: C13 standard_name: day_microphysics_ahi cloud_phase: description: > EUMETSAT Cloud Phase RGB product references: EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf Recipe : http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 modifiers: [sunz_corrected] - name: C06 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: cloud_phase cloud_phase_raw: description: > same as eum_cloud_phase RGB product, without modifiers compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C06 - name: C02 standard_name: cloud_phase tropical_airmass: description: > Tropical Airmass RGB, see the page 6 in the references too see the EUMETSAT article: https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_2861499.html references: Quick Guide: http://www.virtuallab.bom.gov.au/files/3114/7884/4308/NewRGBProductsNov2016RFGmeeting.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C10 - name: C08 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C13 - name: C12 - name: C08 standard_name: tropical_airmass color_infrared: description: > Similar to landsat and sentinel 2 combinations for enhance vegetation compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C03 modifiers: [sunz_corrected, rayleigh_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green standard_name: true_color highlight_C14: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: C14 standard_name: highlighted_toa_brightness_temperature satpy-0.34.0/satpy/etc/composites/agri.yaml000066400000000000000000000162131420401153000206350ustar00rootroot00000000000000sensor_name: visir/agri composites: green: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? 
- name: C01 modifiers: [sunz_corrected, rayleigh_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color # cloud_phase_distinction: description: > Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf Cloud Type recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13_A&RevisionSelectionMethod=LatestReleased&Rendition=Web ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C12 - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C05 modifiers: [sunz_corrected] standard_name: cloud_phase_distinction cloud_phase_distinction_raw: description: > same as cloud_phase_distinction compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C12 - name: C02 - name: C05 standard_name: cloud_phase_distinction snow_fog: description: > Day Snow-Fog RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFogRGB_final_v2.pdf ## it uses the default used in etc/enhancements/generic.yaml of snow_defaul compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C05 modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C12 standard_name: snow fire_temperature_awips: description: > Fire Temperature RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf ## adapted from etc/composites/viirs.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C07 - name: C06 - name: C05 standard_name: fire_temperature name: fire_temperature_awips land_cloud_fire: description: > Day Land Cloud Fire RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C06 - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] standard_name: land_cloud_fire land_cloud: description: > Day Land Cloud RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] standard_name: land_cloud snow: description: > SEVIRI Snow RGB, for EUMETSAT references: EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/SnowRGB.pdf ## adapted from etc/composites/visir.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 
C03 modifiers: [sunz_corrected] - name: C05 modifiers: [sunz_corrected] - name: C07 modifiers: [nir_reflectance] standard_name: snow day_microphysics_eum: description: > SEVIRI Day Microphysics RGB, for EUMETSAT references: EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/DaymicroRGB.pdf ## adapted from etc/composites/ahi.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C07 modifiers: [nir_reflectance] - name: C12 standard_name: day_microphysics day_microphysics_agri: description: > Variations to the Day Microphysics RGB product using the 2.3 micron channel, see the pages 12 and 13 in the references references: Quick Guide: http://www.virtuallab.bom.gov.au/files/3114/7884/4308/NewRGBProductsNov2016RFGmeeting.pdf ## adapted from etc/composites/ahi.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 - name: C06 - name: C12 standard_name: day_microphysics_ahi cloud_phase: description: > Cloud Phase RGB, for EUMETSAT Day Cloud Phase RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_2861499.html) "When we use the NIR2.3 instead of the VIS0.8 on the green beam, we can devise a new RGB product (let us call it 'Day Cloud Phase RGB') that has similar cloud colours than the Natural Colour RGB, but with improved separation of ice and water clouds." references: EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf Cloud Phase recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13&RevisionSelectionMethod=LatestReleased&Rendition=Web compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 modifiers: [sunz_corrected] - name: C06 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: natural_color cloud_phase_raw: description: > same as cloud_phase compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C06 - name: C02 standard_name: natural_color color_infrared: description: > Similar to landsat and sentinel 2 combinations for enhance vegetation compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C03 modifiers: [sunz_corrected, rayleigh_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green standard_name: true_color satpy-0.34.0/satpy/etc/composites/ahi.yaml000066400000000000000000000277601420401153000204650ustar00rootroot00000000000000sensor_name: visir/ahi modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - wavelength: 0.64 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle no_aerosol_rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: B03 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: green: compositor: !!python/name:satpy.composites.ahi.GreenCorrector # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. 
prerequisites: # should we be using the most corrected or least corrected inputs? # what happens if something requests more modifiers on top of this? - wavelength: 0.51 modifiers: [sunz_corrected, rayleigh_corrected] - wavelength: 0.85 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green_true_color_reproduction: # JMA True Color Reproduction green band # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html compositor: !!python/name:satpy.composites.ahi.GreenCorrector fractions: [0.6321, 0.2928, 0.0751] prerequisites: - name: B02 modifiers: [sunz_corrected, no_aerosol_rayleigh_corrected] - name: B03 modifiers: [sunz_corrected, no_aerosol_rayleigh_corrected] - name: B04 modifiers: [sunz_corrected] standard_name: none green_nocorr: compositor: !!python/name:satpy.composites.ahi.GreenCorrector # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? # what happens if something requests more modifiers on top of this? - wavelength: 0.51 - wavelength: 0.85 standard_name: toa_reflectance airmass: # PDF slides: https://www.eumetsat.int/website/home/News/ConferencesandEvents/DAT_2833302.html # Under session 2 by Akihiro Shimizu (JMA) compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B08 - name: B10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B12 - name: B14 - name: B08 standard_name: airmass ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B15 - name: B13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B14 - name: B11 - name: B13 standard_name: ash dust: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B15 - name: B13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B14 - name: B11 - name: B13 standard_name: dust fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B15 - name: B13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B14 - name: B11 - name: B13 standard_name: fog night_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B15 - name: B13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B14 - name: B07 - name: B13 standard_name: night_microphysics fire_temperature: # CIRA: Original VIIRS compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 3.85 calibration: radiance - wavelength: 2.26 calibration: radiance - wavelength: 1.61 calibration: radiance standard_name: fire_temperature name: fire_temperature fire_temperature_awips: # CIRA: EUMETSAT compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 3.85 - wavelength: 2.26 - wavelength: 1.61 standard_name: fire_temperature name: fire_temperature_awips fire_temperature_eumetsat: # CIRA: AWIPS compositor: 
!!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 3.85 - wavelength: 2.26 - wavelength: 1.61 standard_name: fire_temperature name: fire_temperature_eumetsat fire_temperature_39refl: # CIRA: All bands in Reflectance units (%) compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 3.85 modifiers: [nir_reflectance] - wavelength: 2.26 modifiers: [sunz_corrected] - wavelength: 1.61 modifiers: [sunz_corrected] standard_name: fire_temperature name: fire_temperature_39refl overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: B03 modifiers: [sunz_corrected] - name: B04 modifiers: [sunz_corrected] - name: B13 standard_name: overview overview_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - B03 - B04 - B13 standard_name: overview natural_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - wavelength: 1.63 modifiers: [sunz_corrected] #, rayleigh_corrected] - wavelength: 0.85 modifiers: [sunz_corrected] #, rayleigh_corrected] - wavelength: 0.635 modifiers: [sunz_corrected] #, rayleigh_corrected] high_resolution_band: blue standard_name: natural_color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: B03 modifiers: [sunz_corrected, rayleigh_corrected] - name: green - name: B01 modifiers: [sunz_corrected, rayleigh_corrected] high_resolution_band: red standard_name: true_color natural_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: B05 - name: B04 - name: B03 high_resolution_band: blue standard_name: natural_color true_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: B03 - name: green_nocorr - name: B01 high_resolution_band: red standard_name: true_color true_color_reproduction: # JMA True Color Reproduction # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: B03 modifiers: [sunz_corrected, no_aerosol_rayleigh_corrected] - name: green_true_color_reproduction - name: B01 modifiers: [sunz_corrected, no_aerosol_rayleigh_corrected] standard_name: true_color_reproduction # true_color_reducedsize_land: # compositor: !!python/name:satpy.composites.GenericCompositor # prerequisites: # - wavelength: 0.65 # modifiers: [reducer4, effective_solar_pathlength_corrected, # rayleigh_corrected_reducedsize_land] # - wavelength: 0.51 # modifiers: [reducer2, vegetation_corrected_reduced, effective_solar_pathlength_corrected, # rayleigh_corrected_reducedsize_land] # - wavelength: 0.46 # modifiers: [reducer2, effective_solar_pathlength_corrected, # rayleigh_corrected_reducedsize_land] # standard_name: true_color # # true_color_reducedsize_marine_tropical: # compositor: !!python/name:satpy.composites.GenericCompositor # prerequisites: # - wavelength: 0.65 # modifiers: [reducer4, effective_solar_pathlength_corrected, # rayleigh_corrected_reducedsize_marine_tropical] # - wavelength: 0.51 # modifiers: [reducer2, vegetation_corrected_reduced, effective_solar_pathlength_corrected, # rayleigh_corrected_reducedsize_marine_tropical] # - wavelength: 0.46 # modifiers: [reducer2, effective_solar_pathlength_corrected, # rayleigh_corrected_reducedsize_marine_tropical] # standard_name: true_color day_microphysics_eum: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.86 - wavelength: 3.9 modifiers: 
[nir_reflectance] - wavelength: 10.4 standard_name: day_microphysics day_microphysics_ahi: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.86 - wavelength: 2.3 - wavelength: 10.4 standard_name: day_microphysics cloud_phase_distinction: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 10.4 - wavelength: 0.64 - wavelength: 1.6 standard_name: cloud_phase_distinction colorized_ir_clouds: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: B13 standard_name: colorized_ir_clouds water_vapors1: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 10.4 - wavelength: 6.2 - wavelength: 7.3 standard_name: water_vapors1 mid_vapor: compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 7.3 - wavelength: 6.2 standard_name: mid_vapor water_vapors2: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: mid_vapor - wavelength: 7.3 - wavelength: 6.2 standard_name: water_vapors2 convection: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 6.7 - 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 3.75 - 10.4 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 1.63 - 0.635 standard_name: convection ir_cloud_day: standard_name: ir_cloud_day compositor: !!python/name:satpy.composites.CloudCompositor prerequisites: - name: B14 true_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_ir prerequisites: - true_color - night_ir_with_background true_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_ir_hires prerequisites: - true_color - night_ir_with_background_hires night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - name: B07 - name: B13 - name: B15 - name: B13 night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background_hires prerequisites: - night_ir_alpha - _night_background_hires satpy-0.34.0/satpy/etc/composites/ami.yaml000066400000000000000000000141001420401153000204520ustar00rootroot00000000000000sensor_name: visir/ami composites: green_raw: compositor: !!python/name:satpy.composites.ahi.GreenCorrector prerequisites: - name: VI005 modifiers: [sunz_corrected] - name: VI008 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance fractions: [0.85, 0.15] green: compositor: !!python/name:satpy.composites.ahi.GreenCorrector prerequisites: - name: VI005 modifiers: [sunz_corrected, rayleigh_corrected] - name: VI008 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance fractions: [0.85, 0.15] green_nocorr: compositor: !!python/name:satpy.composites.ahi.GreenCorrector prerequisites: - name: VI005 - name: VI008 standard_name: toa_reflectance fractions: [0.85, 0.15] true_color_raw: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: VI006 modifiers: [sunz_corrected] - name: green_raw - name: VI004 
modifiers: [sunz_corrected] standard_name: true_color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: VI006 modifiers: [sunz_corrected, rayleigh_corrected] - name: green - name: VI004 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: VI006 - name: green_nocorr - name: VI004 standard_name: true_color overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.65 - 0.85 - 10.4 standard_name: overview colorized_ir_clouds: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: '10.4' standard_name: colorized_ir_clouds natural_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: NR016 modifiers: [sunz_corrected] #, rayleigh_corrected] - name: VI008 modifiers: [sunz_corrected] #, rayleigh_corrected] - name: VI006 modifiers: [sunz_corrected] #, rayleigh_corrected] high_resolution_band: blue standard_name: natural_color natural_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: NR016 - name: VI008 - name: VI006 high_resolution_band: blue standard_name: natural_color day_microphysics_eum: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.86 - wavelength: 3.9 modifiers: [nir_reflectance] - wavelength: 10.4 standard_name: day_microphysics cloud_phase_distinction: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 10.4 - wavelength: 0.64 - wavelength: 1.6 standard_name: cloud_phase_distinction water_vapors1: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 10.4 - wavelength: 6.2 - wavelength: 7.3 standard_name: water_vapors1 mid_vapor: compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 7.3 - wavelength: 6.2 standard_name: mid_vapor water_vapors2: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: mid_vapor - wavelength: 7.3 - wavelength: 6.2 standard_name: water_vapors2 convection: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - WV069 - WV073 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - SW038 - IR105 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - NR016 - VI006 standard_name: convection ir_cloud_day: standard_name: ir_cloud_day compositor: !!python/name:satpy.composites.CloudCompositor prerequisites: - name: IR112 airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: WV063 - name: WV073 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: IR096 - name: IR105 - name: WV063 standard_name: airmass ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR123 - IR112 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR112 - IR087 - IR112 standard_name: ash true_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_ir prerequisites: - true_color - 
night_ir_with_background true_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_ir_hires prerequisites: - true_color - night_ir_with_background_hires night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - 3.83 - 10.35 - 12.36 - 10.35 night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background_hires prerequisites: - night_ir_alpha - _night_background_hires satpy-0.34.0/satpy/etc/composites/amsr2.yaml000066400000000000000000000003141420401153000207320ustar00rootroot00000000000000sensor_name: amsr2 composites: rgb_color: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: 'btemp_10.7h' - name: 'btemp_36.5h' - name: 'btemp_89.0ah' satpy-0.34.0/satpy/etc/composites/avhrr-3.yaml000066400000000000000000000012161420401153000211720ustar00rootroot00000000000000sensor_name: visir/avhrr-3 composites: nwc_pps_ct_masked_ir: compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - 10.8 - ct standard_name: nwc_pps_ct_masked_ir # Default is opaque (transparency = 0) conditions: - method: equal value: Cloud-free_land transparency: 100 - method: equal value: Cloud-free_sea transparency: 100 - method: equal value: Snow_over_land transparency: 100 - method: equal value: Sea_ice transparency: 100 - method: equal value: Fractional_clouds transparency: 45 satpy-0.34.0/satpy/etc/composites/fci.yaml000066400000000000000000000007611420401153000204550ustar00rootroot00000000000000sensor_name: visir/fci composites: binary_cloud_mask: # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM).
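# Note (illustrative): the 'lut' below is indexed by the integer category of 'cloud_state', i.e.
# category i becomes lut[i]. A rough numpy sketch of that lookup -- not the actual
# satpy.composites.CategoricalDataCompositor code:
#
#     import numpy as np
#
#     lut = np.array([np.nan, 0, 1, 1, 1, 1, 1, 1, 0, np.nan])
#     categories = np.array([[0, 2, 5], [8, 9, 1]])   # hypothetical cloud_state values
#     binary_mask = lut[categories]                   # -> nan / 0 / 1 per pixel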
compositor: !!python/name:satpy.composites.CategoricalDataCompositor prerequisites: - name: 'cloud_state' lut: [.nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan] standard_name: binary_cloud_mask satpy-0.34.0/satpy/etc/composites/glm.yaml000066400000000000000000000006701420401153000204720ustar00rootroot00000000000000sensor_name: visir/glm composites: C14_flash_extent_density: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: c14_flash_extent_density prerequisites: - flash_extent_density - C14 C14_yellow_lightning: compositor: !!python/name:satpy.composites.glm.HighlightCompositor standard_name: c14_yellow_lightning prerequisites: - flash_extent_density - highlight_C14 satpy-0.34.0/satpy/etc/composites/goes_imager.yaml000066400000000000000000000003371420401153000221740ustar00rootroot00000000000000# XXX arb sensor_name: visir/goes_imager composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - '00_7' - '00_7' - '10_7' standard_name: overview satpy-0.34.0/satpy/etc/composites/hsaf.yaml000066400000000000000000000012651420401153000206350ustar00rootroot00000000000000sensor_name: hsaf composites: instantaneous_rainrate_3: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: h03 standard_name: instantaneous_rainrate_3 instantaneous_rainrate_3b: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: h03B standard_name: instantaneous_rainrate_3b accum_rainrate_5: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: h05 standard_name: accum_rainrate_5 accum_rainrate_5b: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: h05B standard_name: accum_rainrate_5b satpy-0.34.0/satpy/etc/composites/mersi-2.yaml000066400000000000000000000102331420401153000211650ustar00rootroot00000000000000sensor_name: visir/mersi-2 modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: '3' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector prerequisites: - solar_zenith_angle nir_reflectance: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - name: '24' optional_prerequisites: - solar_zenith_angle composites: ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: '25' - name: '24' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: '24' - name: '23' - name: '24' standard_name: ash true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - '3' - '2' - '1' standard_name: true_color true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '3' # 0.65 modifiers: [sunz_corrected, rayleigh_corrected] - name: '2' modifiers: [sunz_corrected, rayleigh_corrected] - name: '1' # 0.47 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color natural_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: '6' modifiers: [sunz_corrected] - name: '15' modifiers: [sunz_corrected] - name: '3' modifiers: [sunz_corrected] optional_prerequisites: - name: '4' modifiers: 
[sunz_corrected] standard_name: natural_color high_resolution_band: green natural_color_lowres: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '6' modifiers: [sunz_corrected] - name: '15' modifiers: [sunz_corrected] - name: '12' modifiers: [sunz_corrected] standard_name: natural_color overview_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '12' - name: '15' - name: '24' standard_name: overview overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '12' modifiers: [sunz_corrected] - name: '15' modifiers: [sunz_corrected] - name: '24' standard_name: overview cloudtop: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '20' - name: '24' - name: '25' standard_name: cloudtop day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '15' modifiers: [sunz_corrected] - name: '20' modifiers: [nir_reflectance] - name: '24' standard_name: day_microphysics night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: '25' - name: '24' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: '24' - name: '20' - name: '24' standard_name: night_fog fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: '25' - name: '24' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: '24' - name: '23' - name: '24' standard_name: fog green_snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '6' - name: '3' - name: '24' standard_name: green_snow satpy-0.34.0/satpy/etc/composites/mhs.yaml000066400000000000000000000006271420401153000205040ustar00rootroot00000000000000sensor_name: mhs composites: mw183_humidity: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: '3' - name: '4' - name: '5' standard_name: mw183_humidity mw183_humidity_surface: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: '1' - name: '2' - name: '3' standard_name: mw183_humidity_surface satpy-0.34.0/satpy/etc/composites/modis.yaml000066400000000000000000000110441420401153000210230ustar00rootroot00000000000000sensor_name: visir/modis modifiers: rayleigh_corrected_crefl: modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector url: "https://www.ssec.wisc.edu/~davidh/polar2grid/modis_crefl/tbase.hdf" known_hash: "sha256:ed5183cddce905361c1cac8ae6e3a447212875ea421a05747751efe76f8a068e" dem_sds: "Elevation" prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle composites: true_color_uncorrected: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected] - name: '4' modifiers: [sunz_corrected] - name: '3' modifiers: [sunz_corrected] standard_name: true_color true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '4' modifiers: [sunz_corrected, rayleigh_corrected] - name: '3' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_thin: compositor: !!python/name:satpy.composites.FillingCompositor 
prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '12' modifiers: [sunz_corrected, rayleigh_corrected] - name: '10' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_crefl: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: '4' modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: '3' modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: true_color overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] - name: '31' standard_name: overview snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] - name: '6' modifiers: [sunz_corrected] - name: '20' modifiers: [nir_reflectance] standard_name: snow natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '6' modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] standard_name: natural_color day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] - name: '20' modifiers: [nir_reflectance] - name: '31' standard_name: day_microphysics airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 6.7 - wavelength: 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 9.7 - wavelength: 10.8 - wavelength: 6.7 standard_name: airmass ocean_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '4' modifiers: [sunz_corrected, rayleigh_corrected] - name: '3' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: ocean_color night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 3.75 - 10.8 standard_name: night_fog satpy-0.34.0/satpy/etc/composites/msi.yaml000066400000000000000000000112651420401153000205050ustar00rootroot00000000000000sensor_name: visir/msi modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - name: 'B04' modifiers: 
[sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_land: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B11' - name: 'B08' modifiers: [effective_solar_pathlength_corrected] - name: 'B04' modifiers: [effective_solar_pathlength_corrected] standard_name: natural_color true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color true_color_land: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' #modifiers: [effective_solar_pathlength_corrected] - name: 'B03' 
#modifiers: [effective_solar_pathlength_corrected] - name: 'B02' #modifiers: [effective_solar_pathlength_corrected] standard_name: true_color satpy-0.34.0/satpy/etc/composites/msu-gs.yaml000066400000000000000000000006551420401153000211310ustar00rootroot00000000000000sensor_name: visir/msu-gs composites: overview_raw: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - 00_9 - 00_9 - 10.8 standard_name: overview overview: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: 00_9 modifiers: [sunz_corrected] - name: 00_9 modifiers: [sunz_corrected] - 10.8 standard_name: overview satpy-0.34.0/satpy/etc/composites/msu_gsa.yaml000066400000000000000000000040001420401153000213400ustar00rootroot00000000000000sensor_name: visir/msu_gsa composites: overview_raw: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: C01 - name: C03 - name: C09 standard_name: overview overview: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: C01 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] - name: C09 standard_name: overview msugsa_color: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] - name: C01 modifiers: [sunz_corrected] standard_name: natural_color msugsa_color_raw: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: C03 - name: C02 - name: C01 standard_name: natural_color night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - 3.8 - 10.8 - 11.9 - 10.8 day_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: day_color_with_night_ir lim_low: 90.0 lim_high: 100.0 prerequisites: - msugsa_color_raw - night_ir_with_background day_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: day_color_with_night_ir lim_low: 90.0 lim_high: 100.0 prerequisites: - msugsa_color_raw - night_ir_with_background_hires night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background_hires prerequisites: - night_ir_alpha - _night_background_hires satpy-0.34.0/satpy/etc/composites/olci.yaml000066400000000000000000000114751420401153000206460ustar00rootroot00000000000000sensor_name: visir/olci modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - 
satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_land: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color true_color_land: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected] standard_name: true_color ocean_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: ocean_color 
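# Usage sketch (illustrative): the composites and modifiers defined above are selected
# by name when loading data with a Scene. Here olci_l1b_files is a placeholder for a
# list of OLCI L1b file paths.
#   from satpy import Scene
#   scn = Scene(filenames=olci_l1b_files, reader="olci_l1b")
#   scn.load(["true_color"])
#   scn.save_dataset("true_color", filename="olci_true_color.png")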
satpy-0.34.0/satpy/etc/composites/sar-c.yaml000066400000000000000000000000271420401153000207140ustar00rootroot00000000000000sensor_name: sar/sar-c satpy-0.34.0/satpy/etc/composites/sar.yaml000066400000000000000000000043241420401153000205000ustar00rootroot00000000000000sensor_name: sar composites: sar-ice: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: hh calibration: gamma - name: measurement polarization: hv calibration: gamma standard_name: sar-ice sar-ice-iw: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: vv calibration: gamma - name: measurement polarization: vh calibration: gamma standard_name: sar-ice sar-rgb: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: hh - name: measurement polarization: hv standard_name: sar-rgb sar-quick: compositor: !!python/name:satpy.composites.sar.SARQuickLook prerequisites: - name: measurement polarization: hh - name: measurement polarization: hv standard_name: sar-quick sar-ice-legacy: compositor: !!python/name:satpy.composites.sar.SARIceLegacy prerequisites: - name: measurement polarization: hh - name: measurement polarization: hv standard_name: sar-ice-legacy sar-land: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: hh - name: measurement polarization: hv standard_name: sar-land sar-land-iw: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: vv - name: measurement polarization: vh standard_name: sar-land sar-ice-log: compositor: !!python/name:satpy.composites.sar.SARIceLog prerequisites: - name: measurement polarization: hh calibration: gamma quantity: dB - name: measurement polarization: hv calibration: gamma quantity: dB standard_name: sar-ice-log sar-ice-log-iw: compositor: !!python/name:satpy.composites.sar.SARIceLog prerequisites: - name: measurement polarization: vv calibration: gamma quantity: dB - name: measurement polarization: vh calibration: gamma quantity: dB standard_name: sar-ice-log satpy-0.34.0/satpy/etc/composites/scatterometer.yaml000066400000000000000000000006021420401153000225670ustar00rootroot00000000000000sensor_name: visir/scatterometer composites: scat_wind_speed: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: wind_speed_selection standard_name: scat_wind_speed soil_moisture: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: surface_soil_moisture standard_name: soil_moisture satpy-0.34.0/satpy/etc/composites/seviri.yaml000066400000000000000000000372611420401153000212220ustar00rootroot00000000000000sensor_name: visir/seviri modifiers: sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector co2_corrected: modifier: !!python/name:satpy.modifiers.CO2Corrector sensor: [seviri] prerequisites: - IR_108 - IR_134 rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: VIS006 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: ct_masked_ir: compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - IR_108 - ct standard_name: ct_masked_ir conditions: - method: equal value: 0 transparency: 100 - method: equal value: 1 transparency: 100 - method: equal value: 2 
transparency: 100 - method: equal value: 3 transparency: 100 - method: equal value: 4 transparency: 100 - method: equal value: 10 transparency: 35 nwc_geo_ct_masked_ir: compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - IR_108 - ct standard_name: nwc_geo_ct_masked_ir # Default is opaque (transparency = 0) conditions: - method: equal value: Cloud-free_land transparency: 100 - method: equal value: Cloud-free_sea transparency: 100 - method: equal value: Snow_over_land transparency: 100 - method: equal value: Sea_ice transparency: 100 - method: equal value: Fractional_clouds transparency: 45 - method: equal value: High_semitransparent_thin_clouds transparency: 50 - method: equal value: High_semitransparent_above_snow_ice transparency: 60 cloudtop: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_039 modifiers: [co2_corrected] - IR_108 - IR_120 standard_name: cloudtop cloudtop_daytime: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_039 modifiers: [nir_emissive] - IR_108 - IR_120 standard_name: cloudtop convection: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - WV_062 - WV_073 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: IR_039 modifiers: [co2_corrected] - IR_108 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_016 - VIS006 standard_name: convection night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_120 - IR_108 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_108 - name: IR_039 modifiers: [co2_corrected] - IR_108 standard_name: night_fog snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS008 modifiers: [sunz_corrected] - name: IR_016 modifiers: [sunz_corrected] - name: IR_039 modifiers: [nir_reflectance] standard_name: snow day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS008 modifiers: [sunz_corrected] - name: IR_039 modifiers: [nir_reflectance] - IR_108 standard_name: day_microphysics day_microphysics_winter: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS008 modifiers: [sunz_corrected] - name: IR_039 modifiers: [nir_reflectance] - IR_108 standard_name: day_microphysics_winter natural_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - IR_016 - VIS008 - VIS006 standard_name: natural_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_016 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - name: VIS006 modifiers: [sunz_corrected] standard_name: natural_color natural_color_nocorr: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_016 - name: VIS008 - name: VIS006 standard_name: natural_color fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_120 - IR_108 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_108 - IR_087 - IR_108 standard_name: fog cloudmask: compositor: !!python/name:satpy.composites.PaletteCompositor 
prerequisites: - cma - cma_pal standard_name: cloudmask cloudtype: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ct - ct_pal standard_name: cloudtype cloud_top_height: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ctth_alti - ctth_alti_pal standard_name: cloud_top_height cloud_top_pressure: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ctth_pres - ctth_pres_pal standard_name: cloud_top_pressure cloud_top_temperature: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ctth_tempe - ctth_tempe_pal standard_name: cloud_top_temperature cloud_top_phase: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cmic_phase - cmic_phase_pal standard_name: cloud_top_phase cloud_drop_effective_radius: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cmic_reff - cmic_reff_pal standard_name: cloud_drop_effective_radius cloud_optical_thickness: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cmic_cot - cmic_cot_pal standard_name: cloud_optical_thickness cloud_liquid_water_path: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cmic_lwp - cmic_lwp_pal standard_name: cloud_liquid_water_path cloud_ice_water_path: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cmic_iwp - cmic_iwp_pal standard_name: cloud_ice_water_path precipitation_probability: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - pc - pc_pal standard_name: precipitation_probability convective_rain_rate: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - crr - crr_pal standard_name: convective_rain_rate convective_precipitation_hourly_accumulation: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - crr_accum - crr_pal standard_name: convective_precipitation_hourly_accumulation total_precipitable_water: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ishai_tpw - ishai_tpw_pal standard_name: total_precipitable_water showalter_index: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ishai_shw - ishai_shw_pal standard_name: showalter_index lifted_index: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ishai_li - ishai_li_pal standard_name: lifted_index convection_initiation_prob30: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ci_prob30 - ci_pal standard_name: convection_initiation_prob30 convection_initiation_prob60: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ci_prob60 - ci_pal standard_name: convection_initiation_prob60 convection_initiation_prob90: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ci_prob90 - ci_pal standard_name: convection_initiation_prob90 asii_prob: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - asii_turb_trop_prob - asii_turb_prob_pal standard_name: asii_prob rdt_cell_type: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - MapCellCatType - MapCellCatType_pal standard_name: rdt_cell_type realistic_colors: compositor: !!python/name:satpy.composites.RealisticColors standard_name: realistic_colors prerequisites: - name: VIS006 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] ir_overview: 
compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_039 modifiers: [co2_corrected] - IR_108 - IR_120 standard_name: ir_overview overview_raw: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - VIS006 - VIS008 - IR_108 standard_name: overview overview: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: VIS006 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - IR_108 standard_name: overview green_snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_016 - name: VIS006 - name: IR_108 standard_name: green_snow colorized_ir_clouds: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: 'IR_108' standard_name: colorized_ir_clouds vis_sharpened_ir: compositor: !!python/name:satpy.composites.LuminanceSharpeningCompositor standard_name: vis_sharpened_ir prerequisites: - name: 'HRV' modifiers: [sunz_corrected] - name: colorized_ir_clouds ir_sandwich: compositor: !!python/name:satpy.composites.SandwichCompositor standard_name: ir_sandwich prerequisites: - name: 'HRV' modifiers: [sunz_corrected] - name: colorized_ir_clouds natural_enh: compositor: !!python/name:satpy.composites.NaturalEnh standard_name: natural_enh ch16_w: 1.3 ch08_w: 2.5 ch06_w: 2.2 prerequisites: - name: IR_016 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - name: VIS006 modifiers: [sunz_corrected] hrv_clouds: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv_clouds prerequisites: - name: HRV modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] - IR_108 hrv_fog: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv_fog prerequisites: - name: IR_016 modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] hrv_severe_storms: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: HRV modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 10.8 - wavelength: 3.9 standard_name: hrv_severe_storms hrv_severe_storms_masked: compositor: !!python/name:satpy.composites.MaskingCompositor conditions: # Data will be masked where SZA corrected HRV data is less than 70 %, or NaN - method: less value: 75 transparency: 70 - method: less value: 70 transparency: 100 - method: isnan transparency: 100 prerequisites: # Composite - name: hrv_severe_storms # Data used in masking - name: HRV modifiers: [sunz_corrected] standard_name: hrv_severe_storms_masked natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_with_night_fog prerequisites: - natural_color - night_fog natural_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir prerequisites: - natural_color - night_ir_with_background natural_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir_hires prerequisites: - natural_color - night_ir_with_background_hires natural_enh_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir prerequisites: - natural_enh - night_ir_with_background natural_enh_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor 
standard_name: natural_color_with_night_ir_hires prerequisites: - natural_enh - night_ir_with_background_hires night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - name: IR_039 - name: IR_108 - name: IR_120 - name: IR_108 night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background_hires prerequisites: - night_ir_alpha - _night_background_hires _vis06: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: vis06 prerequisites: - name: VIS006 modifiers: [sunz_corrected] _hrv: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv prerequisites: - name: HRV modifiers: [sunz_corrected] _vis06_filled_hrv: compositor: !!python/name:satpy.composites.Filler standard_name: vis06_filled_hrv prerequisites: - _hrv - _vis06 _ir108: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: ir108 prerequisites: - name: IR_108 _vis_with_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: vis_with_ir lim_low: 85.0 lim_high: 88.0 prerequisites: - _vis06_filled_hrv - _ir108 vis_with_ir_cloud_overlay: compositor: !!python/name:satpy.composites.MaskingCompositor standard_name: vis_with_ir_cloud_overlay prerequisites: - _vis_with_ir - ct # Default is opaque (transparency = 0) conditions: - method: equal value: Cloud-free_land transparency: 100 - method: equal value: Cloud-free_sea transparency: 100 - method: equal value: Snow_over_land transparency: 100 - method: equal value: Sea_ice transparency: 100 - method: equal value: Fractional_clouds transparency: 45 - method: equal value: High_semitransparent_thin_clouds transparency: 50 - method: equal value: High_semitransparent_above_snow_ice transparency: 60 satpy-0.34.0/satpy/etc/composites/slstr.yaml000066400000000000000000000023001420401153000210520ustar00rootroot00000000000000sensor_name: visir/slstr composite_identification_keys: name: required: true resolution: transitive: true view: enum: - nadir - oblique transitive: true stripe: enum: - a - b - i modifiers: nir_reflectance: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - S8 optional_prerequisites: - solar_zenith_angle - 13.4 sunz_threshold: 85.0 composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S2 modifiers: [sunz_corrected] - name: S3 modifiers: [sunz_corrected] - S8 standard_name: overview natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S5 modifiers: [sunz_corrected] - name: S3 modifiers: [sunz_corrected] - name: S2 modifiers: [sunz_corrected] standard_name: natural_color day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S3 modifiers: [sunz_corrected] - name: S7 modifiers: [nir_reflectance] - S8 standard_name: day_microphysics satpy-0.34.0/satpy/etc/composites/tropomi.yaml000066400000000000000000000006561420401153000214100ustar00rootroot00000000000000sensor_name: tropomi composites: no2_tropospheric_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - nitrogendioxide_tropospheric_column standard_name: no2_tropospheric_clean no2_tropospheric_polluted: compositor: 
!!python/name:satpy.composites.GenericCompositor prerequisites: - nitrogendioxide_tropospheric_column standard_name: no2_tropospheric_polluted satpy-0.34.0/satpy/etc/composites/viirs.yaml000066400000000000000000000356421420401153000210560ustar00rootroot00000000000000sensor_name: visir/viirs modifiers: rayleigh_corrected_crefl: modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector url: "https://www.ssec.wisc.edu/~davidh/polar2grid/viirs_crefl/CMGDEM.hdf" known_hash: "sha256:f33f1f867d79fff4fafe128f61c154236dd74fcc97bf418ea1437977a38d0604" prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_crefl_iband: modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector url: "https://www.ssec.wisc.edu/~davidh/polar2grid/viirs_crefl/CMGDEM.hdf" known_hash: "sha256:f33f1f867d79fff4fafe128f61c154236dd74fcc97bf418ea1437977a38d0604" prerequisites: - name: satellite_azimuth_angle resolution: 371 - name: satellite_zenith_angle resolution: 371 - name: solar_azimuth_angle resolution: 371 - name: solar_zenith_angle resolution: 371 rayleigh_corrected_iband: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: I01 resolution: 371 modifiers: [sunz_corrected_iband] optional_prerequisites: - name: satellite_azimuth_angle resolution: 371 - name: satellite_zenith_angle resolution: 371 - name: solar_azimuth_angle resolution: 371 - name: solar_zenith_angle resolution: 371 rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_land: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector prerequisites: - name: solar_zenith_angle resolution: 742 sunz_corrected_iband: modifier: !!python/name:satpy.modifiers.SunZenithCorrector prerequisites: - name: solar_zenith_angle resolution: 371 nir_emissive_lowres: modifier: !!python/name:satpy.modifiers.NIREmissivePartFromReflectance prerequisites: - M15 optional_prerequisites: - name: solar_zenith_angle resolution: 742 nir_emissive_hires: modifier: !!python/name:satpy.modifiers.NIREmissivePartFromReflectance prerequisites: - I05 optional_prerequisites: - name: solar_zenith_angle resolution: 
371 nir_reflectance_lowres: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - M15 optional_prerequisites: - name: solar_zenith_angle resolution: 742 nir_reflectance_hires: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - I05 optional_prerequisites: - name: solar_zenith_angle resolution: 371 composites: true_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: true_color high_resolution_band: red true_color_crefl: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_crefl] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_crefl_iband] standard_name: true_color high_resolution_band: red true_color_lowres: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_lowres_crefl: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: true_color true_color_lowres_land: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_land] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_land] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_land] standard_name: true_color true_color_lowres_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color false_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M11 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: false_color high_resolution_band: blue fire_temperature: # CIRA: Original VIIRS compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 calibration: radiance - name: M11 calibration: radiance - name: M10 calibration: radiance standard_name: fire_temperature name: fire_temperature fire_temperature_awips: # CIRA: EUMETSAT compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 - name: M11 - name: M10 standard_name: fire_temperature name: fire_temperature_awips fire_temperature_eumetsat: # CIRA: AWIPS compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 - name: M11 - name: M10 
standard_name: fire_temperature name: fire_temperature_eumetsat fire_temperature_39refl: # CIRA: All bands in Reflectance units (%) compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 modifiers: [nir_reflectance_lowres] - name: M11 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] standard_name: fire_temperature name: fire_temperature_39refl natural_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M10 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: natural_color high_resolution_band: blue natural_color_sun: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: I03 modifiers: [sunz_corrected_iband] - name: I02 modifiers: [sunz_corrected_iband] - name: I01 modifiers: [sunz_corrected_iband] standard_name: natural_color natural_color_sun_lowres: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: M10 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected] standard_name: natural_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected] - name: M04 modifiers: [sunz_corrected] - name: M03 modifiers: [sunz_corrected] standard_name: true_color night_overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - DNB - DNB - M15 standard_name: night_overview overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - M05 - M07 - M15 standard_name: overview hr_overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - I01 - I02 - I05 standard_name: overview night_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - hncc_dnb - M12 - M15 standard_name: night_microphysics fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: fog dust: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: dust ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: ash night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M12 - M15 standard_name: night_fog ssec_fog: compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - I05 - I04 standard_name: temperature_difference cloudtop_daytime: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 modifiers: [nir_emissive_lowres] - name: M15 - 
name: M16 standard_name: cloudtop hr_cloudtop_daytime: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I04 modifiers: [nir_emissive_hires] - name: I05 - name: I05 standard_name: cloudtop snow_lowres: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M07 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] - name: M12 modifiers: [nir_reflectance_lowres] standard_name: snow snow_hires: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I02 modifiers: [sunz_corrected_iband] - name: I03 modifiers: [sunz_corrected_iband] - name: I04 modifiers: [nir_reflectance_hires] standard_name: snow snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I02 modifiers: [sunz_corrected_iband] - name: I03 modifiers: [sunz_corrected_iband] - name: I04 modifiers: [nir_reflectance_hires] standard_name: snow histogram_dnb: compositor: !!python/name:satpy.composites.viirs.HistogramDNB prerequisites: - DNB - dnb_solar_zenith_angle standard_name: equalized_radiance units: "1" adaptive_dnb: compositor: !!python/name:satpy.composites.viirs.AdaptiveDNB prerequisites: - DNB - dnb_solar_zenith_angle adaptive_day: multiple adaptive_mixed: always adaptive_night: never standard_name: equalized_radiance units: "1" dynamic_dnb: compositor: !!python/name:satpy.composites.viirs.ERFDNB prerequisites: - DNB - dnb_solar_zenith_angle - dnb_lunar_zenith_angle - dnb_moon_illumination_fraction standard_name: equalized_radiance units: "1" hncc_dnb: compositor: !!python/name:satpy.composites.viirs.NCCZinke prerequisites: - DNB - dnb_solar_zenith_angle - dnb_lunar_zenith_angle - dnb_moon_illumination_fraction standard_name: ncc_radiance units: "1" night_overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - hncc_dnb - hncc_dnb - M15 standard_name: night_overview snow_age: compositor: !!python/name:satpy.composites.viirs.SnowAge prerequisites: - name: M07 modifiers: [sunz_corrected] - name: M08 modifiers: [sunz_corrected] - name: M09 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] - name: M11 modifiers: [sunz_corrected] standard_name: snow_age ocean_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: ocean_color high_resolution_band: red satpy-0.34.0/satpy/etc/composites/virr.yaml000066400000000000000000000022361420401153000206750ustar00rootroot00000000000000sensor_name: visir/virr modifiers: sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector prerequisites: - name: solar_zenith_angle rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: '1' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle composites: true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected] - name: '9' modifiers: [sunz_corrected] - name: '7' modifiers: [sunz_corrected] standard_name: true_color true_color: compositor: 
!!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '9' modifiers: [sunz_corrected, rayleigh_corrected] - name: '7' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color satpy-0.34.0/satpy/etc/composites/visir.yaml000066400000000000000000000263251420401153000210540ustar00rootroot00000000000000sensor_name: visir composite_identification_keys: name: required: true resolution: transitive: false modifiers: sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector optional_prerequisites: - solar_zenith_angle effective_solar_pathlength_corrected: modifier: !!python/name:satpy.modifiers.EffectiveSolarPathLengthCorrector optional_prerequisites: - solar_zenith_angle co2_corrected: modifier: !!python/name:satpy.modifiers.CO2Corrector prerequisites: - 10.8 - 13.4 nir_reflectance: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - 11 optional_prerequisites: - solar_zenith_angle - 13.4 sunz_threshold: 85.0 nir_emissive: modifier: !!python/name:satpy.modifiers.NIREmissivePartFromReflectance prerequisites: - 11 optional_prerequisites: - solar_zenith_angle - 13.4 sunz_threshold: 85.0 atm_correction: modifier: !!python/name:satpy.modifiers.PSPAtmosphericalCorrection optional_prerequisites: - satellite_zenith_angle rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_land: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 6.2 - wavelength: 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 9.7 - wavelength: 10.8 - wavelength: 6.2 standard_name: airmass ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: ash cloudtop: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 3.9 - 10.8 - 12.0 standard_name: cloudtop convection: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 
compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 6.2 - 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 3.9 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 1.6 - 0.6 standard_name: convection snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.8 modifiers: [sunz_corrected] - wavelength: 1.63 modifiers: [sunz_corrected] - wavelength: 3.9 modifiers: [nir_reflectance] standard_name: snow day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.85 modifiers: [sunz_corrected] - wavelength: 3.9 modifiers: [nir_reflectance] - 10.8 standard_name: day_microphysics dust: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: dust fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: fog green_snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 1.63 - 0.635 - 10.8 standard_name: green_snow natural_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 1.63 - 0.85 - 0.635 standard_name: natural_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 1.63 modifiers: [sunz_corrected] - wavelength: 0.85 modifiers: [sunz_corrected] - wavelength: 0.635 modifiers: [sunz_corrected] standard_name: natural_color night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 3.9 - 10.8 standard_name: night_fog overview_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.6 - 0.8 - 10.8 standard_name: overview overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.6 modifiers: [sunz_corrected] - wavelength: 0.8 modifiers: [sunz_corrected] - 10.8 standard_name: overview true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.65 - 0.5 - 0.45 standard_name: true_color natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_with_night_fog prerequisites: - natural_color - night_fog - solar_zenith_angle precipitation_probability: compositor: !!python/name:satpy.composites.cloud_products.PrecipCloudsRGB prerequisites: - pc_precip_light - pc_precip_moderate - pc_precip_intense - pc_status_flag standard_name: precipitation_probability cloudmask: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cma - cma_pal standard_name: cloudmask cloudmask_extended: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cma_extended - cma_extended_pal standard_name: cloudmask cloudmask_probability: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cmaprob - cmaprob_pal standard_name: 
cloudmask_probability cloudtype: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ct - ct_pal standard_name: cloudtype cloud_top_height: compositor: !!python/name:satpy.composites.cloud_products.CloudTopHeightCompositor prerequisites: - ctth_alti - ctth_alti_pal - ctth_status_flag standard_name: cloud_top_height cloud_top_pressure: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ctth_pres - ctth_pres_pal standard_name: cloud_top_pressure cloud_top_temperature: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ctth_tempe - ctth_tempe_pal standard_name: cloud_top_temperature cloud_top_phase: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cmic_phase - cmic_phase_pal standard_name: cloud_top_phase cloud_drop_effective_radius: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - cmic_reff - cmic_reff_pal standard_name: cloud_drop_effective_radius cloud_optical_thickness: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - cmic_cot - cmic_cot_pal standard_name: cloud_optical_thickness cloud_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - cmic_cwp - cmic_cwp_pal standard_name: cloud_water_path ice_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - cmic_iwp - cmic_iwp_pal standard_name: ice_water_path liquid_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - cmic_lwp - cmic_lwp_pal standard_name: liquid_water_path night_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 3.9 - 10.8 standard_name: night_microphysics ir108_3d: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: ir108_3d prerequisites: - wavelength: 10.8 ir_cloud_day: standard_name: ir_cloud_day compositor: !!python/name:satpy.composites.CloudCompositor prerequisites: - 10.8 transition_min: 258.15 transition_max: 298.15 transition_gamma: 3.0 natural_enh: compositor: !!python/name:satpy.composites.NaturalEnh standard_name: natural_enh prerequisites: - wavelength: 1.6 modifiers: [sunz_corrected] - wavelength: 0.8 modifiers: [sunz_corrected] - wavelength: 0.6 modifiers: [sunz_corrected] _night_background: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: night_background url: "https://neo.sci.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_01deg_geo.tif" known_hash: "sha256:146c116962677ae113d9233374715686737ff97141a77cc5da69a9451315a685" # optional _night_background_hires: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: night_background_hires url: "https://neo.sci.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_3km_geo.tif" known_hash: "sha256:e915ef2a20d84e2a59e1547d3ad564463ad4bcf22bfa02e0e0b8ed1cd722e9c0" # optional satpy-0.34.0/satpy/etc/enhancements/000077500000000000000000000000001420401153000173075ustar00rootroot00000000000000satpy-0.34.0/satpy/etc/enhancements/abi.yaml000066400000000000000000000162461420401153000207370ustar00rootroot00000000000000enhancements: cimss_true_color: standard_name: cimss_true_color sensor: abi operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch 
kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 120.} - name: sqrt method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 2.0} - name: contrast method: !!python/name:satpy.enhancements.abi.cimss_true_color_contrast cmi_reflectance_default: standard_name: toa_lambertian_equivalent_albedo_multiplied_by_cosine_solar_zenith_angle operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} airmass: standard_name: airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-26.2, -43.2, 243.9] max_stretch: [0.6, 6.7, 208.5] cira_fire_temperature: standard_name: cira_fire_temperature operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [273.15, 0.0, 0.0] max_stretch: [333.15, 100.0, 75.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [0.4, 1, 1] cira_day_convection: standard_name: cira_day_convection operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-35.0, -5.0, -75.0] max_stretch: [5.0, 60.0, 25.0] cimss_cloud_type: standard_name: cimss_cloud_type operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0.0, 0.0, 0.0] max_stretch: [10.0, 80.0, 80.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.5, 0.75, 1.0] # ash_abi: ## RGB Ash recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf standard_name: ash sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-6.7, -6.0, 243.6] max_stretch: [ 2.6, 6.3, 302.4] dust_abi: ## RGB Dust recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/Dust_RGB_Quick_Guide.pdf standard_name: dust sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-6.7, -0.5, 261.2] max_stretch: [ 2.6, 20.0, 288.7] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 2.5, 1] convection_abi: ## RGB Convection recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf standard_name: convection sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-35.0, -5.0, -75] max_stretch: [ 5.0, 60.0, 25] night_microphysics_abi: ## RGB Nighttime Microphysics recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_NtMicroRGB_final.pdf standard_name: night_microphysics sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-6.7, -3.1, 243.55] max_stretch: [ 2.6, 5.2, 292.65] land_cloud_fire: ## RGB Day Land Cloud Fire recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf standard_name: land_cloud_fire sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 100} land_cloud: ## RGB Day Land Cloud Fire recipe source: 
http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf standard_name: land_cloud sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [ 0.0, 0.0, 0.0] max_stretch: [97.5, 108.6, 100.0] # IR with white clouds highlighted_brightness_temperature: standard_name: highlighted_toa_brightness_temperature sensor: abi operations: - name: btemp_threshold method: !!python/name:satpy.enhancements.btemp_threshold kwargs: threshold: 242.0 min_in: 163.0 max_in: 330.0 # EumetSat cloud phase and cloud type RGB recipes # http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf cloud_phase: standard_name: cloud_phase operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [ 0, 0, 0] max_stretch: [50, 50, 100] # NOAA GOES-R Level-2 ABI Cloud Mask product # https://www.goes-r.gov/products/baseline-clear-sky-mask.html binary_cloud_mask: name: BCM operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - {'values': [ 0, # Clear 1, # Cloudy 255, # Fill Value ], 'colors': [[ 94, 79, 162], # blue, 0 = Clear [255, 255, 255], # white, 1 = Cloudy [ 0, 0, 0], # black, 255 = Fill Value ], 'color_scale': 255, } four_level_cloud_mask: name: ACM operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - {'values': [ 0, # Clear 1, # Probably Clear 2, # Probably Cloudy 3, # Cloudy 255, # Fill Value ], 'colors': [[ 94, 79, 162], # blue, 0 = Clear [ 73, 228, 242], # cyan, 1 = Probably Clear [158, 1, 66], # red, 2 = Probably Cloudy [255, 255, 255], # white, 3 = Cloudy [ 0, 0, 0], # black, 255 = Fill Value ], 'color_scale': 255, } cloud_probability: name: Cloud_Probabilities operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: 'spectral', reverse: true, min_value: 0.0, max_value: 1.0, } satpy-0.34.0/satpy/etc/enhancements/ahi.yaml000066400000000000000000000014021420401153000207310ustar00rootroot00000000000000enhancements: airmass: # matches ABI standard_name: airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-26.2, -43.2, 243.9] max_stretch: [0.6, 6.7, 208.5] true_color_reproduction: standard_name: true_color_reproduction operations: - name: color method: !!python/name:satpy.enhancements.ahi.jma_true_color_reproduction - name: cira_stretch method: !!python/name:satpy.enhancements.cira_stretch - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0.08, 0.08, 0.08] max_stretch: [.93, .90, .90] satpy-0.34.0/satpy/etc/enhancements/amsr2.yaml000066400000000000000000000056571420401153000212340ustar00rootroot00000000000000enhancements: # GAASP enhancements based on PNGs at: # https://www.ospo.noaa.gov/Products/atmosphere/gpds/maps.html?GPRR#gpdsMaps gaasp_clw: name: CLW sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 0.5} gaasp_sst: name: SST sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: -5.0, max_stretch: 35} gaasp_tpw: name: TPW sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 75.0} gaasp_wspd: name: WSPD 
sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} # Snow_Cover unscaled (category product) gaasp_snow_depth: name: Snow_Depth sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 150.0} gaasp_swe: name: SWE sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 16.0} gaasp_soil_moisture: name: Soil_Moisture sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} gaasp_ice_concentration_nh: name: NASA_Team_2_Ice_Concentration_NH sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} gaasp_ice_concentration_sh: name: NASA_Team_2_Ice_Concentration_SH sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} # gaasp_latency_nh: # name: Latency_NH # sensor: amsr2 # operations: # - name: linear_stretch # method: !!python/name:satpy.enhancements.stretch # kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} # gaasp_latency_sh: # name: Latency_SH # sensor: amsr2 # operations: # - name: linear_stretch # method: !!python/name:satpy.enhancements.stretch # kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} gaasp_rain_rate: name: Rain_Rate sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 65.} satpy-0.34.0/satpy/etc/enhancements/generic.yaml000066400000000000000000000727151420401153000216230ustar00rootroot000000000000003d_filter: !!python/name:satpy.enhancements.three_d_effect enhancements: default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} reflectance_default: standard_name: toa_bidirectional_reflectance operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} true_color_default: standard_name: true_color operations: - name: cira_stretch method: !!python/name:satpy.enhancements.cira_stretch true_color_crefl: name: true_color_crefl standard_name: true_color operations: - name: reflectance_range method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0., max_stretch: 100.} - name: linear interpolation method: !!python/name:satpy.enhancements.piecewise_linear_stretch kwargs: # Polar2Grid's "Preferred" scaling xp: [0., 25., 55., 100., 255.] fp: [0., 90., 140., 175., 255.] 
reference_scale_factor: 255 overview_default: standard_name: overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [false, false, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.6} ocean_color_default: standard_name: ocean_color operations: - name: cira_stretch method: !!python/name:satpy.enhancements.cira_stretch - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 2.6} night_overview_default: standard_name: night_overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [false, false, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.6} natural_color_default: standard_name: natural_color operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 120} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.8} fire_temperature: standard_name: fire_temperature name: fire_temperature operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [3.5, 35., 85.] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [1.0, 1.0, 1.0]} fire_temperature_awips: standard_name: fire_temperature name: fire_temperature_awips operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [273.0, 0, 0] max_stretch: [333.0, 100., 75.] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [0.4, 1.0, 1.0]} fire_temperature_eumetsat: standard_name: fire_temperature name: fire_temperature_eumetsat operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [273.0, 0, 0] max_stretch: [350.0, 60., 60.] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [1.0, 1.0, 1.0]} fire_temperature_39refl: standard_name: fire_temperature name: fire_temperature_39refl operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [50., 100., 75.] 
- name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [1.0, 1.0, 1.0]} airmass_default: standard_name: airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-25, -40, 243] max_stretch: [0, 5, 208] green_snow_default: standard_name: green_snow operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [false, false, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.6} convection_default: standard_name: convection operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-30, 0, -70] max_stretch: [0, 55, 20] dust_default: standard_name: dust operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 261] max_stretch: [2, 15, 289] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 2.5, 1] ash_default: standard_name: ash operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, -4, 243] max_stretch: [2, 5, 303] fog_default: standard_name: fog operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 6, 283] night_fog_default: standard_name: night_fog operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 6, 293] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 2, 1] snow_default: standard_name: snow operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [100, 70, 30] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.7, 1.7, 1.7] day_microphysics_default: standard_name: day_microphysics operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 60, 323] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 2.5, 1] day_microphysics_winter: standard_name: day_microphysics_winter operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 25, 323] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 1.5, 1] cloudtop_default: standard_name: cloudtop operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear cutoffs: [0.005, 0.005] sar-ice: standard_name: sar-ice operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [0.10, 1.37, 0.32 ] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [2, 3, 2] sar-ice-log: standard_name: sar-ice-log operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-38, -32, -40] max_stretch: [-10, 0, 0 ] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.2, 0.42, 0.75] sar-ice-legacy: standard_name: sar-ice-legacy operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: 
stretch: linear cutoffs: [0.2, 0.02] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 1.2, 1] sar-land: standard_name: sar-land operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0.01, 1. , 0.15 ] max_stretch: [0.765, 50., 1.4] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.5, 2.25, 1.5] sar-rgb: standard_name: sar-rgb operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true] sar-quick: standard_name: sar-quick operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear cutoffs: [0.2, 0.02] natural_with_ir_night: standard_name: natural_with_night_fog operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloudtype: standard_name: cloudtype operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloudmask: standard_name: cloudmask operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloudmask_probability: standard_name: cloudmask_probability operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_top_height: standard_name: cloud_top_height operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_top_pressure: standard_name: cloud_top_pressure operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_top_temperature: standard_name: cloud_top_temperature operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_top_phase: standard_name: cloud_top_phase operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_drop_effective_radius: standard_name: cloud_drop_effective_radius operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_optical_thickness: standard_name: cloud_optical_thickness operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_liquid_water_path: standard_name: cloud_liquid_water_path operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_ice_water_path: standard_name: cloud_ice_water_path operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] precipitation_probability: standard_name: precipitation_probability operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convective_rain_rate: standard_name: convective_rain_rate operations: - name: stretch method: 
!!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convective_precipitation_hourly_accumulation: standard_name: convective_precipitation_hourly_accumulation operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] total_precipitable_water: standard_name: total_precipitable_water operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] showalter_index: standard_name: showalter_index operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] lifted_index: standard_name: lifted_index operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convection_initiation_prob30: standard_name: convection_initiation_prob30 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convection_initiation_prob60: standard_name: convection_initiation_prob60 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convection_initiation_prob90: standard_name: convection_initiation_prob90 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] rdt_cell_type: standard_name: rdt_cell_type operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] asii_prob: standard_name: asii_prob operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] day_microphysics_ahi: standard_name: day_microphysics_ahi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 70, 323] cloud_phase_distinction_default: standard_name: cloud_phase_distinction operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [280.67, 0, 1] max_stretch: [219.62, 78, 59] water_vapors1_default: standard_name: water_vapors1 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [278.96, 242.67, 261.03] max_stretch: [202.29, 214.66, 245.12] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [10, 5.5, 5.5] water_vapors2_default: standard_name: water_vapors2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [30, 278.15, 243.9] max_stretch: [-3, 213.15, 208.5] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [3.5, 2.5, 2.5] ncc_default: standard_name: ncc_radiance operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0] max_stretch: [0.075] realistic_colors: standard_name: realistic_colors operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [110, 110, 110] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.4, 1.4, 1.2] snow_age_default: 
standard_name: snow_age operations: - name: snow_age method: !!python/name:satpy.enhancements.lookup kwargs: luts: [[0, 0, 0], [1, 2, 2], [3, 8, 5], [4, 12, 8], [6, 15, 10], [8, 18, 13], [9, 21, 16], [11, 24, 19], [13, 26, 21], [14, 28, 24], [ 16, 30, 27], [18, 32, 30], [19, 34, 32], [21, 36, 35], [22, 38, 38], [24, 40, 40], [ 26, 42, 43], [27, 43, 46], [29, 45, 49], [31, 47, 51], [32, 49, 54], [34, 50, 57], [ 36, 52, 60], [37, 54, 62], [39, 55, 65], [40, 57, 68], [42, 59, 70], [44, 60, 73], [ 45, 62, 76], [47, 64, 79], [49, 66, 81], [50, 67, 84], [52, 69, 87], [53, 71, 90], [ 55, 73, 92], [56, 75, 95], [58, 77, 98], [59, 79, 100], [61, 81, 103], [62, 83, 106], [ 64, 85, 109], [65, 86, 111], [67, 88, 114], [68, 90, 117], [70, 92, 119], [71, 94, 121], [ 73, 96, 124], [74, 98, 126], [76, 100, 129], [77, 102, 131], [79, 104, 134], [80, 106, 136], [ 82, 107, 139], [83, 109, 141], [85, 111, 144], [86, 113, 146], [88, 115, 149], [89, 117, 151], [ 91, 118, 154], [92, 120, 156], [94, 122, 159], [95, 124, 161], [97, 126, 162], [98, 128, 164], [ 100, 129, 166], [101, 131, 168], [103, 133, 170], [104, 135, 172], [106, 137, 173], [ 107, 138, 175], [109, 140, 177], [110, 142, 179], [112, 144, 181], [113, 145, 183], [ 114, 147, 184], [116, 149, 186], [117, 151, 188], [118, 152, 190], [120, 154, 192], [ 121, 156, 193], [123, 158, 194], [124, 159, 196], [125, 161, 197], [127, 163, 199], [ 128, 165, 200], [130, 166, 202], [131, 168, 203], [132, 170, 205], [134, 172, 206], [ 135, 173, 206], [136, 175, 207], [138, 177, 208], [139, 178, 209], [141, 180, 210], [ 142, 182, 211], [143, 184, 212], [145, 185, 213], [146, 187, 214], [148, 189, 215], [ 149, 191, 216], [150, 192, 217], [152, 194, 218], [153, 196, 219], [154, 198, 220], [ 156, 200, 220], [157, 201, 221], [159, 203, 221], [160, 205, 222], [161, 207, 223], [ 162, 209, 223], [163, 210, 224], [164, 212, 225], [166, 213, 225], [167, 214, 226], [ 168, 216, 227], [169, 217, 227], [171, 218, 228], [173, 220, 228], [174, 221, 228], [ 175, 222, 229], [176, 224, 229], [177, 225, 229], [178, 226, 230], [179, 227, 230], [ 181, 228, 230], [182, 229, 231], [183, 230, 231], [184, 231, 232], [185, 232, 232], [ 186, 233, 232], [187, 234, 233], [188, 235, 233], [190, 236, 233], [191, 237, 234], [ 192, 237, 234], [193, 238, 234], [194, 239, 235], [195, 240, 235], [196, 240, 236], [ 196, 241, 236], [197, 242, 236], [198, 243, 237], [199, 243, 237], [200, 244, 237], [ 201, 245, 238], [202, 245, 238], [203, 245, 238], [204, 246, 239], [205, 246, 239], [ 206, 246, 239], [207, 247, 239], [208, 247, 239], [209, 247, 239], [209, 248, 240], [ 210, 248, 240], [210, 248, 240], [211, 248, 240], [212, 248, 240], [212, 248, 241], [ 213, 248, 241], [214, 248, 241], [215, 248, 241], [216, 248, 241], [217, 248, 242], [ 217, 248, 242], [218, 248, 242], [219, 248, 242], [219, 248, 242], [220, 248, 243], [ 221, 248, 243], [221, 249, 243], [222, 249, 243], [223, 249, 243], [223, 249, 244], [ 223, 249, 244], [224, 249, 244], [224, 249, 244], [225, 249, 245], [225, 249, 245], [ 226, 249, 245], [226, 249, 245], [227, 249, 245], [227, 249, 246], [228, 249, 246], [ 228, 250, 246], [229, 250, 246], [229, 250, 246], [230, 250, 247], [230, 250, 247], [ 231, 250, 247], [231, 250, 247], [232, 250, 247], [233, 250, 248], [233, 250, 248], [ 233, 250, 248], [234, 250, 248], [234, 250, 248], [234, 250, 249], [235, 251, 249], [ 235, 251, 249], [235, 251, 249], [236, 251, 249], [236, 251, 250], [237, 251, 250], [ 237, 251, 250], [237, 251, 250], [238, 251, 250], [238, 251, 250], [238, 251, 250], [ 239, 
251, 250], [239, 251, 250], [240, 251, 250], [240, 251, 250], [240, 252, 250], [ 241, 252, 250], [241, 252, 251], [241, 252, 251], [242, 252, 251], [242, 252, 251], [ 242, 252, 251], [243, 252, 251], [243, 252, 251], [244, 252, 251], [244, 252, 251], [ 244, 252, 251], [245, 252, 252], [245, 252, 252], [245, 253, 252], [246, 253, 252], [ 246, 253, 252], [247, 253, 252], [248, 253, 252], [248, 253, 252], [248, 253, 252], [ 249, 253, 252], [249, 253, 253], [249, 253, 253], [250, 253, 253], [250, 253, 253], [ 250, 253, 253], [250, 253, 253], [251, 254, 253], [251, 254, 253], [251, 254, 253], [ 252, 254, 253], [252, 254, 254], [252, 254, 254], [253, 254, 254], [253, 254, 254], [ 253, 254, 254], [253, 254, 254], [254, 254, 254], [254, 254, 254], [254, 254, 254], [254, 254, 254], [255, 255, 255]] night_microphysics_default: standard_name: night_microphysics operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 10, 293] ir_overview_default: standard_name: ir_overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: histogram ir108_3d: standard_name: ir108_3d operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [true] - name: 3d_filter method: !!python/name:satpy.enhancements.three_d_effect - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} ir_cloud_day: standard_name: ir_cloud_day operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [True, false] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - name: 3d method: !!python/name:satpy.enhancements.three_d_effect kwargs: weight: 1.0 colorized_ir_clouds: standard_name: colorized_ir_clouds operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: spectral, min_value: 193.15, max_value: 253.149999} - {colors: greys, min_value: 253.15, max_value: 303.15} vis_sharpened_ir: standard_name: vis_sharpened_ir operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] ir_sandwich: standard_name: ir_sandwich operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] natural_enh: standard_name: natural_enh operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [589, 95, 95] hrv_clouds: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 323] max_stretch: [100, 100, 203] standard_name: hrv_clouds hrv_fog: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [70, 100, 100] standard_name: hrv_fog hrv_severe_storms: standard_name: hrv_severe_storms operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [70, 70, -60] max_stretch: [100, 100, -40] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.7, 1.7, 2.0] hrv_severe_storms_masked: standard_name: hrv_severe_storms_masked operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude # MaskingCompositor 
always adds alpha channel min_stretch: [70, 70, -60, 0] max_stretch: [100, 100, -40, 1] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: # MaskingCompositor always adds alpha channel gamma: [1.7, 1.7, 2.0, 1.0] true_color_with_night_ir: standard_name: true_color_with_night_ir operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] night_background: standard_name: night_background operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [255, 255, 255] night_ir_alpha: standard_name: night_ir_alpha operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear, cutoffs: [0.02, 0.02]} - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true, true] night_ir_with_background: standard_name: night_ir_with_background operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] # so2_default: ## RGB SO2 recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/Quick_Guide_SO2_RGB.pdf standard_name: so2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, -4, 243.05] max_stretch: [ 2, 5, 302.95] tropical_airmass_default: standard_name: tropical_airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [26.2, 27.4, 243.9] max_stretch: [ 0.6, -26.2, 208.5] # SEADAS Cholorphyll A - MODIS or VIIRS chlor_a_default: name: chlor_a reader: seadas_l2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: log base: "10" factor: 21.0 min_stretch: 0.0 max_stretch: 20.0 satpy-0.34.0/satpy/etc/enhancements/glm.yaml000066400000000000000000000015021420401153000207500ustar00rootroot00000000000000enhancements: flash_extent_density: name: flash_extent_density operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: ylorrd, min_value: 0, max_value: 20} # Requires C14 from ABI c14_flash_extent_density: standard_name: c14_flash_extent_density operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] c14_yellow_lightning: standard_name: c14_yellow_lightning operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0, 0] max_stretch: [1, 1, 1, 1] satpy-0.34.0/satpy/etc/enhancements/mersi-2.yaml000066400000000000000000000010031420401153000214430ustar00rootroot00000000000000enhancements: default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} cloudtop_default: standard_name: cloudtop operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [320, 310, 310] max_stretch: [220, 220, 220] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.4, 1.4, 1.2] satpy-0.34.0/satpy/etc/enhancements/mhs.yaml000066400000000000000000000014441420401153000207650ustar00rootroot00000000000000enhancements: mw183_humidity: standard_name: mw183_humidity operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true] - name: stretch method: 
!!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} mw183_humidity_surface: standard_name: mw183_humidity_surface operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} satpy-0.34.0/satpy/etc/enhancements/mimic.yaml000066400000000000000000000066211420401153000212760ustar00rootroot00000000000000enhancements: tpwGrid: name: tpwGrid operations: - name: tpw_nrl_brightened method: !!python/name:satpy.enhancements.mimic.nrl_colors kwargs: palettes: {min_value: 0, max_value: 76} tpwGridPrior: name: tpwGridPrior operations: - name: tpw_nrl_brightened_prior method: !!python/name:satpy.enhancements.mimic.nrl_colors kwargs: palettes: {min_value: 0, max_value: 76} tpwGridSubseq: name: tpwGridSubseq operations: - name: tpw_nrl_brightened_subsequent method: !!python/name:satpy.enhancements.mimic.nrl_colors kwargs: palettes: {min_value: 0, max_value: 76} timeAwayGridPrior: name: timeAwayGridPrior operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: spectral, min_value: 0, max_value: 480, reverse: True} - {colors: rainbow, min_value: 0, max_value: 480} timeAwayGridSubseq: name: timeAwayGridSubseq operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: spectral, min_value: 0, max_value: 480, reverse: True } - { colors: rainbow, min_value: 0, max_value: 480 } footGridPrior: name: footGridPrior operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: rainbow, min_value: 9, max_value: 45 } footGridSubseq: name: footGridSubseq operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: rainbow, min_value: 9, max_value: 45 } satGridPrior: name: satGridPrior operations: - name: satellite_contribution method: !!python/name:satpy.enhancements.mimic.total_precipitable_water kwargs: palettes: { colors: [ [ 0, [ 136, 136, 136 ]], [ 1, [ 0, 0, 255 ]], [ 2, [ 0, 129, 255 ]], [ 3, [ 22, 255, 255 ]], [ 4, [ 125, 255, 122 ]], [ 5, [ 228, 255, 19 ]], [ 6, [ 255, 148, 0 ]], [ 7, [ 255, 30, 0 ]], [ 8, [ 128, 0, 0 ]], ], min_value: 0, max_value: 8 } satGridSubsq: name: satGridSubseq operations: - name: satellite_contribution method: !!python/name:satpy.enhancements.mimic.total_precipitable_water kwargs: palettes: { colors: [ [ 0, [ 136, 136, 136 ] ], [ 1, [ 0, 0, 255 ] ], [ 2, [ 0, 129, 255 ] ], [ 3, [ 22, 255, 255 ] ], [ 4, [ 125, 255, 122 ] ], [ 5, [ 228, 255, 19 ] ], [ 6, [ 255, 148, 0 ] ], [ 7, [ 255, 30, 0 ] ], [ 8, [ 128, 0, 0 ] ], ], min_value: 0, max_value: 8 } satpy-0.34.0/satpy/etc/enhancements/scatterometer.yaml000066400000000000000000000007621420401153000230610ustar00rootroot00000000000000enhancements: scat_wind_speed: name: scat_wind_speed operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: spectral, min_value: 0, max_value: 25} soil_moisture: standard_name: soil_moisture operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: rdbu, min_value: 0, max_value: 100} 
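The enhancement recipes above resolve each operation's "method" entry to a plain Python callable at run time. As a rough, illustrative sketch (not part of the repository), the snippet below applies the same kind of crude stretch plus gamma correction to a synthetic single-band array; the array contents, value range and output filename are invented for the example, and in normal use satpy.writers.get_enhanced_image looks up and applies the matching YAML recipe automatically.

    import numpy as np
    import xarray as xr
    from trollimage.xrimage import XRImage
    from satpy.enhancements import gamma, stretch

    # Synthetic stand-in for a single-band product, e.g. a wind speed field.
    data = xr.DataArray(np.linspace(0.0, 25.0, 10000).reshape(100, 100), dims=("y", "x"))
    img = XRImage(data)

    # Roughly equivalent to an operations list with a crude stretch followed by a gamma:
    #   - {name: stretch, method: !!python/name:satpy.enhancements.stretch,
    #      kwargs: {stretch: crude, min_stretch: 0.0, max_stretch: 25.0}}
    #   - {name: gamma, method: !!python/name:satpy.enhancements.gamma, kwargs: {gamma: 1.5}}
    stretch(img, stretch="crude", min_stretch=0.0, max_stretch=25.0)
    gamma(img, gamma=1.5)
    img.save("wind_speed.png")  # hypothetical output path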
satpy-0.34.0/satpy/etc/enhancements/seviri.yaml000066400000000000000000000025601420401153000214770ustar00rootroot00000000000000enhancements: hrv: standard_name: hrv operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, ] max_stretch: [100, ] ir108: standard_name: ir108 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [300, ] max_stretch: [215, ] vis06_filled_hrv: standard_name: vis06_filled_hrv operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, ] max_stretch: [100, ] vis_with_ir: standard_name: vis_with_ir operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, ] max_stretch: [1, ] vis_with_ir_cloud_overlay: standard_name: vis_with_ir_cloud_overlay operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0] max_stretch: [1, 1] ct: standard_name: ct operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, ] max_stretch: [255, ] satpy-0.34.0/satpy/etc/enhancements/tropomi.yaml000066400000000000000000000016461420401153000216730ustar00rootroot00000000000000enhancements: no2_tropospheric_clean: standard_name: no2_tropospheric_clean operations: - name: colorize_no2_tropospheric_clean method: !!python/name:satpy.enhancements.colorize kwargs: palettes: # 0 to 50 10e-6 mol/m2 - {colors: ylgnbu, min_value: 0.0, max_value: 0.00005, reverse: true} # 50 to 90 10e-6 mol/m2 - {colors: ylorrd, min_value: 0.00005, max_value: 0.00009} no2_tropospheric_polluted: standard_name: no2_tropospheric_polluted operations: - name: colorize_no2_tropospheric_poulluted method: !!python/name:satpy.enhancements.colorize kwargs: palettes: # 0 to 120 10e-6 mol/m2 - {colors: ylgnbu, min_value: 0.0, max_value: 0.00012, reverse: true} # 120 to 600 10e-6 mol/m2 - {colors: ylorrd, min_value: 0.00012, max_value: 0.0006} satpy-0.34.0/satpy/etc/enhancements/viirs.yaml000066400000000000000000000104061420401153000213300ustar00rootroot00000000000000enhancements: # data comes out of the compositor normalized to 0-1 # this makes sure that we aren't dependent on the default dynamic stretch # which would have the same end result dynamic_dnb: name: dynamic_dnb operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 1.0} water_detection: name: WaterDetection operations: - name: WaterDetection method: !!python/name:satpy.enhancements.viirs.water_detection kwargs: palettes: {colors: [[14, [0.0, 0.0, 0.0]], [15, [0.0, 0.0, 0.39215686274509803]], [16, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118]], [17, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118]], [18, [0.0, 0.0, 1.0]], [20, [1.0, 1.0, 1.0]], [27, [0.0, 1.0, 1.0]], [30, [0.7843137254901961, 0.7843137254901961, 0.7843137254901961]], [31, [0.39215686274509803, 0.39215686274509803, 0.39215686274509803]], [88, [0.7058823529411765, 0.0, 0.9019607843137255]], [100, [0.19607843137254902, 1.0, 0.39215686274509803]], [120, [0.19607843137254902, 1.0, 0.39215686274509803]], [121, [0.0, 1.0, 0.0]], [130, [0.0, 1.0, 0.0]], [131, [0.7843137254901961, 1.0, 0.0]], [140, [0.7843137254901961, 1.0, 0.0]], [141, [1.0, 1.0, 0.5882352941176471]], [150, [1.0, 1.0, 0.5882352941176471]], [151, [1.0, 1.0, 0.0]], [160, [1.0, 1.0, 0.0]], 
[161, [1.0, 0.7843137254901961, 0.0]], [170, [1.0, 0.7843137254901961, 0.0]], [171, [1.0, 0.5882352941176471, 0.19607843137254902]], [180, [1.0, 0.5882352941176471, 0.19607843137254902]], [181, [1.0, 0.39215686274509803, 0.0]], [190, [1.0, 0.39215686274509803, 0.0]], [191, [1.0, 0.0, 0.0]], [200, [1.0, 0.0, 0.0]], [201, [0.0, 0.0, 0.0]]], min_value: 0, max_value: 201} # palettes: {colors: # [[14, [0.0, 0.0, 0.0, 0.0]], # [15, [0.0, 0.0, 0.39215686274509803, 1.0]], # [16, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118, 1.0]], # [17, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118, 1.0]], # [18, [0.0, 0.0, 1.0, 1.0]], # [20, [1.0, 1.0, 1.0, 1.0]], # [27, [0.0, 1.0, 1.0, 1.0]], # [30, [0.7843137254901961, 0.7843137254901961, 0.7843137254901961, 1.0]], # [31, [0.39215686274509803, 0.39215686274509803, 0.39215686274509803, 1.0]], # [88, [0.7058823529411765, 0.0, 0.9019607843137255, 1.0]], # [100, [0.19607843137254902, 1.0, 0.39215686274509803, 1.0]], # [120, [0.19607843137254902, 1.0, 0.39215686274509803, 1.0]], # [121, [0.0, 1.0, 0.0, 1.0]], # [130, [0.0, 1.0, 0.0, 1.0]], # [131, [0.7843137254901961, 1.0, 0.0, 1.0]], # [140, [0.7843137254901961, 1.0, 0.0, 1.0]], # [141, [1.0, 1.0, 0.5882352941176471, 1.0]], # [150, [1.0, 1.0, 0.5882352941176471, 1.0]], # [151, [1.0, 1.0, 0.0, 1.0]], # [160, [1.0, 1.0, 0.0, 1.0]], # [161, [1.0, 0.7843137254901961, 0.0, 1.0]], # [170, [1.0, 0.7843137254901961, 0.0, 1.0]], # [171, [1.0, 0.5882352941176471, 0.19607843137254902, 1.0]], # [180, [1.0, 0.5882352941176471, 0.19607843137254902, 1.0]], # [181, [1.0, 0.39215686274509803, 0.0, 1.0]], # [190, [1.0, 0.39215686274509803, 0.0, 1.0]], # [191, [1.0, 0.0, 0.0, 1.0]], # [200, [1.0, 0.0, 0.0, 1.0]], # [201, [0.0, 0.0, 0.0, 0.0]]], # min_value: 0, # max_value: 201} satpy-0.34.0/satpy/etc/eps_avhrrl1b_6.5.xml000066400000000000000000002352121420401153000203460ustar00rootroot00000000000000 40 april04 50 nov05 100 launch current 65 EPS AVHRR/3 Level 1B Format This AVHRR/3 1B description was generated using the AVHRR/3 PFS Excel document Issue 6 Revision 5 (eps_avhrrl1_6.5_names_masks.xls) and pfs2xml version 3.3 AVHR_*1B_*Z* Geolocation AVHRR Geolocation Coverage (Latitude, Longitude) mdr-1b[].EARTH_LOCATIONS[][0] mdr-1b[].EARTH_LOCATIONS[][1] Channel 1 AVHRR Scene Radiance for Channel 1 mdr-1b[].SCENE_RADIANCES[0][] Geolocation Channel 2 AVHRR Scene Radiance for Channel 2 mdr-1b[].SCENE_RADIANCES[1][] Geolocation Channel 3a/b AVHRR Scene Radiance for Channel 3a/b mdr-1b[].SCENE_RADIANCES[2][] Geolocation Channel 4 AVHRR Scene Radiance for Channel 4 mdr-1b[].SCENE_RADIANCES[3][] Geolocation Channel 5 AVHRR Scene Radiance for Channel 5 mdr-1b[].SCENE_RADIANCES[4][] Geolocation Uniformity Test A AVHRR Cloud Information Uniformity Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#Uniformity_test_a Geolocation Uniformity Test B AVHRR Cloud Information Uniformity Test A (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#Uniformity_test_b Geolocation T3-T5 Test A AVHRR Cloud Information T3-T5 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T3_T5_test_a Geolocation T3-T5 Test B AVHRR Cloud Information T3-T5 Test B (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#T3_T5_test_b Geolocation T4-T3 Test A AVHRR Cloud Information T4-T3 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T4_T3_test_a Geolocation T4-T3 Test B AVHRR Cloud Information T4-T3 Test B (0 =test failed or cloudy, 1=clear) 
mdr-1b[].CLOUD_INFORMATION[].#T4_T3_test_b Geolocation T4-T5 Test A AVHRR Cloud Information T4-T5 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T4_T5_test_a Geolocation T4-T5 Test B AVHRR Cloud Information T4-T5 Test B (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#T4_T5_test_b Geolocation Albedo Test A AVHRR Cloud Information Albedo Test A (0=test failed or clear, 1=cloudy or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#Albedo_test_a Geolocation Albedo Test B AVHRR Cloud Information Albedo Test B (0 =test failed or cloudy, 1=clear or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#Albedo_test_b Geolocation T4 Test A AVHRR Cloud Information T4 Test A (0=test failed or clear, 1=cloudy or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#T4_test_a Geolocation T4 Test B AVHRR Cloud Information T4 Test B (0 =test failed or cloudy, 1=clear or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#T4_test_b Geolocation Test Situations AVHRR Cloud Information - number of the test situations (11 different test situations) mdr-1b[].CLOUD_INFORMATION[].#Number_of_the_test_situation Geolocation eps-product satpy-0.34.0/satpy/etc/ninjo-cmd.yaml000066400000000000000000000004071420401153000174020ustar00rootroot00000000000000# sample config file holding NinJo Tiff metadata # to be passed to the convert_to_ninjotif.py script # as an alternative to the command line input chan_id : 662626 sat_id : 1622 data_cat : GPRN data_src : EUMETCAST area : nrEURO1km_NPOL_COALeqc ph_unit : CELSIUS satpy-0.34.0/satpy/etc/readers/000077500000000000000000000000001420401153000162645ustar00rootroot00000000000000satpy-0.34.0/satpy/etc/readers/abi_l1b.yaml000066400000000000000000000420071420401153000204440ustar00rootroot00000000000000# References: # - GOES-R Series Data Book, Chapter 3 # # Note: Channels < 3 microns have different units than channels > 3 microns reader: name: abi_l1b short_name: ABI L1b long_name: GOES-R ABI Level 1b description: > GOES-R ABI Level 1b data reader in the NetCDF4 format. The file format is described in the GOES-R Product Definition and Users' Guide (PUG). Volume 4 of this document can be found `here `_. 
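# Illustrative usage sketch only, not part of the reader definition: the entries in
# this file are consumed through Satpy's Scene API. A minimal, hypothetical session
# (the file paths below are invented) could look like:
#
#   from glob import glob
#   from satpy import Scene
#
#   filenames = glob("/data/goes16/OR_ABI-L1b-RadF-M6C*_G16_*.nc")
#   scn = Scene(reader="abi_l1b", filenames=filenames)
#   scn.load(["C01", "C13"])   # reflectance and brightness temperature by default
#   print(scn["C13"].attrs["calibration"], scn["C13"].attrs["units"])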
sensors: [abi] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] file_types: # NOTE: observation_type == product acronym in PUG document # "suffix" is an arbitrary suffix that may be added during third-party testing (see PR #1380) c01: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c02: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c03: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c04: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c05: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', 
'{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c06: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c07: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c08: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c09: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c10: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c11: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', 
'{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c12: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c13: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c14: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c15: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c16: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] datasets: C01: name: C01 wavelength: [0.450, 0.470, 0.490] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c01 C02: name: C02 wavelength: [0.590, 0.640, 0.690] resolution: 500 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c02 C03: name: C03 wavelength: [0.8455, 0.865, 0.8845] 
resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c03 C04: name: C04 wavelength: [1.3705, 1.378, 1.3855] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c04 C05: name: C05 wavelength: [1.580, 1.610, 1.640] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c05 C06: name: C06 wavelength: [2.225, 2.250, 2.275] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c06 C07: name: C07 wavelength: [3.80, 3.90, 4.00] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c07 C08: name: C08 wavelength: [5.770, 6.185, 6.600] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c08 C09: name: C09 wavelength: [6.75, 6.95, 7.15] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c09 C10: name: C10 wavelength: [7.24, 7.34, 7.44] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c10 C11: name: C11 wavelength: [8.30, 8.50, 8.70] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c11 C12: name: C12 wavelength: [9.42, 9.61, 9.80] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c12 C13: name: C13 wavelength: [10.10, 10.35, 10.60] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c13 C14: name: C14 wavelength: [10.80, 11.20, 11.60] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c14 
C15: name: C15 wavelength: [11.80, 12.30, 12.80] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c15 C16: name: C16 wavelength: [13.00, 13.30, 13.60] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c16 satpy-0.34.0/satpy/etc/readers/abi_l1b_scmi.yaml000066400000000000000000000304301420401153000214540ustar00rootroot00000000000000reader: description: SCMI NetCDF4 Reader for ABI data name: abi_l1b_scmi sensors: [] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # Typical filenames from Unidata THREDDS server: # Server: http://thredds-test.unidata.ucar.edu/thredds/catalog/satellite/goes16/GOES16/CONUS/Channel02/current/catalog.xml # satellite/goes16/GOES16/CONUS/Channel02/current/GOES16_CONUS_20180912_201712_0.64_500m_30.1N_87.1W.nc4 # Typical names for operational/official SCMI files (CSPP Geo, etc): # CG_EFD-005-B12-M3C02-T131_G16_s2018257024530_c2018257132258.nc # Operational/official SCMI files for Meso sectors: # CG_EMESO-020-B14-S1-N34W077-M3C07-T001_G16_s2018257000420_c2018257142255.nc file_types: c01: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.47_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C01-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C01-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c02: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.64_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C02-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C02-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c03: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.87_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C03-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C03-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c04: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_1.38_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C04-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C04-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c05: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - 
'{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_1.61_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C05-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C05-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c06: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_2.25_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C06-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C06-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c07: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_3.90_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C07-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C07-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c08: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_6.19_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C08-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C08-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c09: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_6.95_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C09-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C09-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c10: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_7.34_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C10-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C10-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c11: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_8.50_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C11-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C11-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c12: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_9.61_{res}_{lat}_{lon}.nc{nc_version}' - 
'{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C12-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C12-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c13: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_10.35_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C13-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C13-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c14: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_11.20_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C14-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C14-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c15: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_12.30_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C15-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C15-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c16: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_13.30_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C16-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C16-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' datasets: C01: name: C01 sensor: abi wavelength: [0.450, 0.470, 0.490] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c01 C02: name: C02 sensor: abi wavelength: [0.590, 0.640, 0.690] resolution: 500 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c02 C03: name: C03 sensor: abi wavelength: [0.8455, 0.865, 0.8845] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c03 C04: name: C04 sensor: abi wavelength: [1.3705, 1.378, 1.3855] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c04 C05: name: C05 sensor: abi wavelength: [1.580, 1.610, 1.640] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c05 C06: name: C06 sensor: abi wavelength: [2.225, 2.250, 2.275] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c06 C07: name: C07 sensor: abi wavelength: [3.80, 3.90, 4.00] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c07 C08: name: C08 sensor: abi wavelength: 
[5.770, 6.185, 6.600] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c08 C09: name: C09 sensor: abi wavelength: [6.75, 6.95, 7.15] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c09 C10: name: C10 sensor: abi wavelength: [7.24, 7.34, 7.44] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c10 C11: name: C11 sensor: abi wavelength: [8.30, 8.50, 8.70] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c11 C12: name: C12 sensor: abi wavelength: [9.42, 9.61, 9.80] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c12 C13: name: C13 sensor: abi wavelength: [10.10, 10.35, 10.60] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c13 C14: name: C14 sensor: abi wavelength: [10.80, 11.20, 11.60] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c14 C15: name: C15 sensor: abi wavelength: [11.80, 12.30, 12.80] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c15 C16: name: C16 sensor: abi wavelength: [13.00, 13.30, 13.60] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c16 satpy-0.34.0/satpy/etc/readers/abi_l2_nc.yaml000066400000000000000000000531731420401153000207710ustar00rootroot00000000000000reader: name: abi_l2_nc short_name: ABI L2 NetCDF4 long_name: GOES-R ABI Level 2 NetCDF4 description: > GOES-R ABI Level 2+ data reader in the NetCDF4 format. The file format is described in the GOES-R Product Definition and Users' Guide (PUG) Volume 5. This document can be found `here `_. 
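  # Illustrative usage (comment only, not part of the reader definition): a minimal
  # sketch of reading ABI L2 files with this reader through satpy's Scene API. The
  # name `l2_files` is a placeholder for a list of NetCDF file paths matching the
  # patterns defined further down in this file:
  #
  #   from satpy import Scene
  #   scn = Scene(reader='abi_l2_nc', filenames=l2_files)
  #   scn.load(['HT'])                  # e.g. the cloud top height dataset defined below
  #   print(scn['HT'].attrs['units'])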
sensors: ['abi'] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] datasets: # --- Cloud Moisture Image Products --- CMIP_C01: # Cloud Moisture Image Products Channel 1 name: C01 wavelength: [0.450, 0.470, 0.490] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c01, abi_l2_mcmip] CMIP_C02: # Cloud Moisture Image Products Channel 2 name: C02 wavelength: [0.590, 0.640, 0.690] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c02, abi_l2_mcmip] CMIP_C03: # Cloud Moisture Image Products Channel 3 name: C03 wavelength: [0.8455, 0.865, 0.8845] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c03, abi_l2_mcmip] CMIP_C04: # Cloud Moisture Image Products Channel 4 name: C04 wavelength: [1.3705, 1.378, 1.3855] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c04, abi_l2_mcmip] CMIP_C05: # Cloud Moisture Image Products Channel 5 name: C05 wavelength: [1.580, 1.610, 1.640] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c05, abi_l2_mcmip] CMIP_C06: # Cloud Moisture Image Products Channel 6 name: C06 wavelength: [2.225, 2.250, 2.275] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c06, abi_l2_mcmip] CMIP_C07: # Cloud Moisture Image Products Channel 7 name: C07 wavelength: [3.80, 3.90, 4.00] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c07, abi_l2_mcmip] CMIP_C08: # Cloud Moisture Image Products Channel 8 name: C08 wavelength: [5.770, 6.185, 6.600] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c08, abi_l2_mcmip] CMIP_C09: # Cloud Moisture Image Products Channel 9 name: C09 wavelength: [6.75, 6.95, 7.15] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c09, abi_l2_mcmip] CMIP_C10: # Cloud Moisture Image Products Channel 10 name: C10 wavelength: [7.24, 7.34, 7.44] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c10, abi_l2_mcmip] CMIP_C11: # Cloud Moisture Image Products Channel 11 name: C11 wavelength: [8.30, 8.50, 8.70] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c11, abi_l2_mcmip] CMIP_C12: # Cloud Moisture Image Products Channel 12 name: C12 wavelength: [9.42, 9.61, 9.80] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c12, abi_l2_mcmip] CMIP_C13: # Cloud Moisture Image Products Channel 13 name: C13 wavelength: [10.10, 10.35, 10.60] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c13, abi_l2_mcmip] CMIP_C14: # Cloud Moisture Image Products Channel 14 name: C14 wavelength: [10.80, 11.20, 11.60] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c14, abi_l2_mcmip] CMIP_C15: # Cloud Moisture Image Products Channel 15 name: C15 wavelength: [11.80, 12.30, 12.80] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c15, abi_l2_mcmip] CMIP_C16: # Cloud Moisture Image Products Channel 16 name: C16 wavelength: [13.00, 13.30, 13.60] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c16, abi_l2_mcmip] # --- Cloud Top Height --- cloud_top_height: name: HT file_type: abi_l2_acha file_key: HT # variable name in the nc files # --- Cloud Top Temperature --- cloud_top_temperature: name: TEMP file_type: abi_l2_acht file_key: TEMP # --- Cloud Top Phase --- cloud_top_phase: name: Phase file_type: abi_l2_actp file_key: Phase # --- Clear Sky 
Mask --- clear_sky_mask: name: BCM file_type: abi_l2_acm file_key: BCM four_level_cloud_mask: name: ACM file_type: abi_l2_acm file_key: ACM cloud_probabilities: name: Cloud_Probabilities file_type: abi_l2_acm file_key: Cloud_Probabilities # --- Aerosol Detection Products --- aerosol_binary_mask: name: Aerosol file_type: abi_l2_adp file_key: Aerosol smoke_binary_mask: name: Smoke file_type: abi_l2_adp file_key: Smoke dust_binary_mask: name: Dust file_type: abi_l2_adp file_key: Dust # --- Aerosol Optical Depth at 550 nm --- aerosol_optical_depth: name: AOD file_type: abi_l2_aod file_key: AOD # --- Cloud Optical Depth at 640 nm --- cloud_optical_depth: name: COD file_type: abi_l2_cod file_key: COD cloud_optical_depth_day: name: CODD file_type: abi_l2_codd file_key: COD cloud_optical_depth_night: name: CODN file_type: abi_l2_codn file_key: COD # --- Cloud Particle Size --- cloud_particle_size: name: PSD file_type: abi_l2_cps file_key: PSD cloud_particle_size_day: name: PSDD file_type: abi_l2_cpsd file_key: PSD cloud_particle_size_night: name: PSDN file_type: abi_l2_cpsn file_key: PSD # --- Cloud Top Pressure --- cloud_top_pressure: name: PRES file_type: abi_l2_ctp file_key: PRES # --- Derived Stability Indices --- cape: name: CAPE file_type: abi_l2_dsi file_key: CAPE total_totals_index: name: TT file_type: abi_l2_dsi file_key: TT lifted_index: name: LI file_type: abi_l2_dsi file_key: LI showalter_index: name: SI file_type: abi_l2_dsi file_key: SI k_index: name: KI file_type: abi_l2_dsi file_key: KI # --- Fire (Hot Spot Characterization) Products --- fire_area: name: Area file_type: abi_l2_fdc file_key: Area fire_temp: name: Temp file_type: abi_l2_fdc file_key: Temp radiative_power: name: Power file_type: abi_l2_fdc file_key: Power fire_mask: name: Mask file_type: abi_l2_fdc file_key: Mask # --- Snow Cover --- snow_cover_fraction: name: FSC file_type: abi_l2_fsc file_key: FSC # --- Reflected Shortwave Radiation --- reflected_shortwave_radiation: name: RSR file_type: abi_l2_rsr file_key: RSR # coordinates: [lon, lat] # --- Downward Shortwave Radiation: Surface --- downward_shortwave_radiation: name: DSR file_type: abi_l2_dsr file_key: DSR # coordinates: [lon, lat] # --- Land Surface (Skin) Temperature --- land_surface_temperature: name: LST file_type: abi_l2_lst file_key: LST # --- Sea Surface (Skin) Temperature --- sea_surface_temperature: name: SST file_type: abi_l2_sst file_key: SST # --- Rainfall Rate - Quantitative Prediction Estimate --- rainfall_rate: name: RRQPE file_type: abi_l2_rrqpe file_key: RRQPE # --- Total Precipitalable Water --- total_precipitalable_water: name: TPW file_type: abi_l2_tpw file_key: TPW # ---Volcanic Ash Products --- ash_cloud_height: name: VAH file_type: abi_l2_vaa file_key: VAH ash_mass_loading: name: VAML file_type: abi_l2_vaa file_key: VAML # ---Navigation Products - Unofficial --- nav_longitude: name: Longitude file_type: abi_l2_nav file_key: Longitude nav_latitude: name: Latitude file_type: abi_l2_nav file_key: Latitude # ---- file_types: abi_l2_cmip_c01: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c02: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c03: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c04: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c05: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c06: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c07: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c08: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c09: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c10: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c11: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c12: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c13: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c14: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c15: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c16: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:1s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_mcmip: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-MCMIP{scene_abbr:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "MCMIP" abi_l2_acha: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ACHA" abi_l2_acht: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHT{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ACHT" abi_l2_acm: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ACM" abi_l2_actp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ACTP" abi_l2_adp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ADP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ADP" abi_l2_aod: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-AOD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "AOD" abi_l2_cod: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-COD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] 
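    # Illustrative comment only: a hypothetical Full Disk filename matching the COD
    # pattern above (all field values invented for this example) would look like
    # OR_ABI-L2-CODF-M6_G16_s20221821800207_e20221821809517_c20221821811412.nc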
observation_type: "COD" # CSPP Geo keeps Day and Night algorithm outputs separate abi_l2_codd: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CODD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "CODD" abi_l2_codn: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CODN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "CODN" abi_l2_cps: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CPS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CPS" # CSPP Geo keeps Day and Night algorithm outputs separate abi_l2_cpsd: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CPSD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "CPSD" abi_l2_cpsn: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CPSN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "CPSN" abi_l2_ctp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CTP" abi_l2_dsi: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSI{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "DSI" abi_l2_drs: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DRS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "DRS" abi_l2_fdc: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FDC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "FDC" abi_l2_fsc: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FSC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "FSC" abi_l2_lst: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-LST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "LST" abi_l2_rrqpe: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-RRQPE{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "RRQPE" abi_l2_rsr: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "RSR" abi_l2_dsr: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "DSR" abi_l2_sst: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-SST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "SST" abi_l2_tpw: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-TPW{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "TPW" abi_l2_vaa: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc', '{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-128600_0.nc'] observation_type: "VAA" # CSPP - Geo Unofficial product abi_l2_nav: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-NAV{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "NAV" satpy-0.34.0/satpy/etc/readers/acspo.yaml000066400000000000000000000020741420401153000202600ustar00rootroot00000000000000reader: description: NOAA Level 2 Product (L2P) ACSPO SST File Reader name: acspo reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs,modis,avhrr] default_datasets: file_types: acspo_sst: file_reader: !!python/name:satpy.readers.acspo.ACSPOFileHandler file_patterns: ['{start_time:%Y%m%d%H%M%S}-{rdac:4s}-L2P_GHRSST-{dataset_name}-{sensor_id}-ACSPO_V{version}-v{gds_version}-fv{file_version}.nc'] datasets: longitude: name: longitude file_type: acspo_sst file_key: lon latitude: name: latitude file_type: acspo_sst file_key: lat sst: name: sst coordinates: [longitude, latitude] file_type: acspo_sst file_key: sea_surface_temperature cloud_clear: True satellite_zenith_angle: name: satellite_zenith_angle coordinates: [longitude, latitude] file_type: acspo_sst sea_ice_fraction: name: sea_ice_fraction coordinates: [longitude, latitude] file_type: acspo_sst wind_speed: name: wind_speed coordinates: [longitude, latitude] file_type: acspo_sst satpy-0.34.0/satpy/etc/readers/agri_l1.yaml000077500000000000000000000217541420401153000205020ustar00rootroot00000000000000# References: # - L1_SDR Data of FY4A Advanced Geostationary Radiation Imager # - 
http://fy4.nsmc.org.cn/data/en/data/realtime.html reader: name: agri_l1 description: FY-4A AGRI instrument HDF5 reader sensors: [agri] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: agri_l1_0500m: file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1 file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF'] agri_l1_1000m: file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1 file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_1000M_{version:s}.HDF'] agri_l1_2000m: file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1 file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF'] agri_l1_4000m: file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1 file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF'] agri_l1_4000m_geo: file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1 file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF'] datasets: C01: name: C01 wavelength: [0.45, 0.47, 0.49] resolution: 1000: {file_type: agri_l1_1000m} 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel01 lut_key: CALChannel01 C02: name: C02 wavelength: [0.55, 0.65, 0.75] resolution: 500: {file_type: agri_l1_0500m} 1000: {file_type: agri_l1_1000m} 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel02 lut_key: CALChannel02 C03: name: C03 wavelength: [0.75, 0.83, 0.90] resolution: 1000: {file_type: agri_l1_1000m} 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel03 lut_key: CALChannel03 C04: name: C04 wavelength: [1.36, 1.37, 1.39] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel04 lut_key: CALChannel04 C05: name: C05 wavelength: [1.58, 1.61, 1.64] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel05 lut_key: CALChannel05 C06: name: C06 wavelength: [2.10, 2.22, 2.35] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel06 lut_key: CALChannel06 C07: name: C07 wavelength: [3.5, 3.72, 4.0] resolution: 2000: {file_type: 
agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel07 lut_key: CALChannel07 C08: name: C08 wavelength: [3.5, 3.72, 4.0] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel08 lut_key: CALChannel08 file_type: agri_l1_4000m C09: name: C09 wavelength: [5.8, 6.25, 6.7] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel09 lut_key: CALChannel09 file_type: agri_l1_4000m C10: name: C10 wavelength: [6.9, 7.10, 7.3] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel10 lut_key: CALChannel10 file_type: agri_l1_4000m C11: name: C11 wavelength: [8.0, 8.5, 9.0] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel11 lut_key: CALChannel11 file_type: agri_l1_4000m C12: name: C12 wavelength: [10.3, 10.8, 11.1] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel12 lut_key: CALChannel12 file_type: agri_l1_4000m C13: name: C13 wavelength: [11.5, 12.0, 12.5] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel13 lut_key: CALChannel13 file_type: agri_l1_4000m C14: name: C14 wavelength: [13.2, 13.5, 13.8] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel14 lut_key: CALChannel14 file_type: agri_l1_4000m solar_zenith_angle: name: solar_zenith_angle units: degree standard_name: solar_zenith_angle resolution: 4000 file_type: agri_l1_4000m_geo file_key: NOMSunZenith solar_azimuth_angle: name: solar_azimuth_angle units: degree standard_name: solar_azimuth_angle resolution: 4000 file_type: agri_l1_4000m_geo file_key: NOMSunAzimuth solar_glint_angle: name: solar_glint_angle units: degree standard_name: solar_glint_angle resolution: 4000 file_type: agri_l1_4000m_geo file_key: NOMSunGlintAngle satellite_zenith_angle: name: satellite_zenith_angle units: degree standard_name: satellite_zenith_angle resolution: 4000 file_type: agri_l1_4000m_geo file_key: NOMSatelliteZenith satellite_azimuth_angle: name: satellite_azimuth_angle units: degree 
standard_name: satellite_azimuth_angle resolution: 4000 file_type: agri_l1_4000m_geo file_key: NOMSatelliteAzimuth satpy-0.34.0/satpy/etc/readers/ahi_hrit.yaml000066400000000000000000000313751420401153000207500ustar00rootroot00000000000000# References: # - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/spsg_ahi.html # - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html reader: name: ahi_hrit short_name: AHI HRIT long_name: Himawari AHI Level 1 (HRIT) description: Reader for the JMA Himawari AHI Level 1 data in HRIT format sensors: [ahi] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'area'] file_types: hrit_b01_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b01_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}' hrit_b02_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b02_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}' hrit_b03_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b03_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}' hrit_b04_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b04_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}' hrit_b05_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b05_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}' hrit_b06_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b06_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}' hrit_b07_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler # B07 are high resolution versions of IR4 at night # See section 1.3 of # https://www.data.jma.go.jp/mscweb/en/himawari89/himawari_cast/note/HimawariCast_dataset_20150624_en.pdf file_patterns: - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b07_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler # B07 are high resolution versions of IR4 at night # See section 1.3 of # https://www.data.jma.go.jp/mscweb/en/himawari89/himawari_cast/note/HimawariCast_dataset_20150624_en.pdf file_patterns: - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}' hrit_b07_ir4_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 
'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b07_ir4_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}' hrit_b08_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b08_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}' hrit_b09_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b09_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}' hrit_b10_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b10_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}' hrit_b11_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b11_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}' hrit_b12_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b12_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}' hrit_b13_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b13_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}' hrit_b14_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b14_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}' hrit_b15_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b15_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}' hrit_b16_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_b16_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}' datasets: B01: name: B01 sensor: ahi wavelength: [0.45,0.47,0.49] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b01_seg, hrit_b01_fd] B02: name: B02 sensor: ahi wavelength: [0.49,0.51,0.53] resolution: 1000 calibration: reflectance: standard_name: 
toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b02_seg, hrit_b02_fd] B03: name: B03 sensor: ahi wavelength: [0.62,0.64,0.66] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b03_seg, hrit_b03_fd] B04: name: B04 sensor: ahi wavelength: [0.83, 0.85, 0.87] resolution: 4000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b04_seg, hrit_b04_fd] B05: name: B05 sensor: ahi wavelength: [1.5, 1.6, 1.7] resolution: 4000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b05_seg, hrit_b05_fd] B06: name: B06 sensor: ahi wavelength: [2.2, 2.3, 2.4] resolution: 4000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b06_seg, hrit_b06_fd] B07_low_res: name: B07 resolution: 4000 # resolution: 2000 sensor: ahi wavelength: [3.7, 3.9, 4.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 # FUTURE: Split this in to multiple resolutions so each can be loaded file_type: [hrit_b07_seg, hrit_b07_ir4_seg, hrit_b07_fd, hrit_b07_ir4_fd] # B07_high_res: # name: B07 # resolution: 2000 # sensor: ahi # wavelength: [3.7, 3.9, 4.1] # calibration: # brightness_temperature: # standard_name: toa_brightness_temperature # units: "K" # counts: # standard_name: counts # units: 1 # file_type: hrit_b07 B08: name: B08 sensor: ahi wavelength: [6.0, 6.2, 6.4] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b08_seg, hrit_b08_fd] B09: name: B09 sensor: ahi wavelength: [6.7, 6.9, 7.1] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b09_seg, hrit_b09_fd] B10: name: B10 sensor: ahi wavelength: [7.1, 7.3, 7.5] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b10_seg, hrit_b10_fd] B11: name: B11 sensor: ahi wavelength: [8.4, 8.6, 8.8] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b11_seg, hrit_b11_fd] B12: name: B12 sensor: ahi wavelength: [9.4, 9.6, 9.8] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b12_seg, hrit_b12_fd] B13: name: B13 sensor: ahi wavelength: [10.2, 10.4, 10.6] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b13_seg, hrit_b13_fd] B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b14_seg, hrit_b14_fd] B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: 
counts units: 1 file_type: [hrit_b15_seg, hrit_b15_fd] B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b16_seg, hrit_b16_fd] satpy-0.34.0/satpy/etc/readers/ahi_hsd.yaml000066400000000000000000000301101420401153000205420ustar00rootroot00000000000000# References: # - Himawari-8/9 Himawari Standard Data User's Guide reader: name: ahi_hsd short_name: AHI HSD long_name: Himawari AHI Level 1b (HSD) description: Reader for the JMA Himawari AHI Level 1 data in HSD format reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader sensors: [ahi] # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'area'] datasets: B01: name: B01 sensor: ahi wavelength: [0.45,0.47,0.49] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b01 B02: name: B02 sensor: ahi wavelength: [0.49,0.51,0.53] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b02 B03: name: B03 sensor: ahi wavelength: [0.62,0.64,0.66] resolution: 500 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b03 B04: name: B04 sensor: ahi wavelength: [0.83, 0.85, 0.87] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b04 B05: name: B05 sensor: ahi wavelength: [1.5, 1.6, 1.7] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b05 B06: name: B06 sensor: ahi wavelength: [2.2, 2.3, 2.4] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b06 B07: name: B07 sensor: ahi wavelength: [3.7, 3.9, 4.1] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b07 B08: name: B08 sensor: ahi wavelength: [6.0, 6.2, 6.4] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b08 B09: name: B09 sensor: ahi wavelength: [6.7, 6.9, 7.1] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 
counts: standard_name: counts units: 1 file_type: hsd_b09 B10: name: B10 sensor: ahi wavelength: [7.1, 7.3, 7.5] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b10 B11: name: B11 sensor: ahi wavelength: [8.4, 8.6, 8.8] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b11 B12: name: B12 sensor: ahi wavelength: [9.4, 9.6, 9.8] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b12 B13: name: B13 sensor: ahi wavelength: [10.2, 10.4, 10.6] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b13 B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b14 B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b15 B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b16 file_types: hsd_b01: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b02: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b03: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R05_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b04: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b05: file_reader: 
!!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b06: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b07: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b08: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b09: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b10: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b11: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b12: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b13: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b14: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b15: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b16: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: 
['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] satpy-0.34.0/satpy/etc/readers/ahi_l1b_gridded_bin.yaml000066400000000000000000000215731420401153000227710ustar00rootroot00000000000000# References: # - Himawari-8/9 Grided data website: http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html reader: name: ahi_l1b_gridded_bin short_name: AHI Gridded long_name: Himawari AHI Level 1b (gridded) description: Reader for the JMA Himawari AHI Level 1 data in gridded format, downloadable from http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [ahi] # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time'] datasets: B01: name: B01 sensor: ahi wavelength: [0.45,0.47,0.49] resolution: 0.01 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: vis.01 B02: name: B02 sensor: ahi wavelength: [0.49,0.51,0.53] resolution: 0.01 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: vis.02 B03: name: B03 sensor: ahi wavelength: [0.62,0.64,0.66] resolution: 0.005 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: ext.01 B04: name: B04 sensor: ahi wavelength: [0.83, 0.85, 0.87] resolution: 0.01 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: vis.03 B05: name: B05 sensor: ahi wavelength: [1.5, 1.6, 1.7] resolution: 0.02 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: sir.01 B06: name: B06 sensor: ahi wavelength: [2.2, 2.3, 2.4] resolution: 0.02 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: sir.02 B07: name: B07 sensor: ahi wavelength: [3.7, 3.9, 4.1] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.05 B08: name: B08 sensor: ahi wavelength: [6.0, 6.2, 6.4] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.06 B09: name: B09 sensor: ahi wavelength: [6.7, 6.9, 7.1] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.07 B10: name: B10 sensor: ahi wavelength: [7.1, 7.3, 7.5] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.08 B11: name: B11 sensor: ahi wavelength: [8.4, 8.6, 8.8] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.09 B12: name: B12 sensor: ahi wavelength: [9.4, 9.6, 9.8] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: 
counts units: 1 file_type: tir.10 B13: name: B13 sensor: ahi wavelength: [10.2, 10.4, 10.6] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.01 B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.02 B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.03 B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.04 file_types: vis.01: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.vis.01.{area}.geoss', '{start_time:%Y%m%d%H%M}.vis.01.{area}.geoss.bz2'] vis.02: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.vis.02.{area}.geoss', '{start_time:%Y%m%d%H%M}.vis.02.{area}.geoss.bz2'] ext.01: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.ext.01.{area}.geoss', '{start_time:%Y%m%d%H%M}.ext.01.{area}.geoss.bz2'] vis.03: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.vis.03.{area}.geoss', '{start_time:%Y%m%d%H%M}.vis.03.{area}.geoss.bz2'] sir.01: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.sir.01.{area}.geoss', '{start_time:%Y%m%d%H%M}.sir.01.{area}.geoss.bz2'] sir.02: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.sir.02.{area}.geoss', '{start_time:%Y%m%d%H%M}.sir.02.{area}.geoss.bz2'] tir.05: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.05.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.05.{area}.geoss.bz2'] tir.06: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.06.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.06.{area}.geoss.bz2'] tir.07: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.07.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.07.{area}.geoss.bz2'] tir.08: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.08.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.08.{area}.geoss.bz2'] tir.09: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.09.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.09.{area}.geoss.bz2'] tir.10: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.10.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.10.{area}.geoss.bz2'] tir.01: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.01.{area}.geoss', 
'{start_time:%Y%m%d%H%M}.tir.01.{area}.geoss.bz2'] tir.02: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.02.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.02.{area}.geoss.bz2'] tir.03: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.03.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.03.{area}.geoss.bz2'] tir.04: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.04.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.04.{area}.geoss.bz2'] satpy-0.34.0/satpy/etc/readers/ami_l1b.yaml000066400000000000000000000251341420401153000204610ustar00rootroot00000000000000reader: name: ami_l1b short_name: AMI L1b long_name: GEO-KOMPSAT-2 AMI Level 1b description: > GEO-KOMPSAT-2 AMI Level 1b data reader in the NetCDF4 format. The file format and instrument are described on KMA's website `here `_. sensors: [ami] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'sensor', 'sector_info'] file_types: # Example: gk2a_ami_le1b_ir087_fd020ge_201901260310.nc # Below list is alphabetical ir087: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir087_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] ir096: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir096_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] ir105: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir105_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] ir112: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir112_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] ir123: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir123_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] ir133: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir133_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] nr013: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_nr013_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] nr016: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_nr016_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] sw038: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_sw038_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] vi004: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi004_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] vi005: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi005_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] vi006: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: 
['{platform_shortname:4s}_{sensor:3s}_le1b_vi006_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] vi008: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi008_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] wv063: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv063_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] wv069: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv069_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] wv073: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv073_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] datasets: # Below list is ordered the same as the table: # https://directory.eoportal.org/web/eoportal/satellite-missions/content/-/article/geo-kompsat-2 C01: name: VI004 wavelength: [0.450, 0.470, 0.490] resolution: 1000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vi004 file_key: image_pixel_values C02: name: VI005 wavelength: [0.495, 0.509, 0.523] resolution: 1000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vi005 file_key: image_pixel_values C03: name: VI006 wavelength: [0.599, 0.639, 0.679] resolution: 500 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vi006 file_key: image_pixel_values C04: name: VI008 wavelength: [0.846, 0.863, 0.880] resolution: 1000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vi008 file_key: image_pixel_values C05: name: NR013 wavelength: [1.363, 1.37, 1.377] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: nr013 file_key: image_pixel_values C06: name: NR016 wavelength: [1.590, 1.61, 1.630] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: nr016 file_key: image_pixel_values C07: name: SW038 wavelength: [3.74, 3.83, 3.92] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: sw038 file_key: image_pixel_values C08: name: WV063 wavelength: [5.79, 6.21, 6.63] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: 
standard_name: toa_brightness_temperature units: K file_type: wv063 file_key: image_pixel_values C09: name: WV069 wavelength: [6.74, 6.94, 7.14] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: wv069 file_key: image_pixel_values C10: name: WV073 wavelength: [7.24, 7.33, 7.42] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: wv073 file_key: image_pixel_values C11: name: IR087 wavelength: [8.415, 8.59, 8.765] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir087 file_key: image_pixel_values C12: name: IR096 wavelength: [9.43, 9.62, 9.81] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir096 file_key: image_pixel_values C13: name: IR105 wavelength: [10.115, 10.35, 10.585] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir105 file_key: image_pixel_values C14: name: IR112 wavelength: [10.90, 11.23, 11.56] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir112 file_key: image_pixel_values C15: name: IR123 wavelength: [11.805, 12.36, 12.915] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir123 file_key: image_pixel_values C16: name: IR133 wavelength: [13.005, 13.29, 13.575] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir133 file_key: image_pixel_values satpy-0.34.0/satpy/etc/readers/amsr2_l1b.yaml000066400000000000000000000173451420401153000207440ustar00rootroot00000000000000reader: name: amsr2_l1b description: GCOM-W1 AMSR2 instrument HDF5 reader # could this be a python hook ? 
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsr2] default_channels: [] datasets: btemp_10.7v: name: 'btemp_10.7v' # FIXME: These are actually GHz not micrometers wavelength: [10.7, 10.7, 10.7] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (10.7GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_10.7h: name: 'btemp_10.7h' wavelength: [10.7, 10.7, 10.7] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (10.7GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_6.9v: name: 'btemp_6.9v' wavelength: [6.9, 6.9, 6.9] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (6.9GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_6.9h: name: 'btemp_6.9h' wavelength: [6.9, 6.9, 6.9] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (6.9GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_7.3v: name: 'btemp_7.3v' wavelength: [7.3, 7.3, 7.3] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (7.3GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_7.3h: name: 'btemp_7.3h' wavelength: [7.3, 7.3, 7.3] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (7.3GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_18.7v: name: 'btemp_18.7v' wavelength: [18.7, 18.7, 18.7] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (18.7GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_18.7h: name: 'btemp_18.7h' wavelength: [18.7, 18.7, 18.7] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (18.7GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_23.8v: name: 'btemp_23.8v' wavelength: [23.8, 23.8, 23.8] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (23.8GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_23.8h: name: 'btemp_23.8h' wavelength: [23.8, 23.8, 23.8] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (23.8GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_36.5v: name: 'btemp_36.5v' wavelength: [36.5, 36.5, 36.5] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (36.5GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_36.5h: name: 'btemp_36.5h' wavelength: [36.5, 36.5, 36.5] calibration: brightness_temperature resolution: 10000 standard_name: 
toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (36.5GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_89.0av: name: 'btemp_89.0av' wavelength: [89.0, 89.0, 89.0] calibration: brightness_temperature resolution: 5000 navigation: amsr2_5km_a standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (89.0GHz-A,V)" fill_value: 65535 coordinates: - longitude_a - latitude_a btemp_89.0ah: name: 'btemp_89.0ah' wavelength: [89.0, 89.0, 89.0] calibration: brightness_temperature resolution: 5000 navigation: amsr2_5km_a standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (89.0GHz-A,H)" fill_value: 65535 coordinates: - longitude_a - latitude_a btemp_89.0bv: name: 'btemp_89.0bv' wavelength: [89.0, 89.0, 89.0] calibration: brightness_temperature resolution: 5000 navigation: amsr2_5km_b standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (89.0GHz-B,V)" fill_value: 65535 coordinates: - longitude_b - latitude_b btemp_89.0bh: name: 'btemp_89.0bh' wavelength: [89.0, 89.0, 89.0] calibration: brightness_temperature resolution: 5000 navigation: amsr2_5km_b standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (89.0GHz-B,H)" fill_value: 65535 coordinates: - longitude_b - latitude_b latitude_5km_a: name: latitude_a resolution: 5000 file_type: amsr2_l1b standard_name: latitude polarization: [H, V] units: degree file_key: 'Latitude of Observation Point for 89A' fill_value: -9999.0 latitude_5km_b: name: latitude_b resolution: 5000 file_type: amsr2_l1b standard_name: latitude polarization: [H, V] units: degree file_key: 'Latitude of Observation Point for 89B' fill_value: -9999.0 longitude_5km_a: name: longitude_a resolution: 5000 file_type: amsr2_l1b standard_name: longitude polarization: [H, V] units: degree file_key: 'Longitude of Observation Point for 89A' fill_value: -9999.0 longitude_5km_b: name: longitude_b resolution: 5000 file_type: amsr2_l1b standard_name: longitude polarization: [H, V] units: degree file_key: 'Longitude of Observation Point for 89B' fill_value: -9999.0 latitude_10km: name: latitude resolution: 10000 file_type: amsr2_l1b standard_name: latitude polarization: [H, V] units: degree file_key: 'Latitude of Observation Point for 89A' fill_value: -9999.0 longitude_10km: name: longitude resolution: 10000 file_type: amsr2_l1b standard_name: longitude polarization: [H, V] units: degree file_key: 'Longitude of Observation Point for 89A' fill_value: -9999.0 file_types: amsr2_l1b: file_reader: !!python/name:satpy.readers.amsr2_l1b.AMSR2L1BFileHandler file_patterns: ['{platform_shortname:3s}{instrument_shortname:3s}_{start_time:%Y%m%d%H%M}_{path_number:3d}{orbit_direction:1s}_{process_level:2s}{process_kind:2s}{product_id:3s}{resolution_id:1s}{dev_id:1s}{product_version:1s}{algorithm_version:3d}{parameter_version:3d}.h5'] satpy-0.34.0/satpy/etc/readers/amsr2_l2.yaml000066400000000000000000000026271420401153000206000ustar00rootroot00000000000000reader: name: amsr2_l2 short_name: AMSR2 Level 2 long_name: GCOM-W1 AMSR2 Level 2 (HDF5) description: > HDF5 reader for GCOM-W1 AMSR2 Level 2 files from JAXA. See https://suzaku.eorc.jaxa.jp/GCOM_W/data/data_w_product-2.html for more information. 
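# Usage sketch (not part of the reader definition): the sea surface wind speed dataset
# ("ssw") declared below can be loaded with satpy's Scene API. The glob pattern is a
# hypothetical shorthand for a JAXA L2 SSW file name matched by file_types below.
#
#     from glob import glob
#     from satpy import Scene
#
#     scn = Scene(filenames=glob("GW1AM2_*SSW*.h5"), reader="amsr2_l2")
#     scn.load(["ssw"])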
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsr2] file_types: amsr2_l2_ssw: file_reader: !!python/name:satpy.readers.amsr2_l2.AMSR2L2FileHandler file_patterns: ['{platform_shortname:3s}{instrument_shortname:3s}_{start_time:%Y%m%d%H%M}_{path_number:3d}{orbit_direction:1s}_L2{process_kind:2s}SSW{resolution_id:1s}{dev_id:1s}{product_version:1s}{algorithm_version:3d}{parameter_version:3d}.h5'] datasets: longitude_ssw: name: longitude_ssw file_type: amsr2_l2_ssw standard_name: longitude resolution: 15000 units: degrees_east file_key: 'Longitude of Observation Point' fill_value: -9999.0 latitude_ssw: name: latitude_ssw file_type: amsr2_l2_ssw standard_name: latitude resolution: 15000 units: degrees_north file_key: 'Latitude of Observation Point' fill_value: -9999.0 #https://suzaku.eorc.jaxa.jp/GCOM_W/data/data_w_product-2.html ssw: name: ssw standard_name: wind_speed file_type: amsr2_l2_ssw file_key: 'Geophysical Data' fill_value: -32768 resolution: 15000 coordinates: - longitude_ssw - latitude_ssw satpy-0.34.0/satpy/etc/readers/amsr2_l2_gaasp.yaml000066400000000000000000000061651420401153000217540ustar00rootroot00000000000000reader: name: amsr2_l2_gaasp short_name: AMSR2 Level 2 GAASP long_name: GCOM-W1 AMSR2 Level 2 GAASP (NetCDF4) description: > NetCDF4 reader for GCOM-W1 AMSR2 Level 2 files processed using the GAASP software distributed by NOAA. See https://www.star.nesdis.noaa.gov/jpss/gcom.php for more information. reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsr2] file_types: amsr2_mbt: # Microwave Brightness Temperatures # Ex. AMSR2-MBT_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPFileHandler file_patterns: ['AMSR2-MBT_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] amsr2_precip: # Ex. AMSR2-OCEAN_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPFileHandler file_patterns: ['AMSR2-PRECIP_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] amsr2_ocean: # Ex. AMSR2-OCEAN_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPFileHandler file_patterns: ['AMSR2-OCEAN_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] amsr2_seaice_nh: var_suffix: "_NH" grid_epsg: 6931 # Ex. AMSR2-SEAICE-NH_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPGriddedFileHandler file_patterns: - 'AMSR2-SEAICE-NH_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' amsr2_seaice_sh: var_suffix: "_SH" grid_epsg: 6932 # Ex. AMSR2-SEAICE-SH_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPGriddedFileHandler file_patterns: - 'AMSR2-SEAICE-SH_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' amsr2_snow: # Ex. 
AMSR2-SNOW_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPLowResFileHandler file_patterns: ['AMSR2-SNOW_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] amsr2_soil: # Ex. AMSR2-SOIL_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPLowResFileHandler file_patterns: ['AMSR2-SOIL_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] datasets: {} satpy-0.34.0/satpy/etc/readers/amsub_l1c_aapp.yaml000066400000000000000000000103641420401153000220230ustar00rootroot00000000000000reader: name: amsub_l1c_aapp description: AAPP l1c Reader for AMSU-B data reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsub,] default_channels: [16, 17, 18, 19, 20] data_identification_keys: name: required: true frequency_double_sideband: type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyRange resolution: polarization: enum: - H - V calibration: enum: - brightness_temperature transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple datasets: '16': name: '16' frequency_range: central: 89. bandwidth: 1.0 unit: GHz polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: amsub_aapp_l1c '17': name: '17' frequency_range: central: 150. bandwidth: 1.0 unit: GHz polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: amsub_aapp_l1c '18': name: '18' frequency_double_sideband: unit: GHz central: 183.31 side: 1.0 bandwidth: 0.5 polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: amsub_aapp_l1c '19': name: '19' frequency_double_sideband: unit: GHz central: 183.31 side: 3.0 bandwidth: 1.0 polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: amsub_aapp_l1c '20': name: '20' frequency_double_sideband: unit: GHz central: 183.31 side: 7.0 bandwidth: 2.0 polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: amsub_aapp_l1c solar_zenith_angle: name: solar_zenith_angle resolution: 16000 coordinates: - longitude - latitude file_type: amsub_aapp_l1c standard_name: solar_zenith_angle units: degrees solar_azimuth_angle: name: solar_azimuth_angle resolution: 16000 coordinates: - longitude - latitude file_type: amsub_aapp_l1c standard_name: solar_azimuth_angle units: degrees sensor_zenith_angle: name: sensor_zenith_angle resolution: 16000 coordinates: - longitude - latitude file_type: amsub_aapp_l1c standard_name: sensor_zenith_angle units: degrees sensor_azimuth_angle: name: sensor_azimuth_angle resolution: 16000 coordinates: - longitude - latitude file_type: amsub_aapp_l1c standard_name: sensor_azimuth_angle units: degrees latitude: name: latitude resolution: 16000 file_type: 
amsub_aapp_l1c standard_name: latitude units: degrees_north longitude: name: longitude resolution: 16000 file_type: amsub_aapp_l1c standard_name: longitude units: degrees_east file_types: amsub_aapp_l1c: file_reader: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.MHS_AMSUB_AAPPL1CFile file_patterns: ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c'] satpy-0.34.0/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml000066400000000000000000000026511420401153000245000ustar00rootroot00000000000000reader: name: ascat_l2_soilmoisture_bufr short_name: ASCAT L2 SOILMOISTURE BUFR long_name: METOP ASCAT Level 2 SOILMOISTURE BUFR description: > Reader for ASCAT L2 SOIL MOISUTRE FILES sensors: [scatterometer] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader default_datasets: file_types: ascat_l2_soilmoisture_bufr: file_reader: !!python/name:satpy.readers.ascat_l2_soilmoisture_bufr.AscatSoilMoistureBufr file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_{header}_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_ssm_l2.bin"] datasets: latitude: name: latitude file_type: ascat_l2_soilmoisture_bufr standard_name: latitude units: "degrees" resolution: 12000 key: 'latitude' fill_value: -1.e+100 longitude: name: longitude file_type: ascat_l2_soilmoisture_bufr standard_name: longitude units: "degrees" resolution: 12000 key: 'longitude' fill_value: -1.e+100 surface_soil_moisture: name: surface_soil_moisture file_type: ascat_l2_soilmoisture_bufr units: 'percent' coordinates: [longitude, latitude] key: surfaceSoilMoisture fill_value: -1.e+100 sensor: scatterometer soil_moisture_quality: name: soil_moisture_quality file_type: ascat_l2_soilmoisture_bufr coordinates: [longitude, latitude] key: soilMoistureQuality fill_value: -1.e+100 satpy-0.34.0/satpy/etc/readers/avhrr_l1b_aapp.yaml000066400000000000000000000073551420401153000220430ustar00rootroot00000000000000reader: name: avhrr_l1b_aapp description: AAPP l1b Reader for AVHRR reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr-3,] default_channels: [1, 2, 3a, 3b, 4, 5] datasets: '1': name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '2': name: '2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '3a': name: '3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '3b': name: '3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '4': name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 
calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b solar_zenith_angle: name: solar_zenith_angle resolution: 1050 coordinates: - longitude - latitude file_type: avhrr_aapp_l1b standard_name: solar_zenith_angle units: degrees sensor_zenith_angle: name: sensor_zenith_angle resolution: 1050 coordinates: - longitude - latitude file_type: avhrr_aapp_l1b standard_name: sensor_zenith_angle units: degrees sun_sensor_azimuth_difference_angle: name: sun_sensor_azimuth_difference_angle resolution: 1050 coordinates: - longitude - latitude file_type: avhrr_aapp_l1b units: degrees latitude: name: latitude resolution: 1050 file_type: avhrr_aapp_l1b standard_name: latitude units: degrees_north longitude: name: longitude resolution: 1050 file_type: avhrr_aapp_l1b standard_name: longitude units: degrees_east file_types: avhrr_aapp_l1b: file_reader: !!python/name:satpy.readers.aapp_l1b.AVHRRAAPPL1BFile file_patterns: ['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'] satpy-0.34.0/satpy/etc/readers/avhrr_l1b_eps.yaml000066400000000000000000000076741420401153000217150ustar00rootroot00000000000000reader: name: avhrr_l1b_eps description: EPS Reader for AVHRR reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr-3] default_channels: [1, 2, 3a, 3b, 4, 5] datasets: '1': name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '2': name: '2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '3a': name: '3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '3b': name: '3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '4': name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps latitude: name: latitude resolution: 1050 file_type: avhrr_eps standard_name: latitude units: degree longitude: name: longitude resolution: 1050 file_type: avhrr_eps standard_name: longitude units: degree solar_zenith_angle: name: solar_zenith_angle sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] file_type: avhrr_eps solar_azimuth_angle: name: solar_azimuth_angle sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] file_type: avhrr_eps satellite_zenith_angle: name: 
satellite_zenith_angle sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] file_type: avhrr_eps satellite_azimuth_angle: name: satellite_azimuth_angle sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] file_type: avhrr_eps file_types: avhrr_eps: file_reader: !!python/name:satpy.readers.eps_l1b.EPSAVHRRFile file_patterns: [ 'AVHR_xxx_1B_{platform_short_name}_{start_time:%Y%m%d%H%M%SZ}_{end_time:%Y%m%d%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time:%Y%m%d%H%M%SZ}', 'AVHR_xxx_1B_{platform_short_name}_{start_time:%Y%m%d%H%M%SZ}_{end_time:%Y%m%d%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time:%Y%m%d%H%M%SZ}.nat'] satpy-0.34.0/satpy/etc/readers/avhrr_l1b_gaclac.yaml000066400000000000000000000113501420401153000223220ustar00rootroot00000000000000reader: name: avhrr_l1b_gaclac description: AAPP l1b Reader for AVHRR reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr-3, avhrr-2, avhrr-1] datasets: '1': name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '2': name: '2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '3': name: '3' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '3a': name: '3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '3b': name: '3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '4': name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b solar_zenith_angle: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees sensor_zenith_angle: name: sensor_zenith_angle standard_name: sensor_zenith_angle resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees solar_azimuth_angle: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees sensor_azimuth_angle: name: sensor_azimuth_angle standard_name: sensor_azimuth_angle resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees sun_sensor_azimuth_difference_angle: name: sun_sensor_azimuth_difference_angle standard_name: angle_of_rotation_from_solar_azimuth_to_platform_azimuth resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees qual_flags: name: qual_flags 
long_name: Scanline Quality Flags resolution: 1050 file_type: gac_lac_l1b latitude: name: latitude resolution: 1050 file_type: gac_lac_l1b standard_name: latitude units: degrees_north longitude: name: longitude resolution: 1050 file_type: gac_lac_l1b standard_name: longitude units: degrees_east file_types: gac_lac_l1b: file_reader: !!python/name:satpy.readers.avhrr_l1b_gaclac.GACLACFile #NSS.GHRR.NJ.D95056.S1116.E1303.B0080506.GC file_patterns: ['{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}'] satpy-0.34.0/satpy/etc/readers/avhrr_l1b_hrpt.yaml000066400000000000000000000056331420401153000220740ustar00rootroot00000000000000reader: name: avhrr_l1b_hrpt description: HRPT Reader for AVHRR reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr-3, avhrr-2] default_channels: [1, 2, 3a, 3b, 4, 5] datasets: '1': name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '2': name: '2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '3a': name: '3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '3b': name: '3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '4': name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt longitude: name: longitude resolution: 1050 file_type: avhrr_hrpt standard_name: longitude units: degree latitude: name: latitude resolution: 1050 file_type: avhrr_hrpt standard_name: latitude units: degree file_types: avhrr_hrpt: file_reader: !!python/name:satpy.readers.hrpt.HRPTFile file_patterns: ['{start_time:%Y%m%d%H%M%S}_{platform_name}.hmf', 'hrpt16_{platform_name:s}_{start_time:%d-%b-%Y_%H:%M:%S.%f}_{orbit_number:05d}'] satpy-0.34.0/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml000066400000000000000000000143631420401153000240330ustar00rootroot00000000000000reader: name: avhrr_l1c_eum_gac_fdr_nc short_name: EUMETSAT_GAC_FDR long_name: EUMETCSAT GAC FDR NetCDF4 description: NetCDF4 reader for EUMETCSAT GAC FDR AVHRR L1c sensors: [avhrr-3, avhrr-2, avhrr-1] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: eumetsat_gac_fdr: file_reader: !!python/name:satpy.readers.satpy_cf_nc.SatpyCFFileHandler file_patterns: ['AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc'] datasets: 
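# Usage sketch (not part of the reader definition): the channel and angle datasets defined
# below can be loaded with satpy's Scene API. The glob only mirrors the AVHRR-GAC_FDR_1C_*
# prefix from file_types above; a real file name is longer.
#
#     from glob import glob
#     from satpy import Scene
#
#     scn = Scene(filenames=glob("AVHRR-GAC_FDR_1C_*.nc"), reader="avhrr_l1c_eum_gac_fdr_nc")
#     scn.load(["brightness_temperature_channel_4", "solar_zenith_angle"])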
'reflectance_channel_1': name: 'reflectance_channel_1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] nc_key: 'reflectance_channel_1' 'reflectance_channel_2': name: 'reflectance_channel_2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] nc_key: 'reflectance_channel_2' 'reflectance_channel_3': name: 'reflectance_channel_3' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] nc_key: 'reflectance_channel_3' 'reflectance_channel_3a': name: 'reflectance_channel_3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] nc_key: 'reflectance_channel_3a' 'brightness_temperature_channel_3': name: 'brightness_temperature_channel_3' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] nc_key: 'brightness_temperature_channel_3' 'brightness_temperature_channel_3b': name: 'brightness_temperature_channel_3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] nc_key: 'brightness_temperature_channel_3b' 'brightness_temperature_channel_4': name: 'brightness_temperature_channel_4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: eumetsat_gac_fdr nc_key: 'brightness_temperature_channel_4' 'brightness_temperature_channel_5': name: 'brightness_temperature_channel_5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] nc_key: 'brightness_temperature_channel_5' solar_zenith_angle: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 1050 file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] units: degrees nc_key: 'solar_zenith_angle' sensor_zenith_angle: name: sensor_zenith_angle standard_name: sensor_zenith_angle resolution: 1050 file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] units: degrees nc_key: 'satellite_zenith_angle' solar_azimuth_angle: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 1050 file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] units: degrees nc_key: 'solar_azimuth_angle' sun_azimuth_angle: name: sensor_azimuth_angle standard_name: sensor_azimuth_angle resolution: 1050 file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] units: degrees nc_key: 'satellite_azimuth_angle' sun_sensor_azimuth_difference_angle: name: sun_sensor_azimuth_difference_angle standard_name: angle_of_rotation_from_solar_azimuth_to_platform_azimuth resolution: 1050 file_type: eumetsat_gac_fdr coordinates: [longitude, latitude] units: degrees nc_key: 'relative_azimuth_angle' qual_flags: name: qual_flags
file_type: eumetsat_gac_fdr nc_key: 'qual_flags' acq_time: name: qual_flags file_type: eumetsat_gac_fdr nc_key: 'qual_flags' latitude: name: latitude file_type: eumetsat_gac_fdr standard_name: latitude units: degrees_north nc_key: 'latitude' longitude: name: longitude file_type: eumetsat_gac_fdr standard_name: longitude units: degrees_east nc_key: 'longitude' overlap_free_end: name: overlap_free_end file_type: eumetsat_gac_fdr nc_key: 'overlap_free_end' overlap_free_start: name: overlap_free_start file_type: eumetsat_gac_fdr nc_key: 'overlap_free_start' midnight_line: name: midnight_line file_type: eumetsat_gac_fdr nc_key: 'midnight_line' equator_crossing_longitude: name: equator_crossing_longitude file_type: eumetsat_gac_fdr nc_key: 'equator_crossing_longitude' equator_crossing_time: name: equator_crossing_time file_type: eumetsat_gac_fdr nc_key: 'equator_crossing_time' satpy-0.34.0/satpy/etc/readers/caliop_l2_cloud.yaml000066400000000000000000000017271420401153000222110ustar00rootroot00000000000000reader: default_datasets: [] description: CALIOP Level 2 Cloud Layer Version 3 HDF4 reader name: caliop_l2_cloud reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [caliop] datasets: elevation: file_type: hdf4_caliop name: Lidar_Surface_Elevation resolution: 1000 coordinates: [Longitude, Latitude] layer_top_altitude: file_type: hdf4_caliop name: Layer_Top_Altitude resolution: 1000 coordinates: [Longitude, Latitude] units: km longitude: file_type: hdf4_caliop name: Longitude resolution: 1000 standard_name: longitude units: degree latitude: file_type: hdf4_caliop name: Latitude resolution: 1000 standard_name: latitude units: degree file_types: hdf4_caliop: file_patterns: - 'CAL_LID_L2_0{resolution:1s}kmCLay-ValStage1-V3-30.{start_time:%Y-%m-%dT%H-%M-%S}ZN.hdf' file_reader: !!python/name:satpy.readers.caliop_l2_cloud.HDF4BandReader satpy-0.34.0/satpy/etc/readers/clavrx.yaml000066400000000000000000000027761420401153000204630ustar00rootroot00000000000000reader: description: CLAVR-X Reader name: clavrx reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs, modis, avhrr, ahi, abi] file_types: clavrx_hdf4: # clavrx_npp_d20170520_t2055235_e2056477_b28822.level2.hdf' # clavrx_H08_20180719_1300.level2.hdf file_reader: !!python/name:satpy.readers.clavrx.CLAVRXHDF4FileHandler file_patterns: - 'clavrx_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}.level2.hdf' - 'clavrx_{platform_shortname}.{start_time:%y%j.%H%M}.{resolution:s}.level2.hdf' - 'clavrx_hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit}.level2.hdf' - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}.level2.hdf' clavrx_nc: # clavrx_OR_ABI-L1b-RadF-M6C01_G16_s20211101600189.level2.nc # clavrx_H08_20210322_0300_B01_FLDK_R.level2.nc file_reader: !!python/name:satpy.readers.clavrx.CLAVRXNetCDFFileHandler file_patterns: - 'clavrx_OR_{sensor}-L1b-Rad{sector}-{mode}C{channel_number}_{platform_shortname}_s{start_time:%Y%j%H%M%S%f}.level2.nc' - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B{channel_number}_{sector}_R.level2.nc' #datasets: # longitude: # name: longitude # resolution: 0 # file_type: level2 # latitude: # name: latitude # resolution: 0 # file_type: level2 # # resolution come from file # cld_temp_acha: # name: Solar_Zenith # resolution: 0 # coordinates: [longitude, latitude] # file_type: level2 satpy-0.34.0/satpy/etc/readers/cmsaf-claas2_l2_nc.yaml000066400000000000000000000016531420401153000224660ustar00rootroot00000000000000reader: 
description: > Reader for Spinning Enhanced Visible and Infrared Imager (SEVIRI) L2 data as produced by the Climate Monitoring Satellite Application Facility (CMSAF) in its CLoud property dAtAset using SEVIRI (CLAAS) dataset, edition 2, doi:10.5676/EUM_SAF_CM/CLAAS/V002. Information on CMSAF and its products can be found at https://www.cmsaf.eu/. name: cmsaf-claas2_l2_nc long_name: CMSAF CLAAS-2 data for SEVIRI-derived cloud products sensors: [seviri] doi: doi:10.5676/EUM_SAF_CM/CLAAS/V002. default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # CMSAF naming convention: # https://www.cmsaf.eu/EN/Products/NamingConvention/Naming_Convention_node.html file_types: cmsaf-claas2: file_reader: !!python/name:satpy.readers.cmsaf_claas2.CLAAS2 file_patterns: ['{product:3s}in{start_time:%Y%m%d%H%M}{version:>03d}05SVMSG{proc_level:2s}MD.nc'] satpy-0.34.0/satpy/etc/readers/electrol_hrit.yaml000066400000000000000000000250541420401153000220150ustar00rootroot00000000000000reader: description: MSG HRIT Reader name: electrol_hrit sensors: [msu-gs] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: HRIT_00_6_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_9_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_03_8_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-03_8_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_06_4_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-06_4_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_0_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_0_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_09_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-09_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_10_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, 
HRIT_EPI_4] HRIT_11_9_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-11_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_6_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_9_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_03_8_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-03_8_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_06_4_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-06_4_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_0_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_0_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_09_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-09_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_10_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_11_9_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-11_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_PRO_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-_________-PRO______-{start_time:%Y%m%d%H%M}-__'] HRIT_EPI_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSEpilogueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-_________-EPI______-{start_time:%Y%m%d%H%M}-__'] datasets: '00_6': name: '00_6' resolution: 4000 wavelength: [0.5, 0.6, 0.65] calibration: # reflectance: # standard_name: 
toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_6_4, HRIT_00_6_4_C] '00_7': name: '00_7' resolution: 4000 wavelength: [0.65, 0.7, 0.8] calibration: # reflectance: # standard_name: toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_7_4, HRIT_00_7_4_C] '00_9': name: '00_9' resolution: 4000 wavelength: [0.8, 0.9, 0.9] calibration: # reflectance: # standard_name: toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_9_4, HRIT_00_9_4_C] '03_8': name: '03_8' resolution: 4000 wavelength: [3.5, 3.8, 4.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_03_8_4, HRIT_03_8_4_C] '06_4': name: '06_4' resolution: 4000 wavelength: [5.7, 6.4, 7.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_06_4_4, HRIT_06_4_4_C] '08_0': name: '08_0' resolution: 4000 wavelength: [7.5, 8.0, 8.5] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_08_0_4, HRIT_08_0_4_C] '08_7': name: '08_7' resolution: 4000 wavelength: [8.2, 8.7, 9.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_08_7_4, HRIT_08_7_4_C] '09_7': name: '09_7' resolution: 4000 wavelength: [9.2, 9.7, 10.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_09_7_4, HRIT_09_7_4_C] '10_7': name: '10_7' resolution: 4000 wavelength: [10.2, 10.8, 11.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_10_7_4, HRIT_10_7_4_C] '11_9': name: '11_9' resolution: 4000 wavelength: [11.2, 11.9, 12.5] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_11_9_4, HRIT_11_9_4_C] satpy-0.34.0/satpy/etc/readers/fci_l1c_nc.yaml000066400000000000000000001023441420401153000211340ustar00rootroot00000000000000reader: name: fci_l1c_nc short_name: FCI L1c NC long_name: MTG FCI Level-1c NetCDF description: > Reader for FCI L1c data in NetCDF4 format. Used to read Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) L1c data. 
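# Usage sketch (not part of the reader definition): FDHSI body chunks are gathered per
# repeat cycle by the segmented YAML reader configured below and loaded through satpy's
# Scene API. The glob is a hypothetical shorthand for the full FDHSI BODY file pattern.
#
#     from glob import glob
#     from satpy import Scene
#
#     scn = Scene(filenames=glob("*-1C-RRAD-FDHSI-*-BODY-*.nc"), reader="fci_l1c_nc")
#     scn.load(["vis_06", "ir_105"])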
reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader sensors: [ fci ] # Source: MTG FCI L1 Product User Guide [FCIL1PUG] # https://www.eumetsat.int/media/45923 file_types: fci_l1c_fdhsi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] expected_segments: 40 datasets: vis_04: name: vis_04 sensor: fci wavelength: [ 0.384, 0.444, 0.504 ] resolution: 1000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: fci_l1c_fdhsi vis_05: name: vis_05 sensor: fci wavelength: [0.470, 0.510, 0.550] resolution: 1000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: fci_l1c_fdhsi vis_06: name: vis_06 sensor: fci wavelength: [0.590, 0.640, 0.690] resolution: 1000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: fci_l1c_fdhsi vis_08: name: vis_08 sensor: fci wavelength: [0.815, 0.865, 0.915] resolution: 1000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: fci_l1c_fdhsi vis_09: name: vis_09 sensor: fci wavelength: [0.894, 0.914, 0.934] resolution: 1000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: fci_l1c_fdhsi nir_13: name: nir_13 sensor: fci wavelength: [1.350, 1.380, 1.410] resolution: 1000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: fci_l1c_fdhsi nir_16: name: nir_16 sensor: fci wavelength: [1.560, 1.610, 1.660] resolution: 1000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: fci_l1c_fdhsi nir_22: name: nir_22 sensor: fci wavelength: [2.200, 2.250, 2.300] resolution: 1000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: fci_l1c_fdhsi ir_38: name: ir_38 sensor: fci wavelength: [3.400, 3.800, 4.200] resolution: 2000 calibration: counts: standard_name: counts units: "count" radiance: 
standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: fci_l1c_fdhsi wv_63: name: wv_63 sensor: fci wavelength: [5.300, 6.300, 7.300] resolution: 2000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: fci_l1c_fdhsi wv_73: name: wv_73 sensor: fci wavelength: [6.850, 7.350, 7.850] resolution: 2000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: fci_l1c_fdhsi ir_87: name: ir_87 sensor: fci wavelength: [8.300, 8.700, 9.100] resolution: 2000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: fci_l1c_fdhsi ir_97: name: ir_97 sensor: fci wavelength: [9.360, 9.660, 9.960] resolution: 2000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: fci_l1c_fdhsi ir_105: name: ir_105 sensor: fci wavelength: [9.800, 10.500, 11.200] resolution: 2000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: fci_l1c_fdhsi ir_123: name: ir_123 sensor: fci wavelength: [11.800, 12.300, 12.800] resolution: 2000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: fci_l1c_fdhsi ir_133: name: ir_133 sensor: fci wavelength: [12.700, 13.300, 13.900] resolution: 2000 calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: fci_l1c_fdhsi vis_04_pixel_quality: name: vis_04_pixel_quality sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_pixel_quality: name: vis_05_pixel_quality sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_pixel_quality: name: vis_06_pixel_quality sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_pixel_quality: name: vis_08_pixel_quality sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_pixel_quality: name: vis_09_pixel_quality sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_pixel_quality: name: nir_13_pixel_quality sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_pixel_quality: name: nir_16_pixel_quality sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_pixel_quality: name: nir_22_pixel_quality sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_pixel_quality: name: ir_38_pixel_quality sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_pixel_quality: name: wv_63_pixel_quality sensor: fci 
resolution: 2000 file_type: fci_l1c_fdhsi wv_73_pixel_quality: name: wv_73_pixel_quality sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_pixel_quality: name: ir_87_pixel_quality sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_pixel_quality: name: ir_97_pixel_quality sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_pixel_quality: name: ir_105_pixel_quality sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_pixel_quality: name: ir_123_pixel_quality sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_pixel_quality: name: ir_133_pixel_quality sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_index_map: name: vis_04_index_map sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_index_map: name: vis_05_index_map sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_index_map: name: vis_06_index_map sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_index_map: name: vis_08_index_map sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_index_map: name: vis_09_index_map sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_index_map: name: nir_13_index_map sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_index_map: name: nir_16_index_map sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_index_map: name: nir_22_index_map sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_index_map: name: ir_38_index_map sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_index_map: name: wv_63_index_map sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_index_map: name: wv_73_index_map sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_index_map: name: ir_87_index_map sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_index_map: name: ir_97_index_map sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_index_map: name: ir_105_index_map sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_index_map: name: ir_123_index_map sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_index_map: name: ir_133_index_map sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_time: name: vis_04_time units: s sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_time: name: vis_05_time units: s sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_time: name: vis_06_time units: s sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_time: name: vis_08_time units: s sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_time: name: vis_09_time units: s sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_time: name: nir_13_time units: s sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_time: name: nir_16_time units: s sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_time: name: nir_22_time units: s sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_time: name: ir_38_time units: s sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_time: name: wv_63_time units: s sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_time: name: wv_73_time units: s sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_time: name: ir_87_time units: s sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_time: name: ir_97_time units: s sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_time: name: ir_105_time units: s sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_time: name: ir_123_time units: s sensor: fci resolution: 2000 file_type: 
fci_l1c_fdhsi ir_133_time: name: ir_133_time units: s sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_swath_direction: name: vis_04_swath_direction sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_swath_direction: name: vis_05_swath_direction sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_swath_direction: name: vis_06_swath_direction sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_swath_direction: name: vis_08_swath_direction sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_swath_direction: name: vis_09_swath_direction sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_swath_direction: name: nir_13_swath_direction sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_swath_direction: name: nir_16_swath_direction sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_swath_direction: name: nir_22_swath_direction sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_swath_direction: name: ir_38_swath_direction sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_swath_direction: name: wv_63_swath_direction sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_swath_direction: name: wv_73_swath_direction sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_swath_direction: name: ir_87_swath_direction sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_swath_direction: name: ir_97_swath_direction sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_swath_direction: name: ir_105_swath_direction sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_swath_direction: name: ir_123_swath_direction sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_swath_direction: name: ir_133_swath_direction sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_swath_number: name: vis_04_swath_number sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_swath_number: name: vis_05_swath_number sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_swath_number: name: vis_06_swath_number sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_swath_number: name: vis_08_swath_number sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_swath_number: name: vis_09_swath_number sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_swath_number: name: nir_13_swath_number sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_swath_number: name: nir_16_swath_number sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_swath_number: name: nir_22_swath_number sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_swath_number: name: ir_38_swath_number sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_swath_number: name: wv_63_swath_number sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_swath_number: name: wv_73_swath_number sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_swath_number: name: ir_87_swath_number sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_swath_number: name: ir_97_swath_number sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_swath_number: name: ir_105_swath_number sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_swath_number: name: ir_123_swath_number sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_swath_number: name: ir_133_swath_number sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_subsatellite_latitude: name: vis_04_subsatellite_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi 
vis_05_subsatellite_latitude: name: vis_05_subsatellite_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_subsatellite_latitude: name: vis_06_subsatellite_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_subsatellite_latitude: name: vis_08_subsatellite_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_subsatellite_latitude: name: vis_09_subsatellite_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_subsatellite_latitude: name: nir_13_subsatellite_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_subsatellite_latitude: name: nir_16_subsatellite_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_subsatellite_latitude: name: nir_22_subsatellite_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_subsatellite_latitude: name: ir_38_subsatellite_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_subsatellite_latitude: name: wv_63_subsatellite_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_subsatellite_latitude: name: wv_73_subsatellite_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_subsatellite_latitude: name: ir_87_subsatellite_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_subsatellite_latitude: name: ir_97_subsatellite_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_subsatellite_latitude: name: ir_105_subsatellite_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_subsatellite_latitude: name: ir_123_subsatellite_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_subsatellite_latitude: name: ir_133_subsatellite_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_subsatellite_longitude: name: vis_04_subsatellite_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_subsatellite_longitude: name: vis_05_subsatellite_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_subsatellite_longitude: name: vis_06_subsatellite_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_subsatellite_longitude: name: vis_08_subsatellite_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_subsatellite_longitude: name: vis_09_subsatellite_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_subsatellite_longitude: name: nir_13_subsatellite_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_subsatellite_longitude: name: nir_16_subsatellite_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_subsatellite_longitude: name: nir_22_subsatellite_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_subsatellite_longitude: name: ir_38_subsatellite_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_subsatellite_longitude: name: wv_63_subsatellite_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_subsatellite_longitude: name: wv_73_subsatellite_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_subsatellite_longitude: name: ir_87_subsatellite_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_subsatellite_longitude: name: 
ir_97_subsatellite_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_subsatellite_longitude: name: ir_105_subsatellite_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_subsatellite_longitude: name: ir_123_subsatellite_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_subsatellite_longitude: name: ir_133_subsatellite_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_subsolar_latitude: name: vis_04_subsolar_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_subsolar_latitude: name: vis_05_subsolar_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_subsolar_latitude: name: vis_06_subsolar_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_subsolar_latitude: name: vis_08_subsolar_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_subsolar_latitude: name: vis_09_subsolar_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_subsolar_latitude: name: nir_13_subsolar_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_subsolar_latitude: name: nir_16_subsolar_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_subsolar_latitude: name: nir_22_subsolar_latitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_subsolar_latitude: name: ir_38_subsolar_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_subsolar_latitude: name: wv_63_subsolar_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_subsolar_latitude: name: wv_73_subsolar_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_subsolar_latitude: name: ir_87_subsolar_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_subsolar_latitude: name: ir_97_subsolar_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_subsolar_latitude: name: ir_105_subsolar_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_subsolar_latitude: name: ir_123_subsolar_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_subsolar_latitude: name: ir_133_subsolar_latitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_subsolar_longitude: name: vis_04_subsolar_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_subsolar_longitude: name: vis_05_subsolar_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_subsolar_longitude: name: vis_06_subsolar_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_subsolar_longitude: name: vis_08_subsolar_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_subsolar_longitude: name: vis_09_subsolar_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_subsolar_longitude: name: nir_13_subsolar_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_subsolar_longitude: name: nir_16_subsolar_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_subsolar_longitude: name: nir_22_subsolar_longitude units: deg sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_subsolar_longitude: name: ir_38_subsolar_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi 
wv_63_subsolar_longitude: name: wv_63_subsolar_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_subsolar_longitude: name: wv_73_subsolar_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_subsolar_longitude: name: ir_87_subsolar_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_subsolar_longitude: name: ir_97_subsolar_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_subsolar_longitude: name: ir_105_subsolar_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_subsolar_longitude: name: ir_123_subsolar_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_subsolar_longitude: name: ir_133_subsolar_longitude units: deg sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_platform_altitude: name: vis_04_platform_altitude units: m sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_platform_altitude: name: vis_05_platform_altitude units: m sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_platform_altitude: name: vis_06_platform_altitude units: m sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_platform_altitude: name: vis_08_platform_altitude units: m sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_platform_altitude: name: vis_09_platform_altitude units: m sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_platform_altitude: name: nir_13_platform_altitude units: m sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_platform_altitude: name: nir_16_platform_altitude units: m sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_platform_altitude: name: nir_22_platform_altitude units: m sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_platform_altitude: name: ir_38_platform_altitude units: m sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_platform_altitude: name: wv_63_platform_altitude units: m sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_platform_altitude: name: wv_73_platform_altitude units: m sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_platform_altitude: name: ir_87_platform_altitude units: m sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_platform_altitude: name: ir_97_platform_altitude units: m sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_platform_altitude: name: ir_105_platform_altitude units: m sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_platform_altitude: name: ir_123_platform_altitude units: m sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_platform_altitude: name: ir_133_platform_altitude units: m sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_earth_sun_distance: name: vis_04_earth_sun_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_earth_sun_distance: name: vis_05_earth_sun_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_earth_sun_distance: name: vis_06_earth_sun_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_earth_sun_distance: name: vis_08_earth_sun_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_earth_sun_distance: name: vis_09_earth_sun_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_earth_sun_distance: name: nir_13_earth_sun_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_earth_sun_distance: name: 
nir_16_earth_sun_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_earth_sun_distance: name: nir_22_earth_sun_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_earth_sun_distance: name: ir_38_earth_sun_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_earth_sun_distance: name: wv_63_earth_sun_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_earth_sun_distance: name: wv_73_earth_sun_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_earth_sun_distance: name: ir_87_earth_sun_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_earth_sun_distance: name: ir_97_earth_sun_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_earth_sun_distance: name: ir_105_earth_sun_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_earth_sun_distance: name: ir_123_earth_sun_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_earth_sun_distance: name: ir_133_earth_sun_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi vis_04_sun_satellite_distance: name: vis_04_sun_satellite_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_05_sun_satellite_distance: name: vis_05_sun_satellite_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_06_sun_satellite_distance: name: vis_06_sun_satellite_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_08_sun_satellite_distance: name: vis_08_sun_satellite_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi vis_09_sun_satellite_distance: name: vis_09_sun_satellite_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_13_sun_satellite_distance: name: nir_13_sun_satellite_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_16_sun_satellite_distance: name: nir_16_sun_satellite_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi nir_22_sun_satellite_distance: name: nir_22_sun_satellite_distance units: km sensor: fci resolution: 1000 file_type: fci_l1c_fdhsi ir_38_sun_satellite_distance: name: ir_38_sun_satellite_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_63_sun_satellite_distance: name: wv_63_sun_satellite_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi wv_73_sun_satellite_distance: name: wv_73_sun_satellite_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_87_sun_satellite_distance: name: ir_87_sun_satellite_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_97_sun_satellite_distance: name: ir_97_sun_satellite_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_105_sun_satellite_distance: name: ir_105_sun_satellite_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_123_sun_satellite_distance: name: ir_123_sun_satellite_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi ir_133_sun_satellite_distance: name: ir_133_sun_satellite_distance units: km sensor: fci resolution: 2000 file_type: fci_l1c_fdhsi satpy-0.34.0/satpy/etc/readers/fci_l2_nc.yaml000066400000000000000000000655701420401153000210030ustar00rootroot00000000000000reader: name: fci_l2_nc short_name: FCI L2 NetCDF4 long_name: MTG FCI L2 (NetCDF4) description: Reader for EUMETSAT MTG FCI L2 files in NetCDF4 format. 
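# Illustrative usage sketch: the L2 products defined under "datasets" below (for
# example the CTTH fields) can be loaded with Satpy's Scene API. The file path is
# hypothetical; the reader and dataset names come from this file.
#
#     from glob import glob
#     from satpy import Scene
#
#     scn = Scene(reader="fci_l2_nc", filenames=glob("/data/fci_l2/*-CTTH--*.nc"))  # hypothetical path
#     scn.load(["cloud_top_height", "cloud_top_temperature"])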
sensors: [fci] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: # Filename examples # FCI_SIM_OCA_2L_2KM_{creation_time:%Y%m%d}_1700.nc # W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-2-ASR--FD------NC4E_C_EUMT_20201105031219_L2PF_DEV_20170410171000_20170410172000_N__T_0104_0000.nc nc_fci_oca: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: ['FCI_SIM_OCA_2L_2KM_{creation_time:%Y%m%d}_1700.nc', 'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-OCA--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc'] nc_fci_clm: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: ['FCI_SIM_CLM_2KM_{creation_time:%Y%m%d}_1700.nc', 'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CLM--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc'] nc_fci_test_clm: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: [ 'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CLMTest-{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc' ] nc_fci_ct: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: ['W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CT--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc'] nc_fci_cloud: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: ['W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CTTH--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc' ] nc_fci_asr: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler file_patterns: [ "W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-ASR--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc"] nc_fci_gii: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler file_patterns: ["W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-GII--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc"] nc_fci_toz: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler file_patterns: [ "W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-TOZ--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc" ] datasets: retrieved_cloud_phase: name: retrieved_cloud_phase file_type: nc_fci_oca file_key: retrieved_cloud_phase standard_name: thermodynamic_phase_of_cloud_water_particles_at_cloud_top fill_value: -999. 
retrieved_cloud_optical_thickness_upper_layer: name: retrieved_cloud_optical_thickness_upper_layer file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 0 standard_name: cloud_optical_depth fill_value: -999. retrieved_cloud_optical_thickness_lower_layer: name: retrieved_cloud_optical_thickness_lower_layer file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 1 standard_name: cloud_optical_depth fill_value: -999. retrieved_cloud_particle_effective_radius: name: retrieved_cloud_particle_effective_radius file_type: nc_fci_oca file_key: retrieved_cloud_particle_effective_radius standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top fill_value: -999. retrieved_cloud_top_temperature: name: retrieved_cloud_top_temperature file_type: nc_fci_oca file_key: retrieved_cloud_top_temperature standard_name: air_temperature_at_cloud_top fill_value: -999. retrieved_cloud_top_pressure_upper_layer: name: retrieved_cloud_top_pressure_upper_layer file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 0 standard_name: air_pressure_at_cloud_top fill_value: -999. retrieved_cloud_top_pressure_lower_layer: name: retrieved_cloud_top_pressure_lower_layer file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 1 standard_name: air_pressure_at_cloud_top fill_value: -999. retrieved_cloud_top_height: name: retrieved_cloud_top_height file_type: nc_fci_oca file_key: retrieved_cloud_top_height standard_name: height_at_cloud_top fill_value: -999. retrieval_error_cloud_optical_thickness_upper_layer: name: retrieval_error_cloud_optical_thickness_upper_layer file_type: nc_fci_oca file_key: retrieval_error_cloud_optical_thickness layer: 0 standard_name: cloud_optical_depth fill_value: -999. retrieval_error_cloud_optical_thickness_lower_layer: name: retrieval_error_cloud_optical_thickness_lower_layer file_type: nc_fci_oca file_key: retrieval_error_cloud_optical_thickness layer: 1 standard_name: cloud_optical_depth fill_value: -999. retrieval_error_cloud_particle_effective_radius: name: retrieval_error_cloud_particle_effective_radius file_type: nc_fci_oca file_key: retrieval_error_cloud_particle_effective_radius standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top fill_value: -999. retrieval_error_cloud_top_pressure_upper_layer: name: retrieval_error_cloud_top_pressure_upper_layer file_type: nc_fci_oca file_key: retrieval_error_cloud_top_pressure layer: 0 standard_name: air_pressure_at_cloud_top fill_value: -999. retrieval_error_cloud_top_pressure_lower_layer: name: retrieval_error_cloud_top_pressure_lower_layer file_type: nc_fci_oca file_key: retrieval_error_cloud_top_pressure layer: 1 standard_name: air_pressure_at_cloud_top fill_value: -999. quality_jmeas: name: quality_jmeas file_type: nc_fci_oca file_key: quality_jmeas standard_name: cost_function fill_value: -999. 
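# The layered OCA entries above (e.g. retrieved_cloud_top_pressure_upper_layer /
# _lower_layer) share one file_key and differ only in the "layer" index
# (0 = upper layer, 1 = lower layer), so each layer is exposed as its own Satpy
# dataset. Illustrative sketch with a hypothetical file path:
#
#     from glob import glob
#     from satpy import Scene
#
#     scn = Scene(reader="fci_l2_nc", filenames=glob("/data/fci_l2/*-OCA--*.nc"))
#     scn.load(["retrieved_cloud_top_pressure_upper_layer",
#               "retrieved_cloud_top_pressure_lower_layer"])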
cloud_state: name: cloud_state file_type: nc_fci_clm file_key: cloud_state standard_name: cloud_mask_classification fill_value: -999 mask_value: 0 quality_illumination: name: quality_illumination file_type: nc_fci_clm file_key: quality_illumination standard_name: illumination_classification fill_value: -999 mask_value: 0 quality_nwp_parameters: name: quality_nwp_parameters file_type: nc_fci_clm file_key: quality_nwp_parameters standard_name: quality_index fill_value: -999 mask_value: 0 quality_MTG_parameters: name: quality_MTG_parameters file_type: nc_fci_clm file_key: quality_MTG_parameters standard_name: quality_index fill_value: -999 mask_value: 0 quality_overall_processing: name: quality_overall_processing file_type: nc_fci_clm file_key: quality_overall_processing standard_name: quality_index fill_value: -999 mask_value: 0 # CLM Test cloud_test_sit1_flag: name: cloud_test_sit1_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_sit1_flag extract_byte: 0 fill_value: -999 mask_value: 0 cloud_test_cmt1_flag: name: cloud_test_cmt1_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt1_flag extract_byte: 1 fill_value: -999 mask_value: 0 cloud_test_cmt2_flag: name: cloud_test_cmt2_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt2_flag extract_byte: 2 fill_value: -999 mask_value: 0 cloud_test_cmt3_flag: name: cloud_test_cmt3_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt3_flag extract_byte: 3 fill_value: -999 mask_value: 0 cloud_test_cmt4_flag: name: cloud_test_cmt4_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt4_flag extract_byte: 4 fill_value: -999 mask_value: 0 cloud_test_cmt5_flag: name: cloud_test_cmt5_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt5_flag extract_byte: 5 fill_value: -999 mask_value: 0 cloud_test_cmt6_flag: name: cloud_test_cmt6_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt6_flag extract_byte: 6 fill_value: -999 mask_value: 0 cloud_test_cmt7_flag: name: cloud_test_cmt7_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt7_flag extract_byte: 7 fill_value: -999 mask_value: 0 cloud_test_cmt8_flag: name: cloud_test_cmt8_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt8_flag extract_byte: 8 fill_value: -999 mask_value: 0 cloud_test_cmt9_flag: name: cloud_test_cmt9_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt9_flag extract_byte: 9 fill_value: -999 mask_value: 0 cloud_test_cmt10_flag: name: cloud_test_cmt10_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt10_flag extract_byte: 10 fill_value: -999 mask_value: 0 cloud_test_cmt11_flag: name: cloud_test_cmt11_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt11_flag extract_byte: 11 fill_value: -999 mask_value: 0 cloud_test_cmt12_flag: name: cloud_test_cmt12_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt12_flag extract_byte: 12 fill_value: -999 mask_value: 0 cloud_test_cmt13_flag: name: cloud_test_cmt13_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt13_flag
extract_byte: 13 fill_value: -999 mask_value: 0 cloud_test_cmt14_flag: name: cloud_test_cmt14_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmt14_flag extract_byte: 14 fill_value: -999 mask_value: 0 cloud_test_opqt_flag: name: cloud_test_opqt_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_opqt_flag extract_byte: 15 fill_value: -999 mask_value: 0 cloud_test_cmrt1_flag: name: cloud_test_cmrt1_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmrt1_flag extract_byte: 16 fill_value: -999 mask_value: 0 cloud_test_cmrt2_flag: name: cloud_test_cmrt2_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmrt2_flag extract_byte: 17 fill_value: -999 mask_value: 0 cloud_test_cmrt3_flag: name: cloud_test_cmrt3_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmrt3_flag extract_byte: 18 fill_value: -999 mask_value: 0 cloud_test_cmrt4_flag: name: cloud_test_cmrt4_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmrt4_flag extract_byte: 19 fill_value: -999 mask_value: 0 cloud_test_cmrt5_flag: name: cloud_test_cmrt5_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmrt5_flag extract_byte: 20 fill_value: -999 mask_value: 0 cloud_test_cmrt6_flag: name: cloud_test_cmrt6_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_cmrt6_flag extract_byte: 21 fill_value: -999 mask_value: 0 cloud_test_dust_flag: name: cloud_test_dust_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_dust_flag extract_byte: 22 fill_value: -999 mask_value: 0 cloud_test_ash_flag: name: cloud_test_ash_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_ash_flag extract_byte: 23 fill_value: -999 mask_value: 0 cloud_test_dust_ash_flag: name: cloud_test_dust_ash_flag file_type: nc_fci_test_clm file_key: cloud_mask_test_flag standard_name: cloud_mask_test_dust_ash_flag extract_byte: 24 fill_value: -999 mask_value: 0 cloud_test_sit1: name: cloud_test_sit1 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_sit1 extract_byte: 0 fill_value: -999 mask_value: 0 cloud_test_cmt1: name: cloud_test_cmt1 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt1 extract_byte: 1 fill_value: -999 mask_value: 0 cloud_test_cmt2: name: cloud_test_cmt2 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt2 extract_byte: 2 fill_value: -999 mask_value: 0 cloud_test_cmt3: name: cloud_test_cmt3 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt3 extract_byte: 3 fill_value: -999 mask_value: 0 cloud_test_cmt4: name: cloud_test_cmt4 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt4 extract_byte: 4 fill_value: -999 mask_value: 0 cloud_test_cmt5: name: cloud_test_cmt5 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt5 extract_byte: 5 fill_value: -999 mask_value: 0 cloud_test_cmt6: name: cloud_test_cmt6 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt6 extract_byte: 6 fill_value: -999 mask_value: 0 cloud_test_cmt7: name: cloud_test_cmt7 
file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt7 extract_byte: 7 fill_value: -999 mask_value: 0 cloud_test_cmt8: name: cloud_test_cmt8 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt8 extract_byte: 8 fill_value: -999 mask_value: 0 cloud_test_cmt9: name: cloud_test_cmt9 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt9 extract_byte: 9 fill_value: -999 mask_value: 0 cloud_test_cmt10: name: cloud_test_cmt10 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt10 extract_byte: 10 fill_value: -999 mask_value: 0 cloud_test_cmt11: name: cloud_test_cmt11 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt11 extract_byte: 11 fill_value: -999 mask_value: 0 cloud_test_cmt12: name: cloud_test_cmt12 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt12 extract_byte: 12 fill_value: -999 mask_value: 0 cloud_test_cmt13: name: cloud_test_cmt13 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt13 extract_byte: 13 fill_value: -999 mask_value: 0 cloud_test_cmt14: name: cloud_test_cmt14 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmt14 extract_byte: 14 fill_value: -999 mask_value: 0 cloud_test_opqt: name: cloud_test_opqt file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_opqt extract_byte: 15 fill_value: -999 mask_value: 0 cloud_test_cmrt1: name: cloud_test_cmrt1 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmrt1 extract_byte: 16 fill_value: -999 mask_value: 0 cloud_test_cmrt2: name: cloud_test_cmrt2 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmrt2 extract_byte: 17 fill_value: -999 mask_value: 0 cloud_test_cmrt3: name: cloud_test_cmrt3 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmrt3 extract_byte: 18 fill_value: -999 mask_value: 0 cloud_test_cmrt4: name: cloud_test_cmrt4 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmrt4 extract_byte: 19 fill_value: -999 mask_value: 0 cloud_test_cmrt5: name: cloud_test_cmrt5 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmrt5 extract_byte: 20 fill_value: -999 mask_value: 0 cloud_test_cmrt6: name: cloud_test_cmrt6 file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_cmrt6 extract_byte: 21 fill_value: -999 mask_value: 0 cloud_test_dust: name: cloud_test_dust file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_dust extract_byte: 22 fill_value: -999 mask_value: 0 cloud_test_ash: name: cloud_test_ash file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_ash extract_byte: 23 fill_value: -999 mask_value: 0 cloud_test_dust_ash: name: cloud_test_dust_ash file_type: nc_fci_test_clm file_key: cloud_mask_test_result standard_name: cloud_mask_test_dust_ash extract_byte: 24 fill_value: -999 mask_value: 0 cloud_mask_cmrt6_result: name: cloud_mask_cmrt6_result file_type: nc_fci_test_clm file_key: cloud_mask_cmrt6_test_result standard_name: cloud_mask_cmrt6_result extract_byte: 0 # fill_value: -999 mask_value: 0 latitude: name: latitude file_key: 'latitude' # 
resolution: file_type: [nc_fci_gii, nc_fci_asr, nc_fci_toz] standard_name: latitude fill_value: -32767 mask_value: -32767 units: degree_north longitude: name: longitude file_key: 'longitude' # resolution: file_type: [nc_fci_gii, nc_fci_asr, nc_fci_toz] standard_name: longitude fill_value: -32767 mask_value: -32767 units: degree_east # GII k_index: name: k_index file_type: nc_fci_gii file_key: k_index standard_name: k_index fill_value: -32767 mask_value: -32767 coordinates: - longitude - latitude lifted_index: name: lifted_index file_type: nc_fci_gii file_key: lifted_index standard_name: lifted_index fill_value: -32767 mask_value: -32767 coordinates: - longitude - latitude percent_cloud_free: name: percent_cloud_free file_type: nc_fci_gii file_key: percent_cloud_free standard_name: percent_cloud_free fill_value: -127 mask_value: -127 coordinates: - longitude - latitude prec_water_high: name: prec_water_high file_type: nc_fci_gii file_key: prec_water_high standard_name: prec_water_high fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude prec_water_low: name: prec_water_low file_type: nc_fci_gii file_key: prec_water_low standard_name: prec_water_low fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude prec_water_mid: name: prec_water_mid file_type: nc_fci_gii file_key: prec_water_mid standard_name: prec_water_mid fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude prec_water_total: name: prec_water_total file_type: nc_fci_gii file_key: prec_water_total standard_name: prec_water_total fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude # FCI CT L2 cloud_phase: name: cloud_phase file_type: nc_fci_ct file_key: cloud_phase # standard_name: cloud_phase fill_value: 0 mask_value: 0 cloud_type: name: cloud_type file_type: nc_fci_ct file_key: cloud_type # standard_name: cloud_type fill_value: 0 mask_value: 0 # FCI CTTH Product cloud_top_aviation_height: name: cloud_top_aviation_height file_type: nc_fci_cloud file_key: cloud_top_aviation_height fill_value: 0 mask_value: 0 cloud_top_height: name: cloud_top_height file_type: nc_fci_cloud file_key: cloud_top_height fill_value: 0 mask_value: 0 cloud_top_pressure: name: cloud_top_pressure file_type: nc_fci_cloud file_key: cloud_top_pressure fill_value: 0 mask_value: 0 cloud_top_temperature: name: cloud_top_temperature file_type: nc_fci_cloud file_key: cloud_top_temperature fill_value: 0 mask_value: 0 effective_cloudiness: name: effective_cloudiness file_type: nc_fci_cloud file_key: effective_cloudiness fill_value: 0 mask_value: 0 # ASR bt_max: name: bt_max file_type: nc_fci_asr file_key: bt_max standard_name: bt_max fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude bt_mean: name: bt_mean file_type: nc_fci_asr file_key: bt_mean standard_name: bt_mean fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude bt_min: name: bt_min file_type: nc_fci_asr file_key: bt_min standard_name: bt_min fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude bt_std: name: bt_std file_type: nc_fci_asr file_key: bt_std standard_name: bt_std fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude radiance_max: name: radiance_max file_type: nc_fci_asr file_key: radiance_max standard_name: radiance_max fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude radiance_mean: name: radiance_mean file_type: nc_fci_asr file_key: radiance_mean standard_name: radiance_mean fill_value: 65535 mask_value: 65535 coordinates: - 
longitude - latitude radiance_min: name: radiance_min file_type: nc_fci_asr file_key: radiance_min standard_name: radiance_min fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude radiance_std: name: radiance_std file_type: nc_fci_asr file_key: radiance_std standard_name: radiance_std fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude reflectance_max: name: reflectance_max file_type: nc_fci_asr file_key: reflectance_max standard_name: reflectance_max fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude reflectance_mean: name: reflectance_mean file_type: nc_fci_asr file_key: reflectance_mean standard_name: reflectance_mean fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude reflectance_min: name: reflectance_min file_type: nc_fci_asr file_key: reflectance_min standard_name: reflectance_min fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude reflectance_std: name: reflectance_std file_type: nc_fci_asr file_key: reflectance_std standard_name: reflectance_std fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude quality_bt: name: quality_bt file_type: nc_fci_asr file_key: quality_bt standard_name: quality_bt fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude quality_reflectance: name: quality_reflectance file_type: nc_fci_asr file_key: quality_reflectance standard_name: quality_reflectance fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude quality_radiance: name: quality_radiance file_type: nc_fci_asr file_key: quality_radiance standard_name: quality_radiance fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude land_pixel_percent: name: land_pixel_percent file_type: nc_fci_asr file_key: land_pixel_percent standard_name: land_pixel_percent fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude water_pixel_percent: name: water_pixel_percent file_type: nc_fci_asr file_key: water_pixel_percent standard_name: water_pixel_percent fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude pixel_percentage: name: pixel_percentage file_type: nc_fci_asr file_key: pixel_percentage standard_name: pixel_percentage fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude percent_pixels: name: percent_pixels file_type: nc_fci_toz file_key: percent_pixels standard_name: percent_pixels fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude number_of_iterations: name: number_of_iterations file_type: nc_fci_toz file_key: number_of_iterations standard_name: number_of_iterations fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude retrieval_type: name: retrieval_type file_type: nc_fci_toz file_key: retrieval_type standard_name: retrieval_type fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude total_ozone: name: total_ozone file_type: nc_fci_toz file_key: total_ozone standard_name: total_ozone fill_value: 65535 mask_value: 65535 coordinates: - longitude - latitude satpy-0.34.0/satpy/etc/readers/generic_image.yaml000066400000000000000000000032641420401153000217330ustar00rootroot00000000000000reader: name: generic_image description: generic image reader reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [images] default_channels: [image] datasets: image: name: image file_type: graphic file_types: graphic: file_reader: !!python/name:satpy.readers.generic_image.GenericImageFileHandler file_patterns: - 
'{start_time:%Y%m%d_%H%M}{filename}.png' - '{start_time:%Y%m%d_%H%M}{filename}.PNG' - '{start_time:%Y%m%d_%H%M}{filename}.jpg' - '{start_time:%Y%m%d_%H%M}{filename}.jpeg' - '{start_time:%Y%m%d_%H%M}{filename}.JPG' - '{start_time:%Y%m%d_%H%M}{filename}.JPEG' - '{start_time:%Y%m%d_%H%M}{filename}.tif' - '{start_time:%Y%m%d_%H%M}{filename}.tiff' - '{start_time:%Y%m%d_%H%M}{filename}.TIF' - '{start_time:%Y%m%d_%H%M}{filename}.TIFF' - '{filename}{start_time:%Y%m%d_%H%M}.png' - '{filename}{start_time:%Y%m%d_%H%M}.PNG' - '{filename}{start_time:%Y%m%d_%H%M}.jpg' - '{filename}{start_time:%Y%m%d_%H%M}.jpeg' - '{filename}{start_time:%Y%m%d_%H%M}.JPG' - '{filename}{start_time:%Y%m%d_%H%M}.JPEG' - '{filename}{start_time:%Y%m%d_%H%M}.tif' - '{filename}{start_time:%Y%m%d_%H%M}.tiff' - '{filename}{start_time:%Y%m%d_%H%M}.TIF' - '{filename}{start_time:%Y%m%d_%H%M}.TIFF' - '{filename}.png' - '{filename}.PNG' - '{filename}.jpg' - '{filename}.jpeg' - '{filename}.JPG' - '{filename}.JPEG' - '{filename}.tif' - '{filename}.tiff' - '{filename}.TIF' - '{filename}.TIFF' satpy-0.34.0/satpy/etc/readers/geocat.yaml000066400000000000000000000166541420401153000204260ustar00rootroot00000000000000reader: description: CSPP Geo and GEOCAT file reader name: geocat reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [abi, ahi, goes_imager] file_types: level2: file_reader: !!python/name:satpy.readers.geocat.GEOCATFileHandler file_patterns: # GOES-16 ABI files (must be first to capture things correctly): - 'geocatL{processing_level:1d}.{platform_shortname}.{sector_id}.{start_time:%Y%j.%H%M%S}.hdf' - 'geocatL{processing_level:1d}.{platform_shortname}.{sector_id}.{start_time:%Y%j.%H%M%S}.nc' # Generic file pattern - 'geocatL{processing_level:1d}.{platform_shortname}.{start_time:%Y%j.%H%M%S}.hdf' - 'geocatL{processing_level:1d}.{platform_shortname}.{start_time:%Y%j.%H%M%S}.nc' # Himawari 8 files: - 'geocatL2.{platform_shortname}.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.hdf' - 'geocatL2.{platform_shortname}.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.nc' ahi_level1: file_reader: !!python/name:satpy.readers.geocat.GEOCATFileHandler file_patterns: # we could use the H8 pattern above, but then the datasets listed below # would always be "available" - 'geocatL1.HIMAWARI-8.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.hdf' - 'geocatL1.HIMAWARI-8.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.nc' datasets: # AHI Level 1 Datasets (need to define here so wavelengths can be used) B01: name: B01 sensor: ahi wavelength: [0.45,0.47,0.49] calibration: reflectance: file_key: himawari_8_ahi_channel_1_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B02: name: B02 sensor: ahi wavelength: [0.49,0.51,0.53] calibration: reflectance: file_key: himawari_8_ahi_channel_2_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B03: name: B03 sensor: ahi wavelength: [0.62,0.64,0.66] calibration: reflectance: file_key: himawari_8_ahi_channel_3_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B04: name: B04 sensor: ahi wavelength: [0.83, 0.85, 0.87] calibration: reflectance: file_key: himawari_8_ahi_channel_4_reflectance standard_name: 
toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B05: name: B05 sensor: ahi wavelength: [1.5, 1.6, 1.7] calibration: reflectance: file_key: himawari_8_ahi_channel_5_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B06: name: B06 sensor: ahi wavelength: [2.2, 2.3, 2.4] calibration: reflectance: file_key: himawari_8_ahi_channel_6_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B07: name: B07 sensor: ahi wavelength: [3.7, 3.9, 4.1] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_7_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B08: name: B08 sensor: ahi wavelength: [6.0, 6.2, 6.4] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_8_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B09: name: B09 sensor: ahi wavelength: [6.7, 6.9, 7.1] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_9_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B10: name: B10 sensor: ahi wavelength: [7.1, 7.3, 7.5] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_10_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B11: name: B11 sensor: ahi wavelength: [8.4, 8.6, 8.8] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_11_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B12: name: B12 sensor: ahi wavelength: [9.4, 9.6, 9.8] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_12_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B13: name: B13 sensor: ahi wavelength: [10.2, 10.4, 10.6] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_13_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_14_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_15_brightness_temperature standard_name: toa_brightness_temperature units: "K" # 
radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_16_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 satpy-0.34.0/satpy/etc/readers/ghrsst_l3c_sst.yaml000066400000000000000000000007561420401153000221240ustar00rootroot00000000000000reader: description: OSISAF SST GHRSST netCDF reader name: ghrsst_l3c_sst reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr/3, viirs] datasets: sea_surface_temperature: name: sea_surface_temperature file_type: ghrsst_osisaf_l2 resolution: 1000 file_types: ghrsst_osisaf_l2: file_reader: !!python/name:satpy.readers.ghrsst_l3c_sst.GHRSST_OSISAFL2 file_patterns: ['S-OSI_-FRA_-{satid:3s}_-NARSST_FIELD-{start_time:%Y%m%d%H00}Z.nc'] satpy-0.34.0/satpy/etc/readers/glm_l2.yaml000066400000000000000000000034611420401153000203300ustar00rootroot00000000000000reader: name: glm_l2 short_name: GLM Level 2 long_name: GOES-R GLM Level 2 description: > NetCDF4 reader for GOES-R series GLM data. Currently only gridded L2 files output from `gltmtools `_ are supported. sensors: [glm] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] # Typical filenames from Unidata THREDDS server: # http://thredds.unidata.ucar.edu/thredds/catalog/satellite/goes/east/ # products/GeostationaryLightningMapper/CONUS/current/catalog.html # OR_GLM-L2-GLMC-M3_G16_s20191920000000_e20191920001000_c20191920001380.nc file_types: glm_l2_imagery: file_reader: !!python/name:satpy.readers.glm_l2.NCGriddedGLML2 file_patterns: ['{system_environment:s}_{mission_id:3s}-L2-GLM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] # glm_l2_lcfa - add this with glmtools datasets: # Additional datasets discovered dynamically in satpy/readers/glm_l2.py flash_extent_density: name: flash_extent_density file_type: glm_l2_imagery group_extent_density: name: group_extent_density file_type: glm_l2_imagery flash_centroid_density: name: flash_centroid_density file_type: glm_l2_imagery group_centroid_density: name: group_centroid_density file_type: glm_l2_imagery average_flash_area: name: average_flash_area file_type: glm_l2_imagery minimum_flash_area: name: minimum_flash_area file_type: glm_l2_imagery average_group_area: name: average_group_area file_type: glm_l2_imagery total_energy: name: total_energy file_type: glm_l2_imagery satpy-0.34.0/satpy/etc/readers/goes-imager_hrit.yaml000066400000000000000000000131331420401153000223760ustar00rootroot00000000000000reader: name: goes-imager_hrit short_name: GOES Imager HRIT long_name: GOES Imager Level 1 (HRIT) description: Reader for GOES Imager Level 1 data in HRIT format sensors: [goes_imager] default_channels: [00_7, 03_9, 06_6, 10_7] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader # eg. 
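# Illustrative usage sketch: gridded GLM files like the example file name quoted
# above can be loaded with Satpy's Scene API; the gridded fields (e.g.
# flash_extent_density) are listed under "datasets" below.
#
#     from satpy import Scene
#
#     scn = Scene(reader="glm_l2",
#                 filenames=["OR_GLM-L2-GLMC-M3_G16_s20191920000000_e20191920001000_c20191920001380.nc"])
#     scn.load(["flash_extent_density"])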
# L-000-MSG3__-GOES13______-06_6_075W-000005___-201703261200-__ # L-000-MSG3__-GOES13______-06_6_075W-PRO______-201703261200-__ file_types: HRIT_00_7: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_00] expected_segments: 7 HRIT_00_7_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_00] expected_segments: 7 HRIT_03_9: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_03] expected_segments: 7 HRIT_03_9_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_03] expected_segments: 7 HRIT_06_6: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_06] expected_segments: 7 HRIT_06_6_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_06] expected_segments: 7 HRIT_10_7: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_10] expected_segments: 7 HRIT_10_7_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_10] expected_segments: 7 HRIT_PRO_00: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_03: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_06: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_10: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 datasets: '00_7': name: '00_7' resolution: 3000 wavelength: [0.55, 0.7, 0.75] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: 
"%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_00_7, HRIT_00_7_C] '03_9': name: '03_9' resolution: 3000 wavelength: [3.8, 3.9, 4.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_03_9, HRIT_03_9_C] '06_6': name: '06_6' resolution: 3000 wavelength: [6.5, 6.6, 7.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_06_6, HRIT_06_6_C] '10_7': name: '10_7' resolution: 3000 wavelength: [10.2, 10.7, 11.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_10_7, HRIT_10_7_C] satpy-0.34.0/satpy/etc/readers/goes-imager_nc.yaml000066400000000000000000000223711420401153000220340ustar00rootroot00000000000000reader: name: goes-imager_nc short_name: GOES Imager netCDF long_name: GOES Imager Level 1 (netCDF) description: > Reader for GOES Imager Level 1 data in netCDF format (from both NOAA CLASS and EUMETCast) References: - GOES 8-12: https://goes.gsfc.nasa.gov/text/databook/databook.pdf, page 20 ff. - GOES 13-15: https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf, chapter 3. sensors: [goes_imager] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_goes_00_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_01.nc'] eum_nc_goes_00_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_1.nc'] requires: [eum_nc_goes_geo] nc_goes_03_9: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_02.nc'] eum_nc_goes_03_9: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_2.nc'] requires: [eum_nc_goes_geo] nc_goes_06_5: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes12.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes13.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes14.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes15.{start_time:%Y.%j.%H%M%S}.BAND_03.nc'] eum_nc_goes_06_5: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_3.nc'] requires: [eum_nc_goes_geo] nc_goes_06_8: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes08.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes09.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes10.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes11.{start_time:%Y.%j.%H%M%S}.BAND_03.nc'] nc_goes_10_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_04.nc'] eum_nc_goes_10_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_4.nc'] requires: [eum_nc_goes_geo] nc_goes_12_0: file_reader: 
!!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes08.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes09.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes10.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes11.{start_time:%Y.%j.%H%M%S}.BAND_05.nc'] eum_nc_goes_12_0: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_5.nc'] requires: [eum_nc_goes_geo] nc_goes_13_3: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes12.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes13.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes14.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes15.{start_time:%Y.%j.%H%M%S}.BAND_06.nc'] eum_nc_goes_13_3: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_6.nc'] requires: [eum_nc_goes_geo] eum_nc_goes_geo: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMGEONCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_geo.nc'] datasets: '00_7': name: '00_7' wavelength: [0.52, 0.65, 0.71] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: comment: Angle of incident radiation and annual variation of the earth-sun distance is not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. units: "%" coordinates: - longitude_00_7 - latitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_00_7] '03_9': name: '03_9' wavelength: [3.73, 3.9, 4.07] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_03_9 - latitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_03_9] '06_5': name: '06_5' wavelength: [5.8, 6.5, 7.3] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_06_5 - latitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_06_5] '06_8': name: '06_8' wavelength: [6.5, 6.75, 7.0] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_06_8 - latitude_06_8 file_type: nc_goes_06_8 '10_7': name: '10_7' wavelength: [10.2, 10.7, 11.2] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_10_7 - latitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_10_7] '12_0': name: '12_0' wavelength: [11.5, 12.0, 12.5] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_12_0 - latitude_12_0 file_type: nc_goes_12_0 '13_3': name: '13_3' wavelength: [13.0, 13.35, 13.7] calibration: counts: standard_name: counts units: 1 radiance: 
standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_13_3 - latitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_13_3] longitude_00_7: name: longitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_00_7: name: latitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_03_9: name: longitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_03_9: name: latitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_06_5: name: longitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_06_5: name: latitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_06_8: name: longitude_06_8 file_type: nc_goes_06_8 standard_name: longitude units: degrees_east latitude_06_8: name: latitude_06_8 file_type: nc_goes_06_8 standard_name: latitude units: degrees_north longitude_10_7: name: longitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_10_7: name: latitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_12_0: name: longitude_12_0 file_type: nc_goes_12_0 standard_name: longitude units: degrees_east latitude_12_0: name: latitude_12_0 file_type: nc_goes_12_0 standard_name: latitude units: degrees_north longitude_13_3: name: longitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_13_3: name: latitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_geo] standard_name: latitude units: degrees_north satpy-0.34.0/satpy/etc/readers/gpm_imerg.yaml000066400000000000000000000034411420401153000211200ustar00rootroot00000000000000reader: description: HDF5 reader for the GPM/IMERG data name: gpm_imerg sensors: [multiple] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: gpm_imerg_h5: file_reader: !!python/name:satpy.readers.gpm_imerg.Hdf5IMERG file_patterns: ['3B-HHR.MS.MRG.3IMERG.{date:%Y%m%d}-S{start_time:%H%M%S}-E{end_time:%H%M%S}.{sequence:4s}.{version_id:4s}.HDF5', '3B-HHR-E.MS.MRG.3IMERG.{date:%Y%m%d}-S{start_time:%H%M%S}-E{end_time:%H%M%S}.{sequence:4s}.{version_id:4s}.RT-H5', '3B-HHR-L.MS.MRG.3IMERG.{date:%Y%m%d}-S{start_time:%H%M%S}-E{end_time:%H%M%S}.{sequence:4s}.{version_id:4s}.RT-H5'] datasets: HQobservationTime: name: HQobservationTime resolution: 0.1 file_type: gpm_imerg_h5 units: minutes HQprecipitation: name: HQprecipitation resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr HQprecipSource: name: HQprecipSource resolution: 0.1 file_type: gpm_imerg_h5 IRkalmanFilterWeight: name: IRkalmanFilterWeight resolution: 0.1 file_type: gpm_imerg_h5 IRprecipitation: name: IRprecipitation resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr precipitationCal: name: precipitationCal resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr precipitationQualityIndex: name: precipitationQualityIndex resolution: 0.1 file_type: gpm_imerg_h5 precipitationUncal: name: precipitationUncal resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr probabilityLiquidPrecipitation: name: probabilityLiquidPrecipitation resolution: 0.1 file_type: gpm_imerg_h5 units: "%" randomError: name:
randomError resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr satpy-0.34.0/satpy/etc/readers/grib.yaml000066400000000000000000000024401420401153000200730ustar00rootroot00000000000000reader: description: GRIB2 file reader name: grib reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [unknown] data_identification_keys: name: required: true level: resolution: modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple file_types: grib: file_reader: !!python/name:satpy.readers.grib.GRIBFileHandler file_patterns: # NOAA NCEP: # gfs.t18z.sfluxgrbf106.grib2 - '{stem}.grib2' - 'gfs{stem}.f{forecast_time:3d}' - 'gdas{stem}.f{forecast_time:3d}' # EUMETSAT: # S-OSI_-NOR_-MULT-AHLDLI_FIELD-201805011200Z.grb.gz - '{stem}.grb' - '{stem}.grb2' # NWCSAF input file name format: # S_NWC_NWP_2017-03-14T00:00:00Z_002.grib - 'S_NWC_NWP_{start_time:%Y-%m-%dT%H:%M:%S}Z_{forecast_time:3d}.grib' # grib_ncep: # file_reader: !!python/name:satpy.readers.grib.GRIBFileHandler # file_patterns: # # NOAA NCEP: # # gfs.t18z.sfluxgrbf106.grib2 # - '{model_name}.t{model_hour:2d}z.{field_set}.grib2' # keys: # shortName: # id_key: name # values: ['gh', 't', 'u', 'v', 'r', 'icaht'] # level: # id_key: level # values: [0, 100, 125, 150, 175, 200, 225, 250, 275, 300, 350, 400, 450, 500, 600, 700, 750, 850] satpy-0.34.0/satpy/etc/readers/hsaf_grib.yaml000066400000000000000000000026401420401153000210760ustar00rootroot00000000000000reader: description: Reader for Hydrology SAF products name: hsaf_grib reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [hsaf] file_types: hsafgrib: file_reader: !!python/name:satpy.readers.hsaf_grib.HSAFFileHandler file_patterns: ['h03_{sensing_time:%Y%m%d_%H%M}_{region:3s}.grb', 'h05_{sensing_time:%Y%m%d_%H%M}_{accum_time:2s}_{region:3s}.grb', 'h03B_{sensing_time:%Y%m%d_%H%M}_{region:3s}.grb', 'S-HSAF-h03B_{sensing_time:%Y%m%d_%H%M}_{region:3s}.grb', 'h05B_{sensing_time:%Y%m%d_%H%M}_{accum_time:2s}_{region:3s}.grb', 'S-HSAF-h05B_{sensing_time:%Y%m%d_%H%M}_{accum_time:2s}_{region:3s}.grb'] datasets: h03: name: h03 msg_name: irrate sensor: hsaf resolution: 3000 standard_name: instantaneous_rainfall_rate units: kg m-2 s-1 file_type: hsafgrib h03B: name: h03B msg_name: irrate sensor: hsaf resolution: 3000 standard_name: instantaneous_rainfall_rate units: kg m-2 s-1 file_type: hsafgrib h05: name: h05 msg_name: accumrain sensor: hsaf resolution: 3000 standard_name: accumulated_rainfall_rate units: kg m-2 file_type: hsafgrib h05B: name: h05B msg_name: accumrain sensor: hsaf resolution: 3000 standard_name: accumulated_rainfall_rate units: kg m-2 file_type: hsafgrib satpy-0.34.0/satpy/etc/readers/hy2_scat_l2b_h5.yaml000066400000000000000000000070451420401153000220250ustar00rootroot00000000000000reader: description: Generic Eumetsat HY2 L2B H5 Wind field Reader name: hy2_scat_l2b_h5 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [scatterometer] default_datasets: file_types: hy2_scat_l2b_h5: file_reader: !!python/name:satpy.readers.hy2_scat_l2b_h5.HY2SCATL2BH5FileHandler file_patterns: - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,{platform_name}+SM_C_EUMP_{start_date:%Y%m%d------}_{orbit_number}_o_250_{product_level}.h5' - '{platform_name}_OPER_SCA_{product_level}_OR_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number}_pwp_250_07_owv.h5' datasets: wvc_lon: name: wvc_lon resolution: 25000 file_type: hy2_scat_l2b_h5 standard_name: longitude units: degree wvc_lat: name: wvc_lat resolution: 25000 file_type: 
hy2_scat_l2b_h5 standard_name: latitude units: degree wind_speed_selection: name: wind_speed_selection resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: wind_speed sensor: scatterometer wind_dir_selection: name: wind_dir_selection resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: wind_dir sensor: scatterometer wind_speed: name: wind_speed resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: wind_speed sensor: scatterometer wind_dir: name: wind_dir resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: wind_dir sensor: scatterometer max_likelihood_est: name: max_likelihood_est resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: max_likelihood_est sensor: scatterometer model_speed: name: model_speed resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: model_speed sensor: scatterometer model_dir: name: model_dir resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: model_dir sensor: scatterometer num_ambigs: name: num_ambigs resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: num_ambigs sensor: scatterometer num_in_aft: name: num_in_aft resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: num_in_aft sensor: scatterometer num_in_fore: name: num_in_fore resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: num_in_fore sensor: scatterometer num_out_aft: name: num_out_aft resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: num_out_aft sensor: scatterometer num_out_fore: name: num_out_fore resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: num_out_fore sensor: scatterometer wvc_quality_flag: name: wvc_quality_flag resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: wvc_quality_flag sensor: scatterometer wvc_row_time: name: wvc_row_time file_type: hy2_scat_l2b_h5 standard_name: wvc_row_time wvc_selection: name: wvc_selection resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: wvc_selection sensor: scatterometer satpy-0.34.0/satpy/etc/readers/iasi_l2.yaml000066400000000000000000000073261420401153000205020ustar00rootroot00000000000000reader: description: Reader for IASI L2 files name: iasi_l2 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [iasi] default_datasets: datasets: longitude: name: longitude file_type: iasi_l2_hdf5 standard_name: longitude units: "degrees" resolution: 12000 latitude: name: latitude file_type: iasi_l2_hdf5 standard_name: latitude units: "degrees" resolution: 12000 ozone_mixing_ratio: name: ozone_mixing_ratio file_type: iasi_l2_hdf5 units: "kg/kg" resolution: 12000 coordinates: [longitude, latitude] ozone_mixing_ratio_quality: name: ozone_mixing_ratio_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] ozone_total_column: name: ozone_total_column file_type: iasi_l2_hdf5 units: "kg/m^2" resolution: 12000 coordinates: [longitude, latitude] pressure: name: pressure file_type: iasi_l2_hdf5 units: "hPa" resolution: 12000 coordinates: [longitude, latitude] pressure_quality: name: pressure_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, 
latitude] temperature: name: temperature file_type: iasi_l2_hdf5 units: "K" resolution: 12000 coordinates: [longitude, latitude] temperature_quality: name: temperature_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] water_mixing_ratio: name: water_mixing_ratio file_type: iasi_l2_hdf5 units: "kg/kg" resolution: 12000 coordinates: [longitude, latitude] water_total_column: name: water_total_column file_type: iasi_l2_hdf5 units: "mm" resolution: 12000 coordinates: [longitude, latitude] surface_skin_temperature: name: surface_skin_temperature file_type: iasi_l2_hdf5 units: "K" resolution: 12000 coordinates: [longitude, latitude] surface_skin_temperature_quality: name: surface_skin_temperature_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] emissivity: name: emissivity file_type: iasi_l2_hdf5 units: "1" resolution: 12000 coordinates: [longitude, latitude] emissivity_quality: name: emissivity_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] water_mixing_ratio_quality: name: water_mixing_ratio_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] satellite_azimuth_angle: name: satellite_azimuth_angle file_type: iasi_l2_hdf5 units: "degrees" resolution: 12000 coordinates: [longitude, latitude] satellite_zenith_angle: name: satellite_zenith_angle file_type: iasi_l2_hdf5 units: "degrees" resolution: 12000 coordinates: [longitude, latitude] sensing_time: name: sensing_time file_type: iasi_l2_hdf5 units: "ut_time" resolution: 12000 coordinates: [longitude, latitude] solar_azimuth_angle: name: solar_azimuth_angle file_type: iasi_l2_hdf5 units: "degrees" resolution: 12000 coordinates: [longitude, latitude] solar_zenith_angle: name: solar_zenith_angle file_type: iasi_l2_hdf5 units: "degrees" resolution: 12000 coordinates: [longitude, latitude] file_types: iasi_l2_hdf5: file_reader: !!python/name:satpy.readers.iasi_l2.IASIL2HDF5 file_patterns: ["W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}_C_EUMS_{processing_time:%Y%m%d%H%M%S}_IASI_PW3_02_{platform_id}_{start_time:%Y%m%d%H%M%S}Z_{end_time:%Y%m%d%H%M%S}Z.hdf"] satpy-0.34.0/satpy/etc/readers/iasi_l2_so2_bufr.yaml000066400000000000000000000147101420401153000222760ustar00rootroot00000000000000reader: name: iasi_l2_so2_bufr short_name: IASI L2 SO2 BUFR long_name: METOP IASI Level 2 SO2 BUFR description: > Reader for IASI L2 files sensors: [iasi] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader default_datasets: file_types: iasi_l2_so2_bufr: file_reader: !!python/name:satpy.readers.iasi_l2_so2_bufr.IASIL2SO2BUFR file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_EUMC_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_{level}.bin"] datasets: year: name: year file_type: iasi_l2_so2_bufr units: "year" resolution: 12000 coordinates: [longitude, latitude] key: '#1#year' fill_value: -1.e+100 month: name: month file_type: iasi_l2_so2_bufr units: "month" resolution: 12000 coordinates: [longitude, latitude] key: '#1#month' fill_value: -1.e+100 day: name: day file_type: iasi_l2_so2_bufr units: "day" resolution: 12000 coordinates: [longitude, latitude] key: '#1#day' fill_value: -1.e+100 hour: name: hour file_type: iasi_l2_so2_bufr units: "hour" resolution: 12000 coordinates: [longitude, latitude] key: '#1#hour' fill_value: -1.e+100 minute: name: minute file_type: iasi_l2_so2_bufr units: 
"minute" resolution: 12000 coordinates: [longitude, latitude] key: '#1#minute' fill_value: -1.e+100 second: name: second file_type: iasi_l2_so2_bufr units: "second" resolution: 12000 coordinates: [longitude, latitude] key: '#1#second' fill_value: -1.e+100 orbit_number: name: orbit_number file_type: iasi_l2_so2_bufr units: "" resolution: 12000 coordinates: [longitude, latitude] key: '#1#orbitNumber' fill_value: -1.e+100 scanline_number: name: scanline_number file_type: iasi_l2_so2_bufr units: "" resolution: 12000 coordinates: [longitude, latitude] key: '#1#scanLineNumber' fill_value: -1.e+100 latitude: name: latitude file_type: iasi_l2_so2_bufr standard_name: latitude units: "degrees" resolution: 12000 key: '#1#latitude' fill_value: -1.e+100 longitude: name: longitude file_type: iasi_l2_so2_bufr standard_name: longitude units: "degrees" resolution: 12000 key: '#1#longitude' fill_value: -1.e+100 field_of_view_number: name: field_of_view_number file_type: iasi_l2_so2_bufr units: "" resolution: 12000 coordinates: [longitude, latitude] key: '#1#fieldOfViewNumber' fill_value: -1.e+100 satellite_zenith_angle: name: satellite_zenith_angle file_type: iasi_l2_so2_bufr units: "degrees" resolution: 12000 coordinates: [longitude, latitude] key: '#1#satelliteZenithAngle' fill_value: -1.e+100 satellite_azimuth_angle: name: satellite_azimuth_angle file_type: iasi_l2_so2_bufr units: "degrees" resolution: 12000 coordinates: [longitude, latitude] key: '#1#bearingOrAzimuth' fill_value: -1.e+100 solar_zenith_angle: name: solar_zenith_angle file_type: iasi_l2_so2_bufr units: "degrees" resolution: 12000 coordinates: [longitude, latitude] key: '#1#solarZenithAngle' fill_value: -1.e+100 solar_azimuth_angle: name: solar_azimuth_angle file_type: iasi_l2_so2_bufr units: "degrees" resolution: 12000 coordinates: [longitude, latitude] key: '#1#solarAzimuth' fill_value: -1.e+100 so2_quality_flag: name: so2_quality_flag file_type: iasi_l2_so2_bufr units: "" resolution: 12000 coordinates: [longitude, latitude] key: '#1#generalRetrievalQualityFlagForSo2' fill_value: -1.e+100 so2_height_1: name: so2_height_1 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#1#sulphurDioxide' fill_value: -1.e+100 so2_height_2: name: so2_height_2 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#2#sulphurDioxide' fill_value: -1.e+100 so2_height_3: name: so2_height_3 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#3#sulphurDioxide' fill_value: -1.e+100 so2_height_4: name: so2_height_4 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#4#sulphurDioxide' fill_value: -1.e+100 so2_height_5: name: so2_height_5 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#5#sulphurDioxide' fill_value: -1.e+100 so2_height_6: name: so2_height_6 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#6#sulphurDioxide' fill_value: -1.e+100 height_1: name: height_1 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#1#height' fill_value: -1.e+100 height_2: name: height_2 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#2#height' fill_value: -1.e+100 height_3: name: height_3 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: 
[longitude, latitude] key: '#3#height' fill_value: -1.e+100 height_4: name: height_4 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#4#height' fill_value: -1.e+100 height_5: name: height_5 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#5#height' fill_value: -1.e+100 height_6: name: height_6 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#6#height' fill_value: -1.e+100 height_7: name: height_7 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#7#height' fill_value: -1.e+100 brightnessTemperatureRealPart: name: brightnessTemperatureRealPart file_type: iasi_l2_so2_bufr units: "K" resolution: 12000 coordinates: [longitude, latitude] key: '#1#brightnessTemperatureRealPart' fill_value: -1.e+100 satpy-0.34.0/satpy/etc/readers/jami_hrit.yaml000066400000000000000000000070021420401153000211150ustar00rootroot00000000000000reader: name: jami_hrit short_name: JAMI HRIT long_name: MTSAT-1R JAMI Level 1 (HRIT) description: > Reader for MTSAT-1R JAMI data in JMA HRIT format. Note that there exist two versions of the dataset. A segmented (data split into multiple files) and a non-segmented version (all data in one file). References: - https://www.wmo-sat.info/oscar/instruments/view/236 - http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html sensors: [jami] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: hrit_vis: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS' hrit_ir1: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1' hrit_ir2: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2' hrit_ir3: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3' hrit_ir4: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4' datasets: VIS: name: VIS sensor: jami wavelength: [0.55, 0.675, 0.90] resolution: 1000 calibration: counts: standard_name: counts units: 1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: hrit_vis IR1: name: IR1 sensor: jami wavelength: [10.3, 10.8, 11.3] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: hrit_ir1 IR2: name: IR2 sensor: jami wavelength: [11.5, 12.0, 12.5] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: 
hrit_ir2 IR3: name: IR3 sensor: jami wavelength: [6.5, 6.75, 7.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: hrit_ir3 IR4: name: IR4 sensor: jami wavelength: [3.5, 3.75, 4.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: hrit_ir4 satpy-0.34.0/satpy/etc/readers/li_l2.yaml000066400000000000000000000104101420401153000201450ustar00rootroot00000000000000reader: description: Generic MTG LI L2 product reader name: li_l2 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [li] default_datasets: datasets: # There is only one dataset per file. The following list provide only the optional # contained products, which are dependend on the file name. af: name: af sensor: li resolution: 2000 file_type: li_af afa: name: afa sensor: li resolution: 2000 file_type: li_afa afr: name: afr sensor: li resolution: 2000 file_type: li_afr lgr: name: lgr sensor: li file_type: li_lgr lef: name: lef sensor: li file_type: li_lef lfl: name: lfl sensor: li file_type: li_lfl # Source: LI L2 Product User Guide [LIL2PUG] Draft version -- 2016 file_types: li_l2: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-{type}-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_af: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-AF-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_afa: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-AFA-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_afr: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-AFR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_lgr: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler file_patterns: 
['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-LGR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_lef: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-LEF-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_lfl: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-LFL-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] satpy-0.34.0/satpy/etc/readers/maia.yaml000066400000000000000000000051241420401153000200610ustar00rootroot00000000000000reader: description: MAIA Reader name: maia reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs, avhrr] file_types: maia: file_reader: !!python/name:satpy.readers.maia.MAIAFileHandler # example of file name: viiCT_npp_DB_20121010_S132824_E132947_ASC_D_La050_Lo-012_00001.h5 file_patterns: - 'viiCT_{platform_name}_{origin}_{start_time:%Y%m%d_S%H%M%S}_E{end_time:%H%M%S}_{orbit_type}_La{center_lat}_Lo{center_lon}_{orbit:5d}.h5' - 'avhCT_{platform_name}_{origin}_{start_time:%Y%m%d_S%H%M%S}_E{end_time:%H%M%S}_{orbit_type}_La{center_lat}_Lo{center_lon}_{orbit:5d}.h5' datasets: Mask_in: name: Mask_in file_type: maia coordinates: [Longitude, Latitude ] Latitude: name: Latitude file_type: maia units: "degrees" standard_name: latitude Longitude: name: Longitude file_type: maia units: "degrees" standard_name: longitude Alt_surface: name: Alt_surface units: m file_type: maia coordinates: [Longitude, Latitude ] CloudType: name: CloudType file_type: maia coordinates: [Longitude, Latitude ] CloudMask: name: CloudMask file_type: maia coordinates: [Longitude, Latitude ] # CloudType and CloudMask are bitfields # description of sub fields ct: name: ct file_type: maia coordinates: [Longitude, Latitude ] # Cloud Mask On Pixel cma: name: cma file_type: maia coordinates: [Longitude, Latitude ] # Cloud Mask Confidence cma_conf: name: cma_conf file_type: maia coordinates: [Longitude, Latitude ] # Cloud Mask Quality cma_qual: name: CM_qual file_type: maia coordinates: [Longitude, Latitude ] land_water_background: name: land_water_background file_type: maia coordinates: [Longitude, Latitude ] opaq_cloud: name: opaq_cloud file_type: maia coordinates: [Longitude, Latitude ] CloudTopPres: name: CloudTopPres units: hPa file_type: maia coordinates: [Longitude, Latitude ] CloudTopTemp: name: CloudTopTemp units: degree celcius file_type: maia coordinates: [Longitude, Latitude ] Mask_ind: name: Mask_ind file_type: maia coordinates: [Longitude, Latitude ] fov_qual: name: fov_qual file_type: maia 
coordinates: [Longitude, Latitude ] Tsurf: name: Tsurf units: degrees celsius file_type: maia coordinates: [Longitude, Latitude ] Sat_zenith: name: Sat_zenith units: degrees file_type: maia coordinates: [Longitude, Latitude ] satpy-0.34.0/satpy/etc/readers/mersi2_l1b.yaml000066400000000000000000000451201420401153000211110ustar00rootroot00000000000000reader: description: FY-3D Medium Resolution Spectral Imager 2 (MERSI-2) L1B Reader name: mersi2_l1b sensors: [mersi-2] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: mersi2_l1b_1000: file_reader: !!python/name:satpy.readers.mersi2_l1b.MERSI2L1B rows_per_scan: 10 file_patterns: # tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_1000M_L1B.{ext}' # FY3D_20190808_130200_130300_8965_MERSI_1000M_L1B.HDF - '{platform_shortname}_{start_time:%Y%m%d_%H%M%S}_{end_time:%H%M%S}_{orbit_number:s}_MERSI_1000M_L1B.{ext}' # From National Meteorological Satellite Center - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' # Generic - '{filename_mda}_MERSI_1000M_L1B.{ext:3s}' mersi2_l1b_250: file_reader: !!python/name:satpy.readers.mersi2_l1b.MERSI2L1B rows_per_scan: 40 file_patterns: # tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_0250M_L1B.{ext}' # From National Meteorological Satellite Center - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' # Generic - '{filename_mda}_MERSI_0250M_L1B.{ext:3s}' mersi2_l1b_1000_geo: file_reader: !!python/name:satpy.readers.mersi2_l1b.MERSI2L1B rows_per_scan: 10 file_patterns: # tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_GEO1K_L1B.{ext}' # FY3D_20190808_130200_130300_8965_MERSI_GEO1K_L1B.HDF - '{platform_shortname}_{start_time:%Y%m%d_%H%M%S}_{end_time:%H%M%S}_{orbit_number:s}_MERSI_GEO1K_L1B.{ext}' # From National Meteorological Satellite Center - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEO1K_MS.{ext:3s}' # Generic - '{filename_mda}_MERSI_GEO1K_L1B.{ext:3s}' mersi2_l1b_250_geo: file_reader: !!python/name:satpy.readers.mersi2_l1b.MERSI2L1B rows_per_scan: 40 file_patterns: # tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_GEOQK_L1B.{ext}' # From National Meteorological Satellite Center - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEOQK_MS.{ext:3s}' # Generic - '{filename_mda}_MERSI_GEOQK_L1B.{ext:3s}' # NOTE: OSCAR website currently has bands in wavelength order # https://www.wmo-sat.info/oscar/instruments/view/279 # The order below is by the wavelength in the input files # The slides at the below link have band 5 and 19 swapped: # http://www.wmo.int/pages/prog/sat/meetings/documents/IPET-SUP-4_Doc_05-04_FY-3D-ppt.pdf datasets: '1': name: '1' wavelength: [0.445, 0.470, 0.495] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 0 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_RefSB_b1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 0 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1"
standard_name: counts '2': name: '2' wavelength: [0.525, 0.550, 0.575] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 1 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_RefSB_b2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '3': name: '3' wavelength: [0.625, 0.650, 0.675] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 2 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_RefSB_b3 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '4': name: '4' wavelength: [0.840, 0.865, 0.890] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 3 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_RefSB_b4 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '5': name: '5' wavelength: [1.37, 1.38, 1.39] # or 30nm bandwidth? 
resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 0 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '6': name: '6' wavelength: [1.615, 1.640, 1.665] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '7': name: '7' wavelength: [2.105, 2.130, 2.155] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '8': name: '8' wavelength: [0.402, 0.412, 0.422] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '9': name: '9' wavelength: [0.433, 0.443, 0.453] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 4 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '10': name: '10' wavelength: [0.480, 0.490, 0.500] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 5 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '11': name: '11' wavelength: [0.545, 0.555, 0.565] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 6 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '12': name: '12' wavelength: [0.660, 0.670, 0.680] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 7 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: 
counts '13': name: '13' wavelength: [0.699, 0.709, 0.719] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 8 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '14': name: '14' wavelength: [0.736, 0.746, 0.756] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 9 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '15': name: '15' wavelength: [0.855, 0.865, 0.875] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 10 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '16': name: '16' wavelength: [0.895, 0.905, 0.915] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 11 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '17': name: '17' wavelength: [0.926, 0.936, 0.946] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 12 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '18': name: '18' wavelength: [0.915, 0.940, 0.965] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 13 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '19': name: '19' wavelength: [1.23, 1.24, 1.25] # or 1.03um? 
resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 14 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts # Not sure how to get radiance for BT channels '20': name: '20' wavelength: [3.710, 3.800, 3.890] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_Emissive band_index: 0 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 0 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '21': name: '21' wavelength: [3.9725, 4.050, 4.1275] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_Emissive band_index: 1 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 1 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '22': name: '22' wavelength: [6.950, 7.20, 7.450] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_Emissive band_index: 2 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 2 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '23': name: '23' wavelength: [8.400, 8.550, 8.700] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_Emissive band_index: 3 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 3 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '24': name: '24' wavelength: [10.300, 10.800, 11.300] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_Emissive band_index: 0 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 4 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_Emissive_b24 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 4 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '25': name: '25' wavelength: [11.500, 12.000, 12.500] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_Emissive band_index: 1 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 5 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_Emissive_b25 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 5 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts longitude: name: longitude units: degrees_east standard_name: longitude resolution: 1000: file_type: mersi2_l1b_1000_geo file_key: Geolocation/Longitude 250: file_type: mersi2_l1b_250_geo file_key: Longitude latitude: name: latitude units: degrees_north standard_name: latitude resolution: 1000: file_type: mersi2_l1b_1000_geo file_key: Geolocation/Latitude 250: file_type: mersi2_l1b_250_geo file_key: Latitude solar_zenith_angle: name: solar_zenith_angle units: degree standard_name: solar_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: mersi2_l1b_1000_geo file_key: 
Geolocation/SolarZenith solar_azimuth_angle: name: solar_azimuth_angle units: degree standard_name: solar_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: mersi2_l1b_1000_geo file_key: Geolocation/SolarAzimuth satellite_zenith_angle: name: satellite_zenith_angle units: degree standard_name: sensor_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: mersi2_l1b_1000_geo file_key: Geolocation/SensorZenith satellite_azimuth_angle: name: satellite_azimuth_angle units: degree standard_name: sensor_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: mersi2_l1b_1000_geo file_key: Geolocation/SensorAzimuth satpy-0.34.0/satpy/etc/readers/mhs_l1c_aapp.yaml000066400000000000000000000102341420401153000214770ustar00rootroot00000000000000reader: name: mhs_l1c_aapp description: AAPP l1c Reader for MHS data reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mhs,] default_channels: [] data_identification_keys: name: required: true frequency_double_sideband: type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyRange resolution: polarization: enum: - H - V calibration: enum: - brightness_temperature transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple datasets: '1': name: '1' frequency_range: central: 89. bandwidth: 2.8 unit: GHz polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: mhs_aapp_l1c '2': name: '2' frequency_range: central: 157. bandwidth: 2.8 unit: GHz polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: mhs_aapp_l1c '3': name: '3' frequency_double_sideband: unit: GHz central: 183.31 side: 1.0 bandwidth: 1.0 polarization: 'H' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: mhs_aapp_l1c '4': name: '4' frequency_double_sideband: unit: GHz central: 183.31 side: 3.0 bandwidth: 2.0 polarization: 'H' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: mhs_aapp_l1c '5': name: '5' frequency_range: unit: GHz central: 190.311 bandwidth: 2.0 polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: mhs_aapp_l1c solar_zenith_angle: name: solar_zenith_angle resolution: 16000 coordinates: - longitude - latitude file_type: mhs_aapp_l1c standard_name: solar_zenith_angle units: degrees solar_azimuth_angle: name: solar_azimuth_angle resolution: 16000 coordinates: - longitude - latitude file_type: mhs_aapp_l1c standard_name: solar_azimuth_angle units: degrees sensor_zenith_angle: name: sensor_zenith_angle resolution: 16000 coordinates: - longitude - latitude file_type: mhs_aapp_l1c standard_name: sensor_zenith_angle units: degrees sensor_azimuth_angle: name: sensor_azimuth_angle resolution: 16000 coordinates: - longitude - latitude file_type: mhs_aapp_l1c standard_name: sensor_azimuth_angle units: degrees latitude: name: latitude resolution: 16000 file_type: mhs_aapp_l1c standard_name: latitude units: degrees_north longitude: name: longitude resolution: 16000 
file_type: mhs_aapp_l1c standard_name: longitude units: degrees_east file_types: mhs_aapp_l1c: file_reader: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.MHS_AMSUB_AAPPL1CFile file_patterns: ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c'] satpy-0.34.0/satpy/etc/readers/mimicTPW2_comp.yaml000066400000000000000000000006631420401153000217460ustar00rootroot00000000000000reader: description: NetCDF4 reader for the MIMIC TPW Version 2.0 product name: mimicTPW2_comp sensors: [mimic] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: mimicTPW2_comp: file_reader: !!python/name:satpy.readers.mimic_TPW2_nc.MimicTPW2FileHandler file_patterns: ['comp{start_time:%Y%m%d.%H%M%S}.nc'] sensor: ['mimic'] platform_name: ['microwave'] satpy-0.34.0/satpy/etc/readers/mirs.yaml000066400000000000000000000067661420401153000201410ustar00rootroot00000000000000reader: description: NetCDF Reader for the Microwave Integrated Retrieval System Level 2 swath products name: mirs short_name: MiRS Level 2 NetCDF4 long_name: MiRS Level 2 Swath Product Reader (NetCDF4) reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsu, amsu-mhs, atms, ssmis, gmi] data_files: - url: "https://zenodo.org/record/4472664/files/limbcoef_atmsland_noaa20.txt" known_hash: "08a3b7c1594a963610dd864b7ecd12f0ab486412d35185c2371d924dd92c5779" - url: "https://zenodo.org/record/4472664/files/limbcoef_atmsland_snpp.txt" known_hash: "4b01543699792306711ef1699244e96186487e8a869e4ae42bf1f0e4d00fd063" - url: "https://zenodo.org/record/4472664/files/limbcoef_atmssea_noaa20.txt" known_hash: "6853d0536b11c31dc130ab12c61fa322a76d3823a4b8ff9a18a0ecedbf269a88" - url: "https://zenodo.org/record/4472664/files/limbcoef_atmssea_snpp.txt" known_hash: "d0f806051b80320e046bdae6a9b68616152bbf8c2dbf3667b9834459259c0d72" file_types: mirs_atms: file_reader: !!python/name:satpy.readers.mirs.MiRSL2ncHandler file_patterns: - 'NPR-MIRS-IMG_v{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S}{extra_num1}_e{end_time:%Y%m%d%H%M%S}{extra_num2}_c{creation_time:%Y%m%d%H%M%S}{extra_num3}.nc' metop_amsu: file_reader: !!python/name:satpy.readers.mirs.MiRSL2ncHandler file_patterns: - 'IMG_SX.{platform_shortname}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{num}.WE.HR.ORB.nc' datasets: longitude: name: longitude file_type: metop_amsu file_key: Longitude units: degrees valid_range: [ -180., 180. ] standard_name: longitude latitude: name: latitude file_type: metop_amsu file_key: Latitude valid_range: [-90., 90.] 
units: degrees standard_name: latitude rain_rate: name: RR description: Rain Rate file_key: RR file_type: metop_amsu units: mm/hr coordinates: [longitude, latitude] mask: name: Sfc_type file_key: Sfc_type file_type: metop_amsu description: Surface Type:0-ocean,1-sea ice,2-land,3-snow units: "1" coordinates: [longitude, latitude] sea_ice: name: SIce description: Sea Ice file_key: SIce file_type: metop_amsu units: "%" coordinates: [longitude, latitude] snow_cover: name: Snow description: Snow Cover long_name: snow_cover file_key: Snow file_type: metop_amsu units: '1' coordinates: [longitude, latitude] total_precipitable_water: name: TPW description: Total Precipitable Water file_key: TPW file_type: metop_amsu units: mm coordinates: [longitude, latitude] swe: name: SWE description: Snow Water Equivalence file_key: SWE file_type: metop_amsu units: cm coordinates: [longitude, latitude] cloud_liquid_water: name: CLW description: Cloud Liquid Water file_key: CLW file_type: metop_amsu units: mm coordinates: [longitude, latitude] skin_temperature: name: TSkin description: skin temperature file_key: TSkin file_type: metop_amsu units: K coordinates: [longitude, latitude] snow_fall_rate: name: SFR description: snow fall rate file_key: SFR file_type: metop_amsu units: mm/hr coordinates: [longitude, latitude] bt: name: BT file_type: metop_amsu description: Channel Brightness Temperature for every channel long_name: Channel Temperature (K) units: K valid_range: [0, 50000] standard_name: brightness_temperature satpy-0.34.0/satpy/etc/readers/modis_l1b.yaml000066400000000000000000000341141420401153000210240ustar00rootroot00000000000000reader: default_datasets: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36] description: Generic MODIS HDF-EOS Reader name: modis_l1b reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [modis] navigations: hdf_eos_geo: description: MODIS navigation file_type: hdf_eos_geo latitude_key: Latitude longitude_key: Longitude nadir_resolution: [1000] rows_per_scan: 10 datasets: '1': name: '1' resolution: 250: {file_type: hdf_eos_data_250m} 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.62 - 0.645 - 0.67 '2': name: '2' resolution: 250: {file_type: hdf_eos_data_250m} 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.841 - 0.8585 - 0.876 '3': name: '3' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.459 - 0.469 - 0.479 '4': name: '4' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.545 - 0.555 - 0.565 '5': name: '5' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 1.23 - 1.24 - 1.25 '6': name: '6' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 1.628 - 1.64 - 1.652 '7': name: '7' resolution: 500: {file_type: 
hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 2.105 - 2.13 - 2.155 '8': file_type: hdf_eos_data_1000m name: '8' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.405 - 0.4125 - 0.42 '9': file_type: hdf_eos_data_1000m name: '9' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.438 - 0.443 - 0.448 '10': file_type: hdf_eos_data_1000m name: '10' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.483 - 0.488 - 0.493 '11': file_type: hdf_eos_data_1000m name: '11' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.526 - 0.531 - 0.536 '12': file_type: hdf_eos_data_1000m name: '12' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.546 - 0.551 - 0.556 13hi: file_type: hdf_eos_data_1000m name: '13hi' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.662 - 0.667 - 0.672 13lo: file_type: hdf_eos_data_1000m name: '13lo' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.662 - 0.667 - 0.672 14hi: file_type: hdf_eos_data_1000m name: '14hi' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.673 - 0.678 - 0.683 14lo: file_type: hdf_eos_data_1000m name: '14lo' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.673 - 0.678 - 0.683 '15': file_type: hdf_eos_data_1000m name: '15' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.743 - 0.748 - 0.753 '16': file_type: hdf_eos_data_1000m name: '16' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.862 - 0.8695 - 0.877 '17': file_type: hdf_eos_data_1000m name: '17' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.89 - 0.905 - 0.92 '18': file_type: hdf_eos_data_1000m name: '18' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.931 - 0.936 - 0.941 '19': file_type: hdf_eos_data_1000m name: '19' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.915 - 0.94 - 0.965 '20': file_type: hdf_eos_data_1000m name: '20' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 3.66 - 3.75 - 3.84 '21': file_type: hdf_eos_data_1000m name: '21' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 3.929 - 3.959 - 3.989 '22': file_type: hdf_eos_data_1000m name: '22' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 3.929 - 3.959 - 3.989 '23': file_type: hdf_eos_data_1000m name: '23' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 4.02 - 4.05 - 4.08 '24': file_type: hdf_eos_data_1000m name: '24' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: 
[longitude, latitude] wavelength: - 4.433 - 4.4655 - 4.498 '25': file_type: hdf_eos_data_1000m name: '25' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 4.482 - 4.5155 - 4.549 '26': file_type: hdf_eos_data_1000m name: '26' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 1.36 - 1.375 - 1.39 '27': file_type: hdf_eos_data_1000m name: '27' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 6.535 - 6.715 - 6.895 '28': file_type: hdf_eos_data_1000m name: '28' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 7.175 - 7.325 - 7.475 '29': file_type: hdf_eos_data_1000m name: '29' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 8.4 - 8.55 - 8.7 '30': file_type: hdf_eos_data_1000m name: '30' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 9.58 - 9.73 - 9.88 '31': file_type: hdf_eos_data_1000m name: '31' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 10.78 - 11.03 - 11.28 '32': file_type: hdf_eos_data_1000m name: '32' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 11.77 - 12.02 - 12.27 '33': file_type: hdf_eos_data_1000m name: '33' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 13.185 - 13.335 - 13.485 '34': file_type: hdf_eos_data_1000m name: '34' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 13.485 - 13.635 - 13.785 '35': file_type: hdf_eos_data_1000m name: '35' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 13.785 - 13.935 - 14.085 '36': file_type: hdf_eos_data_1000m name: '36' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 14.085 - 14.235 - 14.385 longitude: name: longitude resolution: 5000: # For EUM reduced (thinned) files file_type: hdf_eos_data_1000m 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m, hdf_eos_data_500m, hdf_eos_data_250m] # Both 500m and 250m files have 1km resolution Longitude/Latitude # 1km Longitude/Latitude can be interpolated to 500m or 250m resolution 500: file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m] 250: file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m] standard_name: longitude units: degree latitude: name: latitude resolution: 5000: # For EUM reduced (thinned) files file_type: hdf_eos_data_1000m 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m, hdf_eos_data_500m, hdf_eos_data_250m] # Both 500m and 250m files have 1km resolution Longitude/Latitude # 1km Longitude/Latitude can be interpolated to 500m or 250m resolution 500: file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m] 250: file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m] standard_name: latitude units: degree solar_zenith_angle: name: solar_zenith_angle sensor: modis resolution: 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: [hdf_eos_geo] 250: file_type: [hdf_eos_geo] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] 
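  # The angle datasets in this section can be read either from the separate MOD03
  # geolocation file (hdf_eos_geo) or from the 1 km L1B granule itself, as listed
  # in their resolution/file_type mappings.
  # Minimal usage sketch (illustrative file names only, not shipped with satpy):
  #   from satpy import Scene
  #   scn = Scene(reader='modis_l1b',
  #               filenames=['MOD021KM.A2021300.1220.061.2021301013000.hdf',
  #                          'MOD03.A2021300.1220.061.2021301013000.hdf'])
  #   scn.load(['1', '31', 'solar_zenith_angle'])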
solar_azimuth_angle: name: solar_azimuth_angle sensor: modis resolution: 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: [hdf_eos_geo] 250: file_type: [hdf_eos_geo] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] satellite_zenith_angle: name: satellite_zenith_angle sensor: modis resolution: 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: [hdf_eos_geo] 250: file_type: [hdf_eos_geo] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] satellite_azimuth_angle: name: satellite_azimuth_angle sensor: modis resolution: 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: [hdf_eos_geo] 250: file_type: [hdf_eos_geo] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] file_types: hdf_eos_data_250m: file_patterns: - 'M{platform_indicator:1s}D02Qkm_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02QKM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02QKM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D02QKM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.250m.hdf' - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_QKM' file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader hdf_eos_data_500m: file_patterns: - 'M{platform_indicator:1s}D02Hkm_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02HKM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02HKM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D02HKM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.500m.hdf' - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_HKM' file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader hdf_eos_data_1000m: file_patterns: - 'M{platform_indicator:1s}D021km_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'thin_M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D021KM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.1000m.hdf' - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_LAC' - 'M{platform_indicator:1s}D021KM_A{start_time:%Y%j_%H%M}_{collection:03d}_NRT.hdf' file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader hdf_eos_geo: file_patterns: - 'M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf' - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.GEO' file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader satpy-0.34.0/satpy/etc/readers/modis_l2.yaml000066400000000000000000001142051420401153000206630ustar00rootroot00000000000000reader: description: MODIS HDF-EOS L2 Reader name: modis_l2 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [modis] 
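# Minimal usage sketch for this reader (illustrative file names only; satpy matches
# them against the file_patterns defined under file_types below):
#   from satpy import Scene
#   scn = Scene(reader='modis_l2',
#               filenames=['MOD06_L2.A2021300.1220.061.2021301020000.hdf',
#                          'MOD03.A2021300.1220.061.2021301013000.hdf'])
#   scn.load(['cloud_top_temperature', 'cloud_phase_infrared'])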
file_types: mod35_hdf: file_patterns: - 'M{platform_indicator:1s}D35_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod35.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod06_hdf: file_patterns: - 'M{platform_indicator:1s}D06_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod06ct_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06ct.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler hdf_eos_geo: file_patterns: - 'M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf' file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader icecon_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.icecon.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler inversion_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.inversion.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler ist_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ist.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mask_byte1_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mask_byte1.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod07_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod07.hdf' - 'M{platform_indicator:1s}D07_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod28_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod28.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler modlst_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.modlst.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler ndvi_1000m_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ndvi.1000m.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler snowmask_hdf: file_patterns: - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.snowmask.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler datasets: longitude: name: longitude resolution: 5000: file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf] 1000: file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf] 500: file_type: hdf_eos_geo 250: file_type: hdf_eos_geo standard_name: longitude units: degree latitude: name: latitude resolution: 5000: # For EUM reduced (thinned) files file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf] 1000: file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf] 500: file_type: hdf_eos_geo 250: file_type: hdf_eos_geo standard_name: latitude units: degree ########################## #Datasets in file mod35_l2 ########################## cloud_mask: # byte Cloud_Mask(Byte_Segment, Cell_Along_Swath_1km, Cell_Across_Swath_1km) name: cloud_mask coordinates: 
[longitude, latitude] resolution: 250: file_type: mod35_hdf # Quality Assurance flag is necessary for 250m resolution dataset quality_assurance: True byte: [4, 5] bit_start: 0 bit_count: 1 1000: file_type: [mod35_hdf, mask_byte1_hdf] quality_assurance: False byte: 0 bit_start: 1 bit_count: 2 # NOTE: byte information and file_key below are unused for the # mask_byte1_hdf file type. # The dimension of the dataset where the byte information is stored file_key: Cloud_Mask imapp_file_key: MODIS_Cloud_Mask category: True byte_dimension: 0 quality_assurance: # byte Quality_Assurance(Cell_Along_Swath_1km, Cell_Across_Swath_1km, QA_Dimension) name: quality_assurance resolution: 1000 # The dimension of the dataset where the byte information is stored byte_dimension: 2 # The byte to consider to extract relevant bits byte: 0 bit_start: 0 bit_count: 1 category: True file_key: Quality_Assurance file_type: mod35_hdf coordinates: [longitude, latitude] ########################## #Datasets in file mod06_l2 ########################## # file contents: https://atmosphere-imager.gsfc.nasa.gov/sites/default/files/ModAtmo/MOD06_L2_CDL_fs.txt brightness_temperature: name: brightness_temperature long_name: Observed Brightness Temperature from Averaged Radiances in a 5x5 1-km Pixel Region units: K file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Brightness_Temperature surface_temperature: name: surface_temperature long_name: Surface Temperature from Ancillary Data units: K coordinates: [longitude, latitude] resolution: 1000: file_key: surface_temperature_1km file_type: mod06_hdf 5000: file_key: Surface_Temperature file_type: [mod06_hdf, mod06ct_hdf] surface_pressure: name: surface_pressure long_name: Surface Pressure from Ancillary Data units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Surface_Pressure cloud_height_method: name: cloud_height_method long_name: Index Indicating MODIS Bands Used for Cloud Top Pressure Retrieval units: None comment: "1: CO2-slicing retrieval, bands 36/35, 2: CO2-slicing retrieval, bands 35/34, 3: CO2-slicing retrieval, bands 35/33, 4: CO2-slicing retrieval, bands 34/33, 6: IR-window retrieval, band 31" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Height_Method cloud_top_height: name: cloud_top_height long_name: Geopotential Height at Retrieved Cloud Top Pressure Level (rounded to nearest 50 m) units: m file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_top_height_1km 5000: file_key: Cloud_Top_Height cloud_top_height_nadir: name: cloud_top_height_nadir long_name: Geopotential Height at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <=32 Degrees (rounded to nearest 50 m) units: m file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Height_Nadir cloud_top_height_nadir_day: name: cloud_top_height_nadir_day long_name: Geopotential Height at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <=32 Degrees, Day Data Only (rounded to nearest 50 m) units: m file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Height_Nadir_Day cloud_top_height_nadir_night: name: cloud_top_height_nadir_night long_name: Geopotential Height at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <=32 Degrees, Night Data Only (rounded to nearest 50 m) units: m file_type: mod06_hdf 
coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Height_Nadir_Night cloud_top_pressure: name: cloud_top_pressure long_name: Cloud Top Pressure Level (rounded to nearest 5 mb) units: hPa coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_top_pressure_1km file_type: mod06_hdf 5000: file_key: Cloud_Top_Pressure file_type: [mod06_hdf, mod06ct_hdf] cloud_top_pressure_nadir: name: cloud_top_pressure_nadir long_name: Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees (rounded to nearest 5 mb) units: hPa file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Nadir cloud_top_pressure_night: name: cloud_top_pressure_night long_name: Cloud Top Pressure Level, Night Data Only (rounded to nearest 5 mb) units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Night cloud_top_pressure_nadir_night: name: cloud_top_pressure_nadir_night long_name: Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees (rounded to nearest 5 mb), Night Data Only units: hPa file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Nadir_Night cloud_top_pressure_day: name: cloud_top_pressure_day long_name: Cloud Top Pressure Level, Day Only (rounded to nearest 5 mb) units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Day cloud_top_pressure_nadir_day: name: cloud_top_pressure_nadir_day long_name: Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees (rounded to nearest 5 mb), Day Data Only units: hPa file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Nadir_Day cloud_top_temperature: name: cloud_top_temperature long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level units: K coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_top_temperature_1km file_type: mod06_hdf 5000: file_key: Cloud_Top_Temperature file_type: [mod06_hdf, mod06ct_hdf] cloud_top_temperature_nadir: name: cloud_top_temperature_nadir long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees units: K file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Nadir cloud_top_temperature_night: name: cloud_top_temperature_night long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level, Night Only units: K file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Night cloud_top_temperature_nadir_night: name: cloud_top_temperature_nadir_night long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees, Night Data Only units: K file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Nadir_Night cloud_top_temperature_day: name: cloud_top_temperature_day long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level, Day Only units: K file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Day cloud_top_temperature_nadir_day: name: cloud_top_temperature_nadir_day long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level for Sensor 
Zenith (View) Angles <= 32 Degrees, Day Data Only units: K file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Nadir_Day tropopause_height: name: tropopause_height long_name: Tropopause Height from Ancillary Data units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Tropopause_Height cloud_fraction: name: cloud_fraction long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction cloud_fraction_nadir: name: cloud_fraction_nadir long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask for Sensor Zenith (View) Angles <= 32 Degrees units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Nadir cloud_fraction_night: name: cloud_fraction_night long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask, Night Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Night cloud_fraction_nadir_night: name: cloud_fraction_nadir_night long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask for Sensor Zenith (View) Angles <= 32 Degrees, Night Data Only units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Nadir_Night cloud_fraction_day: name: cloud_fraction_day long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask, Day Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Day cloud_fraction_nadir_day: name: cloud_fraction_nadir_day long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask for Sensor Zenith (View) Angles <= 32 Degrees, Day Data Only units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Nadir_Day cloud_effective_emissivity: name: cloud_effective_emissivity long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity cloud_effective_emissivity_nadir: name: cloud_effective_emissivity_nadir long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval for Sensor Zenith (View) Angles <= 32 Degrees units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Nadir cloud_effective_emissivity_night: name: cloud_effective_emissivity_night long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval, Night Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Night cloud_effective_emissivity_nadir_night: name: cloud_effective_emissivity_nadir_night long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval for Sensor Zenith (View) Angles <= 32 Degrees, Night Data Only units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Nadir_Night cloud_effective_emissivity_day: name: cloud_effective_emissivity_day long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval, Day Only units: None 
file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Day cloud_effective_emissivity_nadir_day: name: cloud_effective_emissivity_nadir_day long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval for Sensor Zenith (View) Angles <= 32 Degrees, Day Data Only units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Nadir_Day cloud_top_pressure_infrared: name: cloud_top_pressure_infrared long_name: Cloud Top Pressure from IR Window Retrieval units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Infrared spectral_cloud_forcing: name: spectral_cloud_forcing long_name: Spectral Cloud Forcing (cloud minus clear radiance) units: Watts/meter2/steradian/micron file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Spectral_Cloud_Forcing cloud_top_pressure_from_ratios: name: cloud_top_pressure_from_ratios long_name: Cloud Top Pressure Levels from Ratios of Bands 36/35, 35/34, 35/33, 34/33 from the CO2-slicing Algorithm units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_From_Ratios radiance_variance: name: radiance_variance long_name: Band 31 Radiance Standard Deviation units: Watts/meter2/steradian/micron file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Radiance_Variance cloud_phase_infrared: name: cloud_phase_infrared long_name: Cloud Phase from 8.5 and 11 um Bands units: None comment: "0: cloud free, 1: water cloud, 2: ice cloud, 3: mixed phase cloud, 6: undetermined phase" coordinates: [longitude, latitude] category: True resolution: 1000: file_key: Cloud_Phase_Infrared_1km file_type: mod06_hdf 5000: file_key: Cloud_Phase_Infrared file_type: [mod06_hdf, mod06ct_hdf] cloud_phase_infrared_night: name: cloud_phase_infrared_night long_name: Cloud Phase from 8.5 and 11 um Bands, Night Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] category: True resolution: 5000: file_key: Cloud_Phase_Infrared_Night cloud_phase_infrared_day: name: cloud_phase_infrared_day long_name: Cloud Phase from 8.5 and 11 um Bands, Day Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] category: True resolution: 5000: file_key: Cloud_Phase_Infrared_Day os_top_flag: name: os_top_flag long_name: Upper Tropospheric/Lower Stratospheric (UTLS) Cloud Flag at 1-km resolution - valid from -50 to +50 Degrees Latitude units: None comment: "0: stratospheric cloud test not performed, 1: stratospheric cloud not indicated, 2: stratospheric cloud indicated (BTD35-33 > 0.5K)" file_type: mod06_hdf coordinates: [longitude, latitude] category: True resolution: 1000: file_key: os_top_flag_1km cloud_emissivity: name: cloud_emissivity long_name: Cloud Emissivity at 1-km resolution from LEOCAT Cloud Top Pressure Retrieval units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emissivity_1km cloud_top_method: name: cloud_top_method long_name: Index Indicating the MODIS Band(s) Used to Produce the Cloud Top Pressure Result units: unitless comment: "1: CO2-slicing retrieval, bands 36/35, 2: CO2-slicing retrieval, bands 35/34, 3: CO2-slicing retrieval, bands 35/33, 4: CO2-slicing retrieval, bands 34/33, 6: IR-window retrieval, band 31" 
file_type: mod06_hdf coordinates: [longitude, latitude] category: True resolution: 1000: file_key: cloud_top_method_1km cloud_emiss11: name: cloud_emiss11 long_name: 11 micron Cloud Emissivity at 1-km resolution from LEOCAT for All Clouds units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emiss11_1km cloud_emiss12: name: cloud_emiss12 long_name: 12 micron Cloud Emissivity at 1-km resolution from LEOCAT for All Clouds units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emiss12_1km cloud_emiss13: name: cloud_emiss13 long_name: 13.3 micron Cloud Emissivity at 1-km resolution from LEOCAT for All Clouds units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emiss13_1km cloud_emiss85: name: cloud_emiss85 long_name: 8.5 micron Cloud Emissivity at 1-km resolution from LEOCAT for All Clouds units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emiss85_1km cloud_effective_radius: name: cloud_effective_radius long_name: "Cloud Particle Effective Radius two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: micron file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius cloud_effective_radius_pcl: name: cloud_effective_radius_pcl long_name: Cloud Particle Effective Radius two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_PCL cloud_effective_radius_16: name: cloud_effective_radius_16 long_name: "Cloud Particle Effective Radius two-channel retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_16 cloud_effective_radius_16_PCL: name: cloud_effective_radius_16_PCL long_name: Cloud Particle Effective Radius two-channel retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_16_PCL cloud_effective_radius_37: name: cloud_effective_radius_37 long_name: "Cloud Particle Effective Radius two-channel retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_37 cloud_effective_radius_37_PCL: name: cloud_effective_radius_37_PCL long_name: Cloud Particle Effective Radius two-channel retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: 
Cloud_Effective_Radius_37_PCL cloud_optical_thickness: name: cloud_optical_thickness long_name: "Cloud Optical Thickness two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness cloud_optical_thickness_pcl: name: cloud_optical_thickness_pcl long_name: Cloud Optical Thickness two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_PCL cloud_effective_radius_1621: name: cloud_effective_radius_1621 long_name: "Cloud Particle Effective Radius two-channel retrieval using band 7 and band 6from best points: not failed in any way, not marked for clear sky restoral" units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_1621 cloud_effective_radius_1621_pcl: name: cloud_effective_radius_1621_pcl long_name: Cloud Particle Effective Radius two-channel retrieval using band 7 and band 6from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_1621_PCL cloud_optical_thickness_1621: name: cloud_optical_thickness_1621 long_name: "Cloud Optical Thickness two-channel retrieval using band 7 and band 6 from best points: not failed in any way, not marked for clear sky restoral" units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_1621 cloud_optical_thickness_1621_pcl: name: cloud_optical_thickness_1621_pcl long_name: Cloud Optical Thickness two-channel retrieval using band 7 and band 6 from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_1621_PCL cloud_water_path: name: cloud_water_path long_name: "Column Water Path two-band retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: g/m^2 file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path cloud_water_path_pcl: name: cloud_water_path_pcl long_name: Column Water Path two-band retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_PCL cloud_water_path_1621: name: cloud_water_path_1621 long_name: "Column Water Path two-band retrieval using band 7 and band 6from best points: not failed in any way, not marked for clear sky restoral" units: g/m^2 file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_1621 cloud_water_path_1621_pcl: name: cloud_water_path_1621_pcl long_name: Column Water Path two-band retrieval using band 7 and band 6from points 
identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_1621_PCL cloud_water_path_16: name: cloud_water_path_16 long_name: "Column Water Path two-band retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_16 cloud_water_path_16_pcl: name: cloud_water_path_16_pcl long_name: Column Water Path two-band retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_16_PCL cloud_water_path_37: name: cloud_water_path_37 long_name: "Column Water Path two-band retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_37 cloud_water_path_37_pcl: name: cloud_water_path_37_pcl long_name: Column Water Path two-band retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_37_PCL cloud_effective_radius_uncertainty: name: cloud_effective_radius_uncertainty long_name: Cloud Effective Particle Radius (from band 7) Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_Uncertainty cloud_effective_radius_uncertainty_16: name: cloud_effective_radius_uncertainty_16 long_name: Cloud Effective Particle Radius (from band 6) Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_Unvertainty_16 cloud_effective_radius_uncertainty_37: name: cloud_effective_radius_uncertainty_37 long_name: Cloud Effective Particle Radius (from band 20) Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_Unvertainty_37 cloud_optical_thickness_uncertainty: name: cloud_optical_thickness_uncertainty long_name: Cloud Optical Thickness Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_Uncertainty cloud_water_path_uncertainty: name: cloud_water_path_uncertainty long_name: Cloud Water Path Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" 
file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_Uncertainty cloud_effective_radius_uncertainty_1621: name: cloud_effective_radius_uncertainty_1621 long_name: Cloud Effective Particle Radius Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_Uncertainty_1621 cloud_optical_thickness_uncertainty_1621: name: cloud_optical_thickness_uncertainty_1621 long_name: Cloud Optical Thickness Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_Uncertainty_1621 cloud_water_path_uncertainty_1621: name: cloud_water_path_uncertainty_1621 long_name: Cloud Water Path Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_Uncertainty_1621 cloud_water_path_uncertainty_16: name: cloud_water_path_uncertainty_16 long_name: Cloud Water Path Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m using the 0.86-1.6um retrieval units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_Uncertainty_16 cloud_water_path_uncertainty_37: name: cloud_water_path_uncertainty_37 long_name: Cloud Water Path Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m using the 0.86-3.7um retrieval units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_Uncertainty_37 above_cloud_water_vapor_094: name: above_cloud_water_vapor_094 long_name: Above-cloud water vapor amount from 0.94um channel, ocean only, tau > 5. 
units: cm file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Above_Cloud_Water_Vapor_094 irw_low_cloud_temperature_from_cop: name: irw_low_cloud_temperature_from_cop long_name: Low Cloud Temperature from IR Window retrieval using cloud emissivity based on cloud optical thickness units: K file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: IRW_Low_Cloud_Temperature_From_COP cloud_phase_optical_properties: name: cloud_phase_optical_properties long_name: Cloud Phase Determination Used in Optical Thickness/Effective Radius Retrieval units: None comment: "0: cloud mask undetermined, 1: clear sky, 2: liquid water cloud, 3: ice cloud, 4: undetermined phase cloud (but attempted as liquid water)" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] category: True resolution: 1000: file_key: Cloud_Phase_Optical_Properties cloud_multi_layer_flag: name: cloud_multi_layer_flag long_name: Cloud Multi Layer Identification From MODIS Shortwave Observations units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] category: True resolution: 1000: file_key: Cloud_Multi_Layer_Flag cirrus_reflectance: name: cirrus_reflectance long_name: Cirrus Reflectance units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cirrus_Reflectance cirrus_reflectance_flag: name: cirrus_reflectance_flag long_name: Cirrus Reflectance Flag units: None comment: "0: bad data, 1: non-cirrus pixel, 2: cirrus pixel, 3: contrail pixel" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cirrus_Reflectance_Flag # Ice Concentration ice_con: name: ice_concentration file_type: icecon_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Ice_Concentration ice_mask: # TODO: Do fancy integer handling name: ice_mask file_type: icecon_hdf coordinates: [longitude, latitude] category: True resolution: 1000 file_key: Ice_Mask # Inversion inversion_depth: name: inversion_depth file_type: inversion_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Inversion_Depth inversion_strength: name: inversion_strength file_type: inversion_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Inversion_Strength # IST ice_surface_temperature: name: ice_surface_temperature file_type: ist_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Ice_Surface_Temperature # MOD07 # Total Precipitable Water water_vapor: name: water_vapor file_type: mod07_hdf coordinates: [longitude, latitude] resolution: 5000 file_key: Water_Vapor # MOD28 sea_surface_temperature: name: sea_surface_temperature file_type: mod28_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Sea_Surface_Temperature # MODLST land_surface_temperature: name: lst file_type: modlst_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: LST # NDVI ndvi: name: ndvi file_type: ndvi_1000m_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: NDVI # Snow Mask snow_mask: name: snow_mask file_type: snowmask_hdf coordinates: [longitude, latitude] category: True resolution: 1000 file_key: Snow_Mask # mask_byte1 # See the MOD35 cloud_mask entry which also handles mask_byte1 cloud_mask land_sea_mask_mask_byte1: name: land_sea_mask resolution: 1000 file_key: MODIS_Simple_LandSea_Mask file_type: mask_byte1_hdf category: True coordinates: [longitude, latitude] snow_ice_mask_mask_byte1: name: snow_ice_mask resolution: 1000 
file_key: MODIS_Snow_Ice_Flag file_type: mask_byte1_hdf category: True coordinates: [longitude, latitude] satpy-0.34.0/satpy/etc/readers/msi_safe.yaml000066400000000000000000000147101420401153000207410ustar00rootroot00000000000000reader: description: SAFE Reader for MSI data (Sentinel-2) name: msi_safe sensors: [msi] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: safe_granule: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] requires: [safe_metadata, safe_tile_metadata] safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] datasets: B01: name: B01 sensor: MSI wavelength: [0.415, 0.443, 0.470] resolution: 60 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B02: name: B02 sensor: MSI wavelength: [0.440, 0.490, 0.540] resolution: 10 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B03: name: B03 sensor: MSI wavelength: [0.540, 0.560, 0.580] resolution: 10 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B04: name: B04 sensor: MSI wavelength: [0.645, 0.665, 0.685] resolution: 10 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B05: name: B05 sensor: MSI wavelength: [0.695, 0.705, 0.715] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B06: name: B06 sensor: MSI wavelength: [0.731, 0.740, 0.749] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B07: name: B07 sensor: MSI wavelength: [0.764, 0.783, 0.802] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B08: name: B08 sensor: MSI wavelength: 
[0.780, 0.842, 0.905] resolution: 10 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B8A: name: B8A sensor: MSI wavelength: [0.855, 0.865, 0.875] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B09: name: B09 sensor: MSI wavelength: [0.935, 0.945, 0.955] resolution: 60 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B10: name: B10 sensor: MSI wavelength: [1.365, 1.375, 1.385] resolution: 60 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B11: name: B11 sensor: MSI wavelength: [1.565, 1.610, 1.655] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule B12: name: B12 sensor: MSI wavelength: [2.100, 2.190, 2.280] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: safe_granule solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] file_type: safe_tile_metadata xml_tag: Sun_Angles_Grid/Zenith solar_azimuth_angle: name: solar_azimuth_angle resolution: [10, 20, 60] file_type: safe_tile_metadata xml_tag: Sun_Angles_Grid/Azimuth satellite_azimuth_angle: name: satellite_azimuth_angle resolution: [10, 20, 60] file_type: safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Azimuth satellite_zenith_angle: name: satellite_zenith_angle resolution: [10, 20, 60] file_type: safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith satpy-0.34.0/satpy/etc/readers/msu_gsa_l1b.yaml000066400000000000000000000141131420401153000213440ustar00rootroot00000000000000reader: description: H5 reader for MSG-GS/A data name: msu_gsa_l1b sensors: [msu_gsa] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: msu_gsa_l1b: file_reader: !!python/name:satpy.readers.msu_gsa_l1b.MSUGSAFileHandler file_patterns: ['ArcticaM{mission_id:1s}_{start_time:%Y%m%d%H%M}.h5'] datasets: longitude: name: longitude units: degrees_east standard_name: longitude resolution: 4000: file_type: msu_gsa_l1b file_key: Geolocation/resolution_4km/Longitude 1000: file_type: msu_gsa_l1b file_key: Geolocation/resolution_1km/Longitude latitude: name: latitude units: degrees_north standard_name: latitude resolution: 4000: file_type: msu_gsa_l1b file_key: Geolocation/resolution_4km/Latitude 1000: file_type: msu_gsa_l1b file_key: Geolocation/resolution_1km/Latitude # The channels C01-C03 (VIS) are available at 1km resolution C01: name: C01 sensor: msu_gsa wavelength: [0.5, 0.6, 0.65] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance units: W m-2 sr-1 coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_1km/Radiance_01 C02: name: C02 sensor: msu_gsa 
wavelength: [0.65, 0.7, 0.8] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance units: W m-2 sr-1 coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_1km/Radiance_02 C03: name: C03 sensor: msu_gsa wavelength: [0.8, 0.9, 0.9] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance units: W m-2 sr-1 coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_1km/Radiance_03 # The channels C04-C10 (IR) are available at 4km resolution C04: name: C04 sensor: msu_gsa wavelength: [3.5, 3.8, 4.0] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_04 C05: name: C05 sensor: msu_gsa wavelength: [5.7, 6.4, 7.0] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_05 C06: name: C06 sensor: msu_gsa wavelength: [7.5, 8.0, 8.5] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_06 C07: name: C07 sensor: msu_gsa wavelength: [8.2, 8.7, 9.2] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_07 C08: name: C08 sensor: msu_gsa wavelength: [9.2, 9.7, 10.2] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_08 C09: name: C09 sensor: msu_gsa wavelength: [10.2, 10.8, 11.2] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_09 C10: name: C10 sensor: msu_gsa wavelength: [11.2, 11.9, 12.5] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_10 # The solar and viewing geometry is available at both resolutions solar_zenith_angle: name: solar_zenith_angle units: degrees standard_name: solar_zenith_angle resolution: 4000: file_type: msu_gsa_l1b file_key: Data/resolution_4km/Solar_Zenith_Angle 1000: file_type: msu_gsa_l1b file_key: Data/resolution_1km/Solar_Zenith_Angle coordinates: [longitude, latitude] solar_azimuth_angle: name: solar_azimuth_angle units: degrees standard_name: solar_azimuth_angle resolution: 4000: file_type: msu_gsa_l1b file_key: Data/resolution_4km/Solar_Azimuth_Angle 1000: file_type: msu_gsa_l1b file_key: Data/resolution_1km/Solar_Azimuth_Angle coordinates: [longitude, latitude] satellite_zenith_angle: name: satellite_zenith_angle units: degrees standard_name: satellite_zenith_angle resolution: 4000: file_type: msu_gsa_l1b file_key: Data/resolution_4km/Satellite_Zenith_Angle 1000: file_type: msu_gsa_l1b file_key: 
Data/resolution_1km/Satellite_Zenith_Angle coordinates: [longitude, latitude] satellite_azimuth_angle: name: satellite_azimuth_angle units: degrees standard_name: satellite_azimuth_angle resolution: 4000: file_type: msu_gsa_l1b file_key: Data/resolution_4km/Satellite_Azimuth_Angle 1000: file_type: msu_gsa_l1b file_key: Data/resolution_1km/Satellite_Azimuth_Angle coordinates: [longitude, latitude] satpy-0.34.0/satpy/etc/readers/mtsat2-imager_hrit.yaml000066400000000000000000000106141420401153000226540ustar00rootroot00000000000000reader: name: mtsat2-imager_hrit short_name: MTSAT-2 Imager HRIT long_name: MTSAT-2 Imager Level 1 (HRIT) description: > Reader for MTSAT-2 Imager data in JMA HRIT format. Note that there exist two versions of the dataset. A segmented (data split into multiple files) and a non-segmented version (all data in one file). References: - https://www.wmo-sat.info/oscar/instruments/view/219 - http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html sensors: [mtsat2_imager] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader file_types: hrit_vis: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS' hrit_ir1: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1' hrit_ir2: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2' hrit_ir3: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3' hrit_ir4: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4' hrit_vis_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir1_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir2_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir3_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir4_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 datasets: VIS: name: VIS sensor: mtsat2_imager wavelength: [0.55, 0.675, 0.80] resolution: 1000 calibration: counts: standard_name: counts units: 1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: [hrit_vis, hrit_vis_seg] IR1: name: IR1 sensor: mtsat2_imager wavelength: [10.3, 10.8, 11.3] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir1, hrit_ir1_seg] IR2: name: IR2 sensor: mtsat2_imager 
wavelength: [11.5, 12.0, 12.5] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir2, hrit_ir2_seg] IR3: name: IR3 sensor: mtsat2_imager wavelength: [6.5, 6.75, 7.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir3, hrit_ir3_seg] IR4: name: IR4 sensor: mtsat2_imager wavelength: [3.5, 3.75, 4.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir4, hrit_ir4_seg] satpy-0.34.0/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml000066400000000000000000000102261420401153000232130ustar00rootroot00000000000000# References: # - MFG User Handbook # - FIDUCEO MVIRI FCDR Product User Guide reader: name: mviri_l1b_fiduceo_nc short_name: FIDUCEO MVIRI FCDR long_name: > Fundamental Climate Data Record of re-calibrated Level 1.5 Infrared, Water Vapour, and Visible radiances from the Meteosat Visible Infra-Red Imager (MVIRI) instrument onboard the Meteosat First Generation satellites description: > Reader for FIDUCEO MVIRI FCDR data in netCDF format. For documentation see http://doi.org/10.15770/EUM_SEC_CLM_0009 . sensors: [mviri] default_channels: [VIS, WV, IR] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: nc_easy: file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriEasyFcdrFileHandler file_patterns: [ 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_EASY_{processor_version}_{format_version}.nc' # Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc ] nc_full: file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriFullFcdrFileHandler file_patterns: [ 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_FULL_{processor_version}_{format_version}.nc' # Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc ] datasets: VIS: name: VIS resolution: 2250 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: # Confirmed by EUM: No (1/wavenumber) here. Hence no standard name. 
units: W m-2 sr-1 counts: standard_name: counts units: count file_type: [nc_easy, nc_full] WV: name: WV resolution: 4500 wavelength: [5.7, 6.4, 7.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [nc_easy, nc_full] IR: name: IR resolution: 4500 wavelength: [10.5, 11.5, 12.5] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [nc_easy, nc_full] quality_pixel_bitmask: name: quality_pixel_bitmask resolution: 2250 file_type: [nc_easy, nc_full] data_quality_bitmask: name: data_quality_bitmask resolution: 2250 file_type: [nc_easy, nc_full] u_independent_toa_bidirectional_reflectance: name: u_independent_toa_bidirectional_reflectance long_name: "independent uncertainty per pixel" units: "%" resolution: 2250 file_type: [nc_easy] u_structured_toa_bidirectional_reflectance: name: u_structured_toa_bidirectional_reflectance long_name: "structured uncertainty per pixel" units: "%" resolution: 2250 file_type: [nc_easy] solar_zenith_angle: name: solar_zenith_angle standard_name: solar_zenith_angle long_name: "Solar zenith angle" units: degree resolution: [2250, 4500] file_type: [nc_easy, nc_full] solar_azimuth_angle: name: solar_azimuth_angle standard_name: solar_azimuth_angle long_name: "Solar azimuth angle" units: degree resolution: [2250, 4500] file_type: [nc_easy, nc_full] satellite_zenith_angle: name: satellite_zenith_angle standard_name: sensor_zenith_angle long_name: "Satellite zenith angle" units: degree resolution: [2250, 4500] file_type: [nc_easy, nc_full] satellite_azimuth_angle: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle long_name: "Satellite azimuth angle" units: degree resolution: [2250, 4500] file_type: [nc_easy, nc_full] satpy-0.34.0/satpy/etc/readers/nucaps.yaml000066400000000000000000000102241420401153000204400ustar00rootroot00000000000000reader: description: NUCAPS Retrieval Reader name: nucaps reader: !!python/name:satpy.readers.nucaps.NUCAPSReader sensors: [cris, atms, viirs] data_identification_keys: name: required: true level: resolution: modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple file_types: nucaps: file_reader: !!python/name:satpy.readers.nucaps.NUCAPSFileHandler file_patterns: - 'NUCAPS-EDR_{nucaps_version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' - 'NUCAPS-sciEDR_{am_pm:2s}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S}_e{end_time:%Y%m%d%H%M%S}_STC_fsr.nc' datasets: longitude: name: longitude file_type: nucaps file_key: Longitude units: degrees standard_name: longitude latitude: name: latitude file_type: nucaps file_key: Latitude units: degrees standard_name: latitude Solar_Zenith: name: Solar_Zenith coordinates: [longitude, latitude] file_type: nucaps Topography: name: Topography coordinates: [longitude, latitude] file_type: nucaps Land_Fraction: name: Land_Fraction coordinates: [longitude, latitude] file_type: nucaps Effective_Pressure: name: Effective_Pressure coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Surface_Pressure: name: Surface_Pressure coordinates: [longitude, latitude] file_type: nucaps Skin_Temperature: name: Skin_Temperature 
coordinates: [longitude, latitude] file_type: nucaps Quality_Flag: name: Quality_Flag coordinates: [longitude, latitude] file_type: nucaps # Can't include cloud products until we figure out how to handle cloud layers dimension # Cloud_Top_Pressure: # name: Cloud_Top_Pressure # coordinates: [longitude, latitude] # file_type: nucaps # pressure_based: True # Cloud_Top_Fraction: # name: Cloud_Top_Fraction # coordinates: [longitude, latitude] # file_type: nucaps # pressure_based: True Temperature: name: Temperature coordinates: [longitude, latitude] file_type: nucaps pressure_based: True H2O: name: H2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True H2O_MR: name: H2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True O3: name: O3 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True O3_MR: name: O3_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Liquid_H2O: name: Liquid_H2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Liquid_H2O_MR: name: Liquid_H2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO: name: CO coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO_MR: name: CO_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CH4: name: CH4 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CH4_MR: name: CH4_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO2: name: CO2 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True HNO3: name: HNO3 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True HNO3_MR: name: HNO3_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True N2O: name: N2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True N2O_MR: name: N2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True SO2: name: SO2 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True SO2_MR: name: SO2_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Pressure_Levels: name: Pressure_Levels standard_name: air_pressure file_type: nucaps file_key: Pressure index: 0 satpy-0.34.0/satpy/etc/readers/nwcsaf-geo.yaml000066400000000000000000000342671420401153000212150ustar00rootroot00000000000000reader: description: NetCDF4 reader for the NWCSAF GEO 2016/2018 format name: nwcsaf-geo sensors: [seviri, abi, ahi] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMA_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_CMA_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CT_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_CT_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CTTH_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_CTTH_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_cmic: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMIC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 
'S_NWC_CMIC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_pc: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_PC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_PC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_crr: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CRR_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ishai: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_iSHAI_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ci: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CI_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_rdt: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_RDT-CW_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-NG_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii_tf: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-TF_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii_gw: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-GW_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] datasets: # ---- CMA products ------------ cma: name: cma resolution: 3000 file_type: nc_nwcsaf_cma cma_pal: name: cma_pal resolution: 3000 file_type: nc_nwcsaf_cma cma_cloudsnow: name: cma_cloudsnow resolution: 3000 file_type: nc_nwcsaf_cma cma_cloudsnow_pal: name: cma_cloudsnow_pal resolution: 3000 file_type: nc_nwcsaf_cma cma_dust: name: cma_dust resolution: 3000 file_type: nc_nwcsaf_cma cma_dust_pal: name: cma_dust_pal resolution: 3000 file_type: nc_nwcsaf_cma cma_volcanic: name: cma_volcanic resolution: 3000 file_type: nc_nwcsaf_cma cma_volcanic_pal: name: cma_volcanic_pal resolution: 3000 file_type: nc_nwcsaf_cma cma_conditions: name: cma_conditions resolution: 3000 file_type: nc_nwcsaf_cma cma_status_flag: name: cma_status_flag resolution: 3000 file_type: nc_nwcsaf_cma # ---- CT products ------------ ct: name: ct resolution: 3000 file_type: nc_nwcsaf_ct ct_pal: name: ct_pal resolution: 3000 file_type: nc_nwcsaf_ct ct_cumuliform: name: ct_cumuliform resolution: 3000 file_type: nc_nwcsaf_ct ct_cumuliform_pal: name: ct_cumuliform_pal resolution: 3000 file_type: nc_nwcsaf_ct ct_multilayer: name: ct_multilayer resolution: 3000 file_type: nc_nwcsaf_ct ct_multilayer_pal: name: ct_multilayer_pal resolution: 3000 file_type: nc_nwcsaf_ct ct_quality: name: ct_quality resolution: 3000 file_type: nc_nwcsaf_ct ct_conditions: name: ct_conditions resolution: 3000 file_type: nc_nwcsaf_ct # ---- CTTH products ------------ ctth_alti: name: ctth_alti resolution: 3000 file_type: nc_nwcsaf_ctth ctth_alti_pal: name: ctth_alti_pal resolution: 3000 file_type: nc_nwcsaf_ctth ctth_pres: name: ctth_pres resolution: 3000 file_type: nc_nwcsaf_ctth ctth_pres_pal: name: ctth_pres_pal resolution: 3000 file_type: nc_nwcsaf_ctth ctth_tempe: name: ctth_tempe resolution: 3000 file_type: nc_nwcsaf_ctth ctth_tempe_pal: name: ctth_tempe_pal resolution: 3000 file_type: nc_nwcsaf_ctth ctth_effectiv: name: ctth_effectiv resolution: 3000 file_type: nc_nwcsaf_ctth ctth_effectiv_pal: name: ctth_effectiv_pal resolution: 3000 file_type: nc_nwcsaf_ctth ctth_method: name: ctth_method resolution:
3000 file_type: nc_nwcsaf_ctth ctth_conditions: name: ctth_conditions resolution: 3000 file_type: nc_nwcsaf_ctth ctth_quality: name: ctth_quality resolution: 3000 file_type: nc_nwcsaf_ctth ctth_status_flag: name: ctth_status_flag resolution: 3000 file_type: nc_nwcsaf_ctth # ---- CMIC products ------------ cmic_phase: name: cmic_phase resolution: 3000 file_type: nc_nwcsaf_cmic cmic_phase_pal: name: cmic_phase_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_reff: name: cmic_reff resolution: 3000 file_type: nc_nwcsaf_cmic cmic_reff_pal: name: cmic_reff_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_cot: name: cmic_cot resolution: 3000 file_type: nc_nwcsaf_cmic cmic_cot_pal: name: cmic_cot_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_lwp: name: cmic_lwp resolution: 3000 file_type: nc_nwcsaf_cmic cmic_lwp_pal: name: cmic_lwp_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_iwp: name: cmic_iwp resolution: 3000 file_type: nc_nwcsaf_cmic cmic_iwp_pal: name: cmic_iwp_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_status_flag: name: cmic_status_flag resolution: 3000 file_type: nc_nwcsaf_cmic cmic_conditions: name: cmic_conditions resolution: 3000 file_type: nc_nwcsaf_cmic cmic_quality: name: cmic_quality resolution: 3000 file_type: nc_nwcsaf_cmic # ---- PC products ------------ pc: name: pc resolution: 3000 file_type: nc_nwcsaf_pc pc_pal: name: pc_pal resolution: 3000 file_type: nc_nwcsaf_pc pc_conditions: name: pc_conditions resolution: 3000 file_type: nc_nwcsaf_pc pc_quality: name: pc_quality resolution: 3000 file_type: nc_nwcsaf_pc # ---- CRR products ------------ crr: name: crr resolution: 3000 file_type: nc_nwcsaf_crr crr_pal: name: crr_pal resolution: 3000 file_type: nc_nwcsaf_crr crr_accum: name: crr_accum resolution: 3000 file_type: nc_nwcsaf_crr crr_accum_pal: name: crr_accum_pal resolution: 3000 file_type: nc_nwcsaf_crr crr_intensity: name: crr_intensity resolution: 3000 file_type: nc_nwcsaf_crr crr_intensity_pal: name: crr_intensity_pal resolution: 3000 file_type: nc_nwcsaf_crr crr_status_flag: name: crr_status_flag resolution: 3000 file_type: nc_nwcsaf_crr crr_conditions: name: crr_conditions resolution: 3000 file_type: nc_nwcsaf_crr crr_quality: name: crr_quality resolution: 3000 file_type: nc_nwcsaf_crr # ----iSHAI products ------------ ishai_tpw: name: ishai_tpw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_tpw_pal: name: ishai_tpw_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_shw: name: ishai_shw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_shw_pal: name: ishai_shw_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_li: name: ishai_li resolution: 3000 file_type: nc_nwcsaf_ishai ishai_li_pal: name: ishai_li_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ki: name: ishai_ki resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ki_pal: name: ishai_ki_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_shw: name: ishai_shw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_shw_pal: name: ishai_shw_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_bl: name: ishai_bl resolution: 3000 file_type: nc_nwcsaf_ishai ishai_bl_pal: name: ishai_bl_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ml: name: ishai_ml resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ml_pal: name: ishai_ml_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_hl: name: ishai_hl resolution: 3000 file_type: nc_nwcsaf_ishai ishai_hl_pal: name: ishai_hl_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_toz: name: ishai_toz resolution: 3000 file_type: 
nc_nwcsaf_ishai ishai_toz_pal: name: ishai_toz_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_skt: name: ishai_skt resolution: 3000 file_type: nc_nwcsaf_ishai ishai_skt_pal: name: ishai_skt_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftpw: name: ishai_difftpw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftpw_pal: name: ishai_difftpw_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffshw: name: ishai_diffshw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffshw_pal: name: ishai_diffshw_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffli: name: ishai_diffli resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffli_pal: name: ishai_diffli_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffki: name: ishai_diffki resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffki_pal: name: ishai_diffki_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffbl: name: ishai_diffbl resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffbl_pal: name: ishai_diffbl_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffml: name: ishai_diffml resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffml_pal: name: ishai_diffml_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffhl: name: ishai_diffhl resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffhl_pal: name: ishai_diffhl_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftoz: name: ishai_difftoz resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftoz_pal: name: ishai_difftoz_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffskt: name: ishai_diffskt resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffskt_pal: name: ishai_diffskt_pal resolution: 3000 file_type: nc_nwcsaf_ishai ihsai_status_flag: name: ihsai_status_flag resolution: 3000 file_type: nc_nwcsaf_ishai ishai_residual: name: ishai_residual resolution: 3000 file_type: nc_nwcsaf_ishai ishai_residual_pal: name: ishai_residual_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_conditions: name: ishai_conditions resolution: 3000 file_type: nc_nwcsaf_ishai ishai_quality: name: ishai_quality resolution: 3000 file_type: nc_nwcsaf_ishai # ----CI products ------------ ci_prob30: name: ci_prob30 resolution: 3000 file_type: nc_nwcsaf_ci ci_prob60: name: ci_prob60 resolution: 3000 file_type: nc_nwcsaf_ci ci_prob90: name: ci_prob90 resolution: 3000 file_type: nc_nwcsaf_ci # 2018 version ci_prob_pal: name: ci_prob_pal resolution: 3000 file_type: nc_nwcsaf_ci # 2016 Version ci_pal: name: ci_pal resolution: 3000 file_type: nc_nwcsaf_ci ci_status_flag: name: ci_status_flag resolution: 3000 file_type: nc_nwcsaf_ci ci_conditions: name: ci_conditions resolution: 3000 file_type: nc_nwcsaf_ci ci_quality: name: ci_quality resolution: 3000 file_type: nc_nwcsaf_ci # ----RDT products ------------ MapCellCatType: name: MapCellCatType resolution: 3000 file_type: nc_nwcsaf_rdt MapCellCatType_pal: name: MapCellCatType_pal resolution: 3000 file_type: nc_nwcsaf_rdt MapCell_conditions: name: MapCell_conditions resolution: 3000 file_type: nc_nwcsaf_rdt MapCell_quality: name: MapCell_quality resolution: 3000 file_type: nc_nwcsaf_rdt # ----ASII products in multiple files ------------ asii_turb_trop_prob: name: asii_turb_trop_prob resolution: 3000 file_type: [nc_nwcsaf_asii_tf, nc_nwcsaf_asii] asii_turb_prob_pal: name: asii_turb_prob_pal resolution: 3000 file_type: [nc_nwcsaf_asii_tf, nc_nwcsaf_asii_gw] # ----ASII-TF product ------------ asii_turb_prob_status_flag: name: asii_turb_trop_prob_status_flag resolution: 3000 file_type: nc_nwcsaf_asii_tf 
asiitf_conditions: name: asiitf_conditions resolution: 3000 file_type: nc_nwcsaf_asii_tf asiitf_quality: name: asiitf_quality resolution: 3000 file_type: nc_nwcsaf_asii_tf # ----ASII-GW product ------------ asii_turb_wave_prob: name: asii_turb_wave_prob resolution: 3000 file_type: nc_nwcsaf_asii_gw asii_turb_wave_prob_status_flag: name: asii_turb_wave_prob_status_flag resolution: 3000 file_type: nc_nwcsaf_asii_gw asiigw_conditions: name: asiigw_conditions resolution: 3000 file_type: nc_nwcsaf_asii_gw asiigw_quality: name: asiigw_quality resolution: 3000 file_type: nc_nwcsaf_asii_gw satpy-0.34.0/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml000066400000000000000000000100541420401153000223470ustar00rootroot00000000000000reader: description: HDF5 reader for the NWCSAF/Geo Seviri 2013 format name: nwcsaf-msg2013-hdf5 sensors: [seviri] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: h5_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CMa__201908271145_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CMa__{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5'] h5_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CT___201906241245_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CT___{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5'] h5_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CTTH_201906241245_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CTTH_{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5'] datasets: # ---- CMA products ------------ cma: name: cma sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_pal: name: cma_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_cloudsnow: name: cma_cloudsnow sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_cloudsnow_pal: name: cma_cloudsnow_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_dust: name: cma_dust sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_dust_pal: name: cma_dust_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_volcanic: name: cma_volcanic sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_volcanic_pal: name: cma_volcanic_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_conditions: name: cma_conditions sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_status_flag: name: cma_status_flag sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma # ---- CT products ------------ ct: name: ct sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT ct_pal: name: ct_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: 01-PALETTE ct_quality: name: ct_quality sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT_QUALITY ct_phase: name: ct_phase sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT_PHASE ct_phase_pal: name: ct_phase_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: 02-PALETTE # ---- CTTH products ------------ ctth_alti: name: ctth_alti sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_HEIGHT ctth_alti_pal: name: ctth_alti_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 02-PALETTE ctth_pres: name: ctth_pres sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_PRESS ctth_pres_pal: name: ctth_pres_pal sensor: 
seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 01-PALETTE ctth_tempe: name: ctth_tempe sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_TEMPER ctth_tempe_pal: name: ctth_tempe_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 03-PALETTE ctth_effective_cloudiness: name: ctth_effective_cloudiness sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_EFFECT ctth_effective_cloudiness_pal: name: ctth_eff_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 04-PALETTE ctth_quality: name: ctth_quality sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_QUALITY satpy-0.34.0/satpy/etc/readers/nwcsaf-pps_nc.yaml000066400000000000000000000177001420401153000217160ustar00rootroot00000000000000reader: description: NetCDF4 reader for the NWCSAF/PPS 2014 format name: nwcsaf-pps_nc sensors: ['avhrr-3', 'viirs', 'modis'] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMA_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CMA_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CMA_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_cmaprob: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMAPROB_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] nc_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CT_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CT_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CT_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CTTH_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CTTH_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CTTH_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_pc: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_PC_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] nc_nwcsaf_cpp: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CPP_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] file_key_prefix: cpp_ nc_nwcsaf_cmic: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMIC_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] file_key_prefix: cmic_ datasets: lon: name: lon file_type: - nc_nwcsaf_cma - nc_nwcsaf_ct - nc_nwcsaf_ctth units: "degrees" standard_name: longitude lat: name: lat file_type: - nc_nwcsaf_cma - nc_nwcsaf_ct - nc_nwcsaf_ctth units: "degrees" standard_name: latitude # ---- CMA products ------------ cma: name: cma file_type: nc_nwcsaf_cma coordinates: [lon, lat] cma_pal: name: cma_pal file_type: nc_nwcsaf_cma cma_extended: name: cma_extended file_type: nc_nwcsaf_cma 
coordinates: [lon, lat] cma_extended_pal: name: cma_extended_pal file_type: nc_nwcsaf_cma cmaprob: name: cmaprob file_type: nc_nwcsaf_cmaprob coordinates: [lon, lat] cmaprob_pal: name: cmaprob_pal scale_offset_dataset: cmaprob file_type: nc_nwcsaf_cmaprob cmaprob_conditions: name: cmaprob_conditions file_type: nc_nwcsaf_cmaprob coordinates: [lon, lat] standard_name: cmaprob_conditions cmaprob_quality: name: cmaprob_quality file_type: nc_nwcsaf_cmaprob coordinates: [lon, lat] standard_name: cmaprob_quality cmaprob_status_flag: name: cmaprob_status_flag file_type: nc_nwcsaf_cmaprob coordinates: [lon, lat] standard_name: cmaprob_status_flag # ---- CT products ------------ ct: name: ct file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype ct_conditions: name: ct_conditions file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype_conditions ct_quality: name: ct_quality file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype_quality ct_status_flag: name: ct_status_flag file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype_status_flag ct_pal: name: ct_pal file_type: nc_nwcsaf_ct standard_name: palette # ---- PC products ------------ pc_conditions: name: pc_conditions file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_intense: name: pc_precip_intense file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_moderate: name: pc_precip_moderate file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_light: name: pc_precip_light file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_status_flag: name: pc_status_flag file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_quality: name: pc_quality file_type: nc_nwcsaf_pc coordinates: [lon, lat] # ---- CTTH products ------------ ctth_alti: name: ctth_alti file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_alti_pal: name: ctth_alti_pal file_type: nc_nwcsaf_ctth ctth_quality: name: ctth_quality file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_conditions: name: ctth_conditions file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_status_flag: name: ctth_status_flag file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_pres: name: ctth_pres file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_pres_pal: name: ctth_pres_pal file_type: nc_nwcsaf_ctth ctth_tempe: name: ctth_tempe file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_tempe_pal: name: ctth_tempe_pal file_type: nc_nwcsaf_ctth # ---- CMIC products (Was CPP in PPS<=2018)------------ cmic_phase: name: cmic_phase file_key: phase file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_phase_pal: name: [cmic_phase_pal, cpp_phase_pal] file_key: phase_pal file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_reff: name: cmic_reff file_key: reff file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_reff_pal: name: [cmic_reff_pal, cpp_reff_pal] file_key: reff_pal scale_offset_dataset: reff file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_cot: name: cmic_cot file_key: cot file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_cot_pal: name: [cmic_cot_pal, cpp_cot_pal] file_key: cot_pal scale_offset_dataset: cot file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_cwp: name: cmic_cwp file_key: cwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_cwp_pal: name: [cmic_cwp_pal, cpp_cwp_pal] file_key: cwp_pal scale_offset_dataset: cwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_iwp: name: cmic_iwp file_key: iwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] 
coordinates: [lon, lat] cmic_iwp_pal: name: [cmic_iwp_pal, cpp_iwp_pal] file_key: iwp_pal scale_offset_dataset: iwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_lwp: name: cmic_lwp file_key: lwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_lwp_pal: name: [cmic_lwp_pal, cpp_lwp_pal] file_key: lwp_pal scale_offset_dataset: lwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_status_flag: name: [cmic_status_flag, cpp_status_flag] file_key: status_flag file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_conditions: name: [cmic_conditions, cpp_conditions] file_key: conditions file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_quality: name: [cmic_quality, cpp_quality] file_key: quality file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] satpy-0.34.0/satpy/etc/readers/olci_l1b.yaml000066400000000000000000000314421420401153000206400ustar00rootroot00000000000000reader: description: NC Reader for OLCI data name: olci_l1b sensors: [olci] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: esa_l1b: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI1B file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance.nc' requires: [esa_cal] esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' esa_cal: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCICal file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/instrument_data.nc' - 
'{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/instrument_data.nc' esa_meteo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIMeteo file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_meteo.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_meteo.nc' datasets: longitude: name: longitude resolution: 300 file_type: esa_geo standard_name: longitude units: degree latitude: name: latitude resolution: 300 file_type: esa_geo standard_name: latitude units: degree altitude: name: altitude resolution: 300 file_type: esa_geo standard_name: altitude units: m Oa01: name: Oa01 sensor: olci wavelength: [0.3925,0.4,0.4075] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa02: name: Oa02 sensor: olci wavelength: [0.4075, 0.4125, 0.4175] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa03: name: Oa03 sensor: olci wavelength: [0.4375,0.4425,0.4475] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa04: name: Oa04 sensor: olci wavelength: [0.485,0.49,0.495] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa05: name: Oa05 sensor: olci wavelength: [0.505,0.51,0.515] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa06: name: Oa06 sensor: olci wavelength: [0.555,0.56,0.565] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa07: name: Oa07 sensor: olci wavelength: [0.615,0.62,0.625] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa08: name: Oa08 sensor: olci wavelength: [0.66,0.665,0.67] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: 
toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa09: name: Oa09 sensor: olci wavelength: [0.67,0.67375,0.6775] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa10: name: Oa10 sensor: olci wavelength: [0.6775,0.68125,0.685] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa11: name: Oa11 sensor: olci wavelength: [0.70375,0.70875,0.71375] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa12: name: Oa12 sensor: olci wavelength: [0.75,0.75375,0.7575] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa13: name: Oa13 sensor: olci wavelength: [0.76,0.76125,0.7625] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa14: name: Oa14 sensor: olci wavelength: [0.760625, 0.764375, 0.768125] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa15: name: Oa15 sensor: olci wavelength: [0.76625, 0.7675, 0.76875] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa16: name: Oa16 sensor: olci wavelength: [0.77125, 0.77875, 0.78625] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa17: name: Oa17 sensor: olci wavelength: [0.855, 0.865, 0.875] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa18: name: Oa18 sensor: olci wavelength: [0.88, 0.885, 0.89] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa19: name: Oa19 sensor: olci wavelength: [0.895, 0.9, 0.905] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa20: name: Oa20 sensor: olci wavelength: [0.93, 0.94, 0.95] resolution: 300 coordinates: [longitude, 
latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa21: name: Oa21 sensor: olci wavelength: [1.0, 1.02, 1.04] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b solar_zenith_angle: name: solar_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles solar_azimuth_angle: name: solar_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_zenith_angle: name: satellite_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_azimuth_angle: name: satellite_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles humidity: name: humidity sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo sea_level_pressure: name: sea_level_pressure sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo total_columnar_water_vapour: name: total_columnar_water_vapour sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo total_ozone: name: total_ozone sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo satpy-0.34.0/satpy/etc/readers/olci_l2.yaml000066400000000000000000000311161420401153000204750ustar00rootroot00000000000000reader: description: NC Reader for OLCI data name: olci_l2 sensors: [olci] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: esa_l2_reflectance: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc'] esa_l2_chl_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc'] esa_l2_chl_oc4me: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc'] esa_l2_iop_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc'] esa_l2_trsp: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: 
['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc'] esa_l2_tsm_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc'] esa_l2_wqsf: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc'] esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc'] esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc'] datasets: longitude: name: longitude resolution: 300 file_type: esa_geo standard_name: longitude units: degree latitude: name: latitude resolution: 300 file_type: esa_geo standard_name: latitude units: degree Oa01: name: Oa01 sensor: olci wavelength: [0.3925,0.4,0.4075] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa02: name: Oa02 sensor: olci wavelength: [0.4075, 0.4125, 0.4175] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa03: name: Oa03 sensor: olci wavelength: [0.4375,0.4425,0.4475] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa04: name: Oa04 sensor: olci wavelength: [0.485,0.49,0.495] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa05: name: Oa05 sensor: olci wavelength: [0.505,0.51,0.515] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa06: name: Oa06 sensor: olci wavelength: [0.555,0.56,0.565] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa07: name: Oa07 sensor: olci wavelength: [0.615,0.62,0.625] modifiers: [sunz_corrected, rayleigh_corrected] 
resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa08: name: Oa08 sensor: olci wavelength: [0.66,0.665,0.67] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa09: name: Oa09 sensor: olci wavelength: [0.67,0.67375,0.6775] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa10: name: Oa10 sensor: olci wavelength: [0.6775,0.68125,0.685] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa11: name: Oa11 sensor: olci wavelength: [0.70375,0.70875,0.71375] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa12: name: Oa12 sensor: olci wavelength: [0.75,0.75375,0.7575] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa13: name: Oa13 sensor: olci wavelength: [0.76,0.76125,0.7625] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa14: name: Oa14 sensor: olci wavelength: [0.760625, 0.764375, 0.768125] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa15: name: Oa15 sensor: olci wavelength: [0.76625, 0.7675, 0.76875] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa16: name: Oa16 sensor: olci wavelength: [0.77125, 0.77875, 0.78625] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa17: name: Oa17 sensor: olci wavelength: [0.855, 0.865, 0.875] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa18: name: Oa18 sensor: olci wavelength: [0.88, 0.885, 0.89] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa19: name: Oa19 sensor: olci wavelength: [0.895, 0.9, 0.905] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa20: name: Oa20 sensor: olci wavelength: [0.93, 0.94, 0.95] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 
coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa21: name: Oa21 sensor: olci wavelength: [1.0, 1.02, 1.04] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance chl_oc4me: name: chl_oc4me sensor: olci resolution: 300 calibration: reflectance: standard_name: algal_pigment_concentration units: "lg(re mg.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_chl_oc4me nc_key: CHL_OC4ME chl_nn: name: chl_nn sensor: olci resolution: 300 calibration: reflectance: standard_name: algal_pigment_concentration units: "lg(re mg.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_chl_nn nc_key: CHL_NN iop_nn: name: iop_nn sensor: olci resolution: 300 calibration: reflectance: standard_name: cdm_absorption_coefficient units: "lg(re m-l)" coordinates: [longitude, latitude] file_type: esa_l2_iop_nn nc_key: ADG443_NN trsp: name: trsp sensor: olci resolution: 300 calibration: reflectance: standard_name: diffuse_attenuation_coefficient units: "lg(re m-l)" coordinates: [longitude, latitude] file_type: esa_l2_trsp nc_key: KD490_M07 tsm_nn: name: tsm_nn sensor: olci resolution: 300 calibration: reflectance: standard_name: total_suspended_matter_concentration units: "lg(re g.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_tsm_nn nc_key: TSM_NN wqsf: name: wqsf sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_l2_wqsf nc_key: WQSF mask: name: mask sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_l2_wqsf nc_key: WQSF solar_zenith_angle: name: solar_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles solar_azimuth_angle: name: solar_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_zenith_angle: name: satellite_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_azimuth_angle: name: satellite_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satpy-0.34.0/satpy/etc/readers/omps_edr.yaml000066400000000000000000000174471420401153000207750ustar00rootroot00000000000000reader: description: Generic OMPS EDR reader name: omps_edr reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [omps] file_types: # HDF5 files from NASA GES DISC HTTP # https://search.earthdata.nasa.gov/search/granules # https://snpp-omps.gesdisc.eosdis.nasa.gov/data//SNPP_OMPS_Level2/OMPS_NPP_NMSO2_L2.2/ # https://disc.sci.gsfc.nasa.gov/ omps_tc_so2_edr_ges_disc: file_reader: !!python/name:satpy.readers.omps_edr.EDRFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}_NMSO2-L2_{start_time:%Ym%m%dt%H%M%S}_o{orbit:05d}_{end_time:%Ym%m%dt%H%M%S}.h5'] # HDF5-EOS files from NASA DRL # ftp://is.sci.gsfc.nasa.gov/gsfcdata/npp/omps/level2/ omps_tc_so2_edr: file_reader: !!python/name:satpy.readers.omps_edr.EDREOSFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}-TC_EDR_SO2NRT-{start_time:%Ym%m%dt%H%M%S}-o{orbit:05d}-{end_time:%Ym%m%dt%H%M%S}.he5'] # HDF5 files from NASA DRL # ftp://is.sci.gsfc.nasa.gov/gsfcdata/npp/omps/level2/ omps_tc_to3_edr: file_reader: !!python/name:satpy.readers.omps_edr.EDRFileHandler file_patterns: 
['{instrument_shortname}-{platform_shortname}-TC_EDR_TO3-{version}-{start_time:%Ym%m%dt%H%M%S}-o{orbit:05d}-{end_time:%Ym%m%dt%H%M%S}.h5'] # HDF5 file from FMI SAMPO https://sampo.fmi.fi/ via Eumetcast omps_sampo: file_reader: !!python/name:satpy.readers.omps_edr.EDRFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}_NMSO2-PCA-L2_{version}_{start_time:%Ym%m%dt%H%M%S}_o{orbit:05d}_{end_time:%Ym%m%dt%H%M%S}.h5'] # ftp://omisips1.omisips.eosdis.nasa.gov/OMPS/LANCE/NMSO2-L2-NRT-NRT/ # ftp://omisips1.omisips.eosdis.nasa.gov/OMPS/LANCE/NMSO2-L2-NRT-NRT/OMPS-NPP_NMSO2-L2-NRT_2017m0804t030731_o29890_2017m0804t021637.he5 datasets: reflectivity_331: name: reflectivity_331 resolution: 50000 coordinates: [longitude_to3, latitude_to3] file_type: omps_tc_to3_edr file_key: SCIENCE_DATA/Reflectivity331 uvaerosol_index: name: uvaerosol_index resolution: 50000 coordinates: [longitude_to3, latitude_to3] file_type: omps_tc_to3_edr file_key: SCIENCE_DATA/UVAerosolIndex so2_trm: name: so2_trm resolution: 50000 coordinates: [longitude_so2, latitude_so2] file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM longitude_to3: name: longitude_to3 resolution: 50000 file_type: omps_tc_to3_edr file_key: GEOLOCATION_DATA/Longitude units: degrees standard_name: longitude latitude_to3: name: latitude_to3 resolution: 50000 file_type: omps_tc_to3_edr file_key: GEOLOCATION_DATA/Latitude units: degrees standard_name: latitude longitude_so2: name: longitude_so2 resolution: 50000 file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude units: degrees standard_name: longitude latitude_so2: name: latitude_so2 resolution: 50000 file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude units: degrees standard_name: latitude #[file_key:so2_trm_eos] #variable_name=HDFEOS/SWATHS/{file_group}/Data Fields/ColumnAmountSO2_TRM #units_attr=Units #missing_attr=MissingValue #factor=ScaleFactor #offset=Offset aerosol_index: name: aerosol_index resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/AerosolIndex cldfra: name: cldfra resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudFraction cldpres: name: cldpres resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudPressure cldrefl: name: cldrefl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudRefletivity tco3_dvcf: name: tco3_dvcf resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountO3isf tco3_toms: name: tco3_toms resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountO3pair tcso2_abv: name: tcso2_abv resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_ABV tcso2_pbl: name: tcso2_pbl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_PBL tcso2_stl: name: tcso2_stl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: 
ScienceData/ColumnAmountSO2_STL tcso2_trl: name: tcso2_trl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRL tcso2_trm: name: tcso2_trm resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRM tcso2_tru: name: tcso2_tru resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRU longitude_so2_gd: name: longitude_so2_gd resolution: 50000 file_type: omps_tc_so2_edr_ges_disc file_key: GeolocationData/Longitude units: degrees standard_name: longitude latitude_so2_gd: name: latitude_so2_gd resolution: 50000 file_type: omps_tc_so2_edr_ges_disc file_key: GeolocationData/Latitude units: degrees standard_name: latitude longitude_sampo: name: longitude_sampo resolution: 50000 file_type: omps_sampo file_key: GEOLOCATION_DATA/Longitude units: degrees_east standard_name: longitude latitude_sampo: name: latitude_sampo resolution: 50000 file_type: omps_sampo file_key: GEOLOCATION_DATA/Latitude units: degrees_north standard_name: latitude ColumnAmountO3: name: tco3_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountO3 ColumnAmountSO2_PBL: name: tcso2_pbl_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_PBL ColumnAmountSO2_STL: name: tcso2_stl_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_STL ColumnAmountSO2_TRL: name: tcso2_trl_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_TRL ColumnAmountSO2_TRM: name: tcso2_trm_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_TRM ColumnAmountSO2_TRU: name: tcso2_tru_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_TRU UVAerosolIndex: name: uvaerosol_index_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/UVAerosolIndex CloudFraction: name: cldfra_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/CloudFraction satpy-0.34.0/satpy/etc/readers/safe_sar_l2_ocn.yaml000066400000000000000000000075061420401153000221770ustar00rootroot00000000000000reader: description: SAFE Reader for SAR L2 OCN data name: safe_sar_l2_ocn sensors: [sar-c] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: safe_measurement: file_reader: !!python/name:satpy.readers.safe_sar_l2_ocn.SAFENC file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.nc'] datasets: owiLat: name: owiLat file_type: safe_measurement standard_name: latitude units: degree owiLon: name: owiLon file_type: 
safe_measurement standard_name: longitude units: degree owiWindDirection: name: owiWindDirection sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degree owiWindSpeed: name: owiWindSpeed sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m s-1 owiEcmwfWindDirection: name: owiEcmwfWindDirection sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degree owiEcmwfWindSpeed: name: owiEcmwfWindSpeed sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m s-1 owiHs: name: owiHs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiWl: name: owiWl sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiDirmet: name: owiDirmet sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiWindSeaHs: name: owiWindSeaHs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiIncidenceAngle: name: owiIncidenceAngle sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiElevationAngle: name: owiElevationAngle sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiNrcs: name: owiNrcs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiNesz: name: owiNesz sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiNrcsNeszCorr: name: owiNrcsNeszCorr sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiPolarisationName: name: owiPolarisationName sensor: sar-c file_type: safe_measurement owiPBright: name: owiPBright sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: '%' owiNrcsCmod: name: owiNrcsCmod sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiCalConstObsi: name: owiCalConstObsi sensor: sar-c file_type: safe_measurement owiCalConstInci: name: owiCalConstInci sensor: sar-c file_type: safe_measurement owiInversionQuality: name: owiInversionQuality sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] owiMask: name: owiMask sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] owiHeading: name: owiHeading sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiWindQuality: name: owiWindQuality sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] satpy-0.34.0/satpy/etc/readers/sar-c_safe.yaml000066400000000000000000000124121420401153000211530ustar00rootroot00000000000000reader: description: SAFE Reader for SAR-C data name: sar-c_safe sensors: [sar-c] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader data_identification_keys: name: required: true polarization: transitive: true resolution: transitive: false calibration: enum: - gamma - sigma_nought - beta_nought transitive: true quantity: enum: - natural - dB transitive: true modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple coord_identification_keys: name: required: true polarization: transitive: true resolution: transitive: false file_types: safe_measurement: file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEGRD file_patterns: 
['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.tiff'] requires: [safe_calibration, safe_noise, safe_annotation] safe_calibration: file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLCalibration file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/calibration-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] requires: [safe_annotation] safe_noise: file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLNoise file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/noise-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] requires: [safe_annotation] safe_annotation: file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLAnnotation file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] datasets: latitude: name: latitude resolution: 80 file_type: safe_measurement standard_name: latitude polarization: [hh, hv, vv, vh] units: degree longitude: name: longitude resolution: 80 file_type: safe_measurement standard_name: longitude polarization: [hh, hv, vv, vh] units: degree altitude: name: altitude resolution: 80 file_type: safe_measurement standard_name: altitude polarization: [hh, hv, vv, vh] units: meter measurement: name: measurement sensor: sar-c wavelength: [5.400, 5.405, 5.410] resolution: 80 polarization: [hh, hv, vv, vh] calibration: gamma: standard_name: backscatter units: 1 sigma_nought: standard_name: backscatter units: 1 beta_nought: standard_name: backscatter units: 1 quantity: [natural, dB] coordinates: [longitude, latitude] file_type: safe_measurement noise: name: noise sensor: sar-c resolution: 80 polarization: [hh, hv, vv, vh] file_type: safe_noise xml_item: - noiseVector - noiseRangeVector xml_tag: - noiseLut - noiseRangeLut sigma: name: sigma_squared sensor: sar-c resolution: 80 polarization: [hh, hv, vv, vh] file_type: safe_calibration xml_item: calibrationVector xml_tag: sigma beta: name: beta_squared sensor: sar-c resolution: 80 polarization: [hh, hv, vv, vh] file_type: safe_calibration xml_item: calibrationVector xml_tag: 
beta
  gamma:
    name: gamma_squared
    sensor: sar-c
    resolution: 80
    polarization: [hh, hv, vv, vh]
    file_type: safe_calibration
    xml_item: calibrationVector
    xml_tag: gamma
  incidence_angle:
    name: incidence_angle
    sensor: sar-c
    resolution: 80
    polarization: [hh, hv, vv, vh]
    units: degrees
    file_type: safe_annotation
    xml_item: geolocationGridPoint
    xml_tag: incidenceAngle
  calibration_constant:
    name: calibration_constant
    sensor: sar-c
    polarization: [hh, hv, vv, vh]
    units: 1
    file_type: safe_calibration

satpy-0.34.0/satpy/etc/readers/satpy_cf_nc.yaml
reader:
  name: satpy_cf_nc
  description: Reader for Satpy's NC/CF files
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [many]
  default_channels: []

#datasets:

file_types:
  graphic:
    file_reader: !!python/name:satpy.readers.satpy_cf_nc.SatpyCFFileHandler
    file_patterns:
      - '{platform_name}-{sensor}-{resolution_type}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc'
      - '{platform_name}-{sensor}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc'

satpy-0.34.0/satpy/etc/readers/scatsat1_l2b.yaml
reader:
  description: Generic Eumetsat Scatsat-1 L2B Wind field Reader
  name: scatsat1_l2b
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [scatterometer]
  default_datasets:

datasets:
  longitude:
    name: longitude
    resolution: 25000
    file_type: scatsat
    standard_name: longitude
    units: degree
  latitude:
    name: latitude
    resolution: 25000
    file_type: scatsat
    standard_name: latitude
    units: degree
  wind_speed:
    name: wind_speed
    sensor: Scatterometer
    resolution: 25000
    coordinates: [longitude, latitude]
    file_type: scatsat
    standard_name: wind_speed
  wind_direction:
    name: wind_direction
    resolution: 25000
    coordinates: [longitude, latitude]
    file_type: scatsat
    standard_name: wind_direction

file_types:
  scatsat:
    file_reader: !!python/name:satpy.readers.scatsat1_l2b.SCATSAT1L2BFileHandler
    file_patterns: ['S1L2B{start_date:%Y%j}_{start_orbit}_{end_orbit}_{direction}_{cell_spacing}_{prod_date}T{prod_time}_{version}.h5']

satpy-0.34.0/satpy/etc/readers/seadas_l2.yaml
reader:
  description: MODIS and VIIRS SEADAS Reader
  name: seadas_l2
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [modis, viirs]

file_types:
  chlora_seadas:
    file_patterns:
      # IMAPP-style filenames:
      - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.seadas.hdf'
    file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2HDFFileHandler
    geo_resolution: 1000
  chlora_seadas_viirs:
    # SEADAS_npp_d20211118_t1728125_e1739327.hdf
    file_patterns:
      - 'SEADAS_{platform_indicator:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}.hdf'
    file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2HDFFileHandler
    geo_resolution: 750

datasets:
  longitude:
    name: longitude
    file_type: [chlora_seadas, chlora_seadas_viirs]
    file_key: longitude
    resolution:
      1000:
        file_type: chlora_seadas
      750:
        file_type: chlora_seadas_viirs
  latitude:
    name: latitude
    file_type: [chlora_seadas, chlora_seadas_viirs]
    file_key: latitude
    resolution:
      1000:
        file_type: chlora_seadas
      750:
        file_type: chlora_seadas_viirs
  chlor_a:
    name: chlor_a
    file_type: [chlora_seadas, chlora_seadas_viirs]
    file_key: chlor_a
    resolution:
      1000:
        file_type: chlora_seadas
      750:
        file_type: chlora_seadas_viirs
    coordinates: [longitude, latitude]
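How Satpy consumes reader definitions like the ones above (illustration, not part of the archive): a Scene is created with a reader name matching the 'name' field of one of these YAML files, input files are matched against its file_patterns, and datasets are loaded by the names declared in its datasets section. A minimal sketch in Python, assuming a standard Satpy installation and hypothetical SEADAS VIIRS granules on local disk:

    from glob import glob

    from satpy import Scene

    # Hypothetical paths matching the 'chlora_seadas_viirs' file pattern above
    filenames = glob("/data/seadas/SEADAS_npp_d20211118_t*.hdf")

    # 'reader' selects the YAML definition by its reader.name field
    scn = Scene(filenames=filenames, reader="seadas_l2")

    # 'chlor_a' is a dataset name declared in the datasets section; longitude
    # and latitude are attached automatically through its 'coordinates' entry
    scn.load(["chlor_a"])
    chl = scn["chlor_a"]
    print(chl.shape, chl.attrs.get("units"))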
satpy-0.34.0/satpy/etc/readers/seviri_l1b_hrit.yaml000066400000000000000000000337011420401153000222410ustar00rootroot00000000000000# References: # - MSG Level 1.5 Image Data Format Description # - Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent # Spectral Blackbody Radiance reader: name: seviri_l1b_hrit short_name: SEVIRI L1b HRIT long_name: MSG SEVIRI Level 1b (HRIT) description: > HRIT reader for EUMETSAT MSG SEVIRI Level 1b files. sensors: [seviri] default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader file_types: HRIT_HRV: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 24 HRIT_IR_016: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_039: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_087: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_097: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_108: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_120: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_134: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS006: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS008: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_062: 
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_073: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_HRV_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 24 HRIT_IR_016_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_039_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_087_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_097_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_108_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_120_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_134_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS006_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS008_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_062_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: 
['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_073_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_PRO: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-PRO______-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-PRO______-{start_time:%Y%m%d%H%M}-__.bz2'] HRIT_EPI: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-EPI______-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-EPI______-{start_time:%Y%m%d%H%M}-__.bz2'] datasets: HRV: name: HRV resolution: 1000.134348869 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_HRV, HRIT_HRV_C] IR_016: name: IR_016 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_016, HRIT_IR_016_C] IR_039: name: IR_039 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_039, HRIT_IR_039_C] IR_087: name: IR_087 resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_087, HRIT_IR_087_C] IR_097: name: IR_097 resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_097, HRIT_IR_097_C] IR_108: name: IR_108 resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_108, HRIT_IR_108_C] IR_120: name: IR_120 resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: 
[HRIT_IR_120, HRIT_IR_120_C] IR_134: name: IR_134 resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_134, HRIT_IR_134_C] VIS006: name: VIS006 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_VIS006, HRIT_VIS006_C] VIS008: name: VIS008 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_VIS008, HRIT_VIS008_C] WV_062: name: WV_062 resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_WV_062, HRIT_WV_062_C] WV_073: name: WV_073 resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_WV_073, HRIT_WV_073_C] satpy-0.34.0/satpy/etc/readers/seviri_l1b_icare.yaml000066400000000000000000000134451420401153000223610ustar00rootroot00000000000000# References: # - MSG Level 1.5 Image Data Format Description # - Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent # Spectral Blackbody Radiance reader: name: seviri_l1b_icare short_name: SEVIRI L1b ICARE long_name: MSG SEVIRI Level 1b in HDF format from ICARE (Lille) description: > A reader for L1b SEVIRI data that has been retrieved from the ICARE service as HDF. 
sensors: [seviri] default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: ICARE_HRV: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_HRV_{version:5s}.hdf'] ICARE_IR_016: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR016_{version:5s}.hdf'] ICARE_IR_039: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR039_{version:5s}.hdf'] ICARE_IR_087: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR087_{version:5s}.hdf'] ICARE_IR_097: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR097_{version:5s}.hdf'] ICARE_IR_108: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR108_{version:5s}.hdf'] ICARE_IR_120: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR120_{version:5s}.hdf'] ICARE_IR_134: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR134_{version:5s}.hdf'] ICARE_VIS006: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_VIS06_{version:5s}.hdf'] ICARE_VIS008: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_VIS08_{version:5s}.hdf'] ICARE_WV_062: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_WV062_{version:5s}.hdf'] ICARE_WV_073: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_WV073_{version:5s}.hdf'] datasets: HRV: name: HRV resolution: 1000.134348869 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: ICARE_HRV IR_016: name: IR_016 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: reflectance units: "%" file_type: ICARE_IR_016 IR_039: name: IR_039 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: brightness_temperature units: K file_type: ICARE_IR_039 IR_087: name: IR_087 resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: brightness_temperature units: K file_type: ICARE_IR_087 IR_097: name: IR_097 resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: brightness_temperature units: K file_type: ICARE_IR_097 IR_108: name: IR_108 resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: 
brightness_temperature units: K file_type: ICARE_IR_108 IR_120: name: IR_120 resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: brightness_temperature units: K file_type: ICARE_IR_120 IR_134: name: IR_134 resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: brightness_temperature units: K file_type: ICARE_IR_134 VIS006: name: VIS006 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: ICARE_VIS006 VIS008: name: VIS008 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: ICARE_VIS008 WV_062: name: WV_062 resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: brightness_temperature units: "K" file_type: ICARE_WV_062 WV_073: name: WV_073 resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: brightness_temperature units: "K" file_type: ICARE_WV_073 satpy-0.34.0/satpy/etc/readers/seviri_l1b_native.yaml000066400000000000000000000143351420401153000225630ustar00rootroot00000000000000reader: name: seviri_l1b_native short_name: SEVIRI L1b Native long_name: MSG SEVIRI Level 1b (Native) description: > Reader for EUMETSAT MSG SEVIRI Level 1b native format files. sensors: [seviri] default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['end_time', 'satid'] file_types: native_msg: file_reader: !!python/name:satpy.readers.seviri_l1b_native.NativeMSGFileHandler file_patterns: ['{satid:4s}-{instr:4s}-MSG{product_level:2d}-{base_algorithm_version:4s}-NA-{end_time:%Y%m%d%H%M%S.%f}000Z-{processing_time:%Y%m%d%H%M%S}-{order_id:s}.nat', '{satid:4s}-{instr:4s}-MSG{product_level:2d}-{base_algorithm_version:4s}-NA-{end_time:%Y%m%d%H%M%S.%f}000Z-{order_id:s}.nat', '{satid:4s}-{instr:4s}-MSG{product_level:2d}-{base_algorithm_version:4s}-NA-{end_time:%Y%m%d%H%M%S.%f}000Z' ] # Note: the end_time value in the SEVIRI native filenames is officially called Nominal Image Time (SNIT field in # the 15_MAIN_PRODUCT_HEADER) marking the time where the product is defined to be valid. This time always matches # the scan acquisition end time (SSST in 15_MAIN_PRODUCT_HEADER). 
datasets: HRV: name: HRV resolution: 1000.134348869 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg IR_016: name: IR_016 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg IR_039: name: IR_039 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg IR_087: name: IR_087 resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg IR_097: name: IR_097 resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg IR_108: name: IR_108 resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg IR_120: name: IR_120 resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg IR_134: name: IR_134 resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg VIS006: name: VIS006 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg VIS008: name: VIS008 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg WV_062: name: WV_062 resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count 
file_type: native_msg WV_073: name: WV_073 resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: native_msg satpy-0.34.0/satpy/etc/readers/seviri_l1b_nc.yaml000066400000000000000000000132231420401153000216700ustar00rootroot00000000000000reader: name: seviri_l1b_nc short_name: SEVIRI L1b NetCDF4 long_name: MSG SEVIRI Level 1b NetCDF4 description: > NetCDF4 reader for EUMETSAT MSG SEVIRI Level 1b files. sensors: [seviri] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader group_keys: ["start_time", "satid"] file_types: seviri_l1b_nc: file_reader: !!python/name:satpy.readers.seviri_l1b_nc.NCSEVIRIFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,VIS+IR+HRV+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{start_time:%Y%m%d%H%M%S}.nc'] datasets: HRV: name: HRV resolution: 1000.134348869 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch12' IR_016: name: IR_016 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch3' IR_039: name: IR_039 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch4' IR_087: name: IR_087 resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch7' IR_097: name: IR_097 resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch8' IR_108: name: IR_108 resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch9' IR_120: name: IR_120 resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch10' IR_134: name: IR_134 resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: 
K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch11' VIS006: name: VIS006 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch1' VIS008: name: VIS008 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch2' WV_062: name: WV_062 resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch5' WV_073: name: WV_073 resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch6' satpy-0.34.0/satpy/etc/readers/seviri_l2_bufr.yaml000066400000000000000000000700201420401153000220630ustar00rootroot00000000000000reader: description: SEVIRI L2 BUFR Product Reader name: seviri_l2_bufr sensors: [seviri] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: seviri_l2_bufr_asr: file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler file_patterns: - 'ASRBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}' - '{spacecraft:s}-SEVI-MSGASRE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr' - '{spacecraft:s}-SEVI-MSGASRE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}' seviri_l2_bufr_cla: file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler file_patterns: - 'CLABUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}' - '{spacecraft:s}-SEVI-MSGCLAP-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr' - '{spacecraft:s}-SEVI-MSGCLAP-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}' seviri_l2_bufr_csr: file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler file_patterns: - 'CSRBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}' - '{spacecraft:s}-SEVI-MSGCSKR-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr' - '{spacecraft:s}-SEVI-MSGCSKR-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}' seviri_l2_bufr_gii: file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler file_patterns: - 'GIIBUFRProduct_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}' - '{spacecraft:s}-SEVI-MSGGIIN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr' - 
'{spacecraft:s}-SEVI-MSGGIIN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}' seviri_l2_bufr_thu: file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler file_patterns: - 'THBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}' - '{spacecraft:s}-SEVI-MSGTPHU-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr' - '{spacecraft:s}-SEVI-MSGTPHU-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}' seviri_l2_bufr_toz: file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler file_patterns: - 'TOZBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}' - '{spacecraft:s}-SEVI-MSGTOZN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr' - '{spacecraft:s}-SEVI-MSGTOZN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}' datasets: latitude: name: latitude key: 'latitude' resolution: [48006.450653072,9001.209497451] file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz] standard_name: latitude units: degree_north fill_value: -1.e+100 longitude: name: longitude key: 'longitude' resolution: [48006.450653072,9001.209497451] file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz] standard_name: longitude units: degree_east fill_value: -1.e+100 # ---- ASR products ------------ nir39all: name: nir39all key: '#19#brightnessTemperature' resolution: 48006.450653072 wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 nir39clr: name: nir39clr key: '#20#brightnessTemperature' resolution: 48006.450653072 wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 nir39cld: name: nir39cld key: '#21#brightnessTemperature' resolution: 48006.450653072 wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 nir39low: name: nir39low key: '#22#brightnessTemperature' resolution: 48006.450653072 wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 nir39med: name: nir39med key: '#23#brightnessTemperature' resolution: 48006.450653072 wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 nir39high: name: nir39high key: '#24#brightnessTemperature' resolution: 48006.450653072 wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv62all: name: wv62all key: '#25#brightnessTemperature' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv62clr: name: wv62clr key: '#26#brightnessTemperature' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] standard_name: 
toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv62cld: name: wv62cld key: '#27#brightnessTemperature' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv62low: name: wv62low key: '#28#brightnessTemperature' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv62med: name: wv62med key: '#29#brightnessTemperature' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv62high: name: wv62high key: '#30#brightnessTemperature' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv73all: name: wv73all key: '#31#brightnessTemperature' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv73clr: name: wv73clr key: '#32#brightnessTemperature' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv73cld: name: wv73cld key: '#33#brightnessTemperature' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv73low: name: wv73low key: '#34#brightnessTemperature' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv73med: name: wv73med key: '#35#brightnessTemperature' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 wv73high: name: wv73high key: '#36#brightnessTemperature' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir87all: name: ir87all key: '#37#brightnessTemperature' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir87clr: name: ir87clr key: '#38#brightnessTemperature' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir87cld: name: ir87cld key: '#39#brightnessTemperature' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir87low: name: ir87low key: '#40#brightnessTemperature' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] standard_name: 
toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir87med: name: ir87med key: '#41#brightnessTemperature' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir87high: name: ir87high key: '#42#brightnessTemperature' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir97all: name: ir97all key: '#43#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir97clr: name: ir97clr key: '#44#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir97cld: name: ir97cld key: '#45#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir97low: name: ir97low key: '#46#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir97med: name: ir97med key: '#47#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir97high: name: ir97high key: '#48#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir108all: name: ir108all key: '#49#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir108clr: name: ir108clr key: '#50#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir108cld: name: ir108cld key: '#51#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir108low: name: ir108low key: '#52#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir108med: name: ir108med key: '#53#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir108high: name: ir108high key: '#54#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] standard_name: 
toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir120all: name: ir120all key: '#55#brightnessTemperature' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir120clr: name: ir120clr key: '#56#brightnessTemperature' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir120cld: name: ir120cld key: '#57#brightnessTemperature' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir120low: name: ir120low key: '#58#brightnessTemperature' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir120med: name: ir120med key: '#59#brightnessTemperature' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir120high: name: ir120high key: '#60#brightnessTemperature' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir134all: name: ir134all key: '#61#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir134clr: name: ir134clr key: '#62#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir134cld: name: ir134cld key: '#63#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir134low: name: ir134low key: '#64#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir134med: name: ir134med key: '#65#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 ir134high: name: ir134high key: '#66#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 pcld: name: pcld key: '#1#cloudAmountInSegment' resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: 0 pclr: name: pclr key: '#1#amountSegmentCloudFree' resolution: 48006.450653072 standard_name: clear_sky_area_fraction units: '%' file_type: seviri_l2_bufr_asr 
coordinates: - longitude - latitude fill_value: 0 pclrs: name: pclrs key: '#2#amountSegmentCloudFree' resolution: 48006.450653072 standard_name: clear_sky_area_fraction units: '%' file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: 0 # ---- CLA products ------------ hca: name: hca key: '#1#amountOfHighClouds' resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_cla coordinates: - longitude - latitude fill_value: 0 lca: name: lca key: '#1#amountOfLowClouds' resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_cla coordinates: - longitude - latitude fill_value: 0 mca: name: mca key: '#1#amountOfMiddleClouds' resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_cla coordinates: - longitude - latitude fill_value: 0 tca: name: tca key: '#1#cloudAmountInSegment' resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_cla coordinates: - longitude - latitude fill_value: 0 # ---- CSR products ------------ nir39: name: nir39 key: '#4#brightnessTemperature' resolution: 48006.450653072 wavelength: [3.48, 3.92, 4.36] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld39: name: cld39 key: '#4#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [3.48, 3.92, 4.36] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 wv62: name: wv62 key: '#5#brightnessTemperature' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld62: name: cld62 key: '#5#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 wv73: name: wv73 key: '#6#brightnessTemperature' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld73: name: cld73 key: '#6#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir87: name: ir87 key: '#7#brightnessTemperature' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld87: name: cld87 key: '#7#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir97: name: ir97 key: '#8#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld97: name: cld97 key: '#8#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr
coordinates: - longitude - latitude fill_value: -1.e+100 ir108: name: ir108 key: '#9#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld108: name: cld108 key: '#9#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir120: name: ir120 key: '#10#brightnessTemperature' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld120: name: cld120 key: '#10#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir134: name: ir134 key: '#11#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld134: name: cld134 key: '#11#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 # ---- GII products ------------ ki: name: ki key: '#1#kIndex' resolution: 9001.209497451 standard_name: atmosphere_stability_k_index coordinates: - longitude - latitude units: "" file_type: seviri_l2_bufr_gii fill_value: -1.e+100 ko: name: ko key: '#1#koIndex' resolution: 9001.209497451 standard_name: atmosphere_stability_ko_index coordinates: - longitude - latitude units: "" file_type: seviri_l2_bufr_gii fill_value: -1.e+100 li: name: li key: '#1#parcelLiftedIndexTo500Hpa' resolution: 9001.209497451 standard_name: atmosphere_stability_lifted_index coordinates: - longitude - latitude units: "" file_type: seviri_l2_bufr_gii fill_value: -1.e+100 lpw1: name: lpw1 key: '#2#precipitableWater' resolution: 9001.209497451 standard_name: lwe_thickness_of_precipitation_amount coordinates: - longitude - latitude units: mm file_type: seviri_l2_bufr_gii fill_value: -1.e+100 lpw2: name: lpw2 key: '#3#precipitableWater' resolution: 9001.209497451 standard_name: lwe_thickness_of_precipitation_amount coordinates: - longitude - latitude units: mm file_type: seviri_l2_bufr_gii fill_value: -1.e+100 lpw3: name: lpw3 key: '#4#precipitableWater' resolution: 9001.209497451 standard_name: lwe_thickness_of_precipitation_amount coordinates: - longitude - latitude units: mm file_type: seviri_l2_bufr_gii fill_value: -1.e+100 mb: name: mb key: '#1#maximumBuoyancy' resolution: 9001.209497451 standard_name: atmosphere_stability_maximum_buoyancy_index coordinates: - longitude - latitude units: "" file_type: seviri_l2_bufr_gii fill_value: -1.e+100 stza: name: stza key: '#1#satelliteZenithAngle' resolution: 9001.209497451 standard_name: sensor_zenith_angle coordinates: - longitude - latitude units: degrees file_type: seviri_l2_bufr_gii fill_value: 0 tpw: name: tpw key: '#1#precipitableWater' resolution: 9001.209497451 standard_name: lwe_thickness_of_precipitation_amount coordinates: - longitude - latitude units: mm file_type: seviri_l2_bufr_gii fill_value: -1.e+100 # ---- THU products 
------------ thu62: name: thu62 key: '#1#relativeHumidity' resolution: 48006.450653072 standard_name: relative_humidity units: '%' file_type: seviri_l2_bufr_thu coordinates: - longitude - latitude fill_value: -1.e+100 thu73: name: thu73 key: '#2#relativeHumidity' resolution: 48006.450653072 standard_name: relative_humidity units: '%' file_type: seviri_l2_bufr_thu coordinates: - longitude - latitude fill_value: -1.e+100 # ---- TOZ products ------------ toz: name: toz key: '#1#totalOzone' resolution: 9001.209497451 standard_name: atmosphere_mass_content_of_ozone units: dobson file_type: seviri_l2_bufr_toz coordinates: - longitude - latitude fill_value: 0 qual: name: qual key: '#1#totalOzone->totalOzoneQuality' resolution: 9001.209497451 standard_name: total_ozone_quality units: "" file_type: seviri_l2_bufr_toz coordinates: - longitude - latitude fill_value: 0 satpy-0.34.0/satpy/etc/readers/seviri_l2_grib.yaml000066400000000000000000000257161420401153000220640ustar00rootroot00000000000000reader: name: seviri_l2_grib short_name: SEVIRI L2 GRIB long_name: MSG SEVIRI L2 (GRIB) description: Reader for EUMETSAT MSG SEVIRI L2 files in GRIB format. sensors: [seviri] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: # EUMETSAT MSG SEVIRI L2 Cloud Mask files in GRIB format grib_seviri_clm: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis files in GRIB format grib_seviri_oca: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring files in GRIB format grib_seviri_fir: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' grib_seviri_aes: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - 
'{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' grib_seviri_cth: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' grib_seviri_crm: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' grib_seviri_mpe: file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler file_patterns: - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' datasets: cloud_mask: name: cloud_mask resolution: 3000.403165817 file_type: grib_seviri_clm parameter_number: 7 units: "1" long_name: cloud_mask_classification pixel_scene_type: name: pixel_scene_type resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 8 units: "1" long_name: scene_classification measurement_cost: name: measurement_cost resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 30 units: "1" long_name: cost_function upper_layer_cloud_optical_depth: name: upper_layer_cloud_optical_depth resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 31 units: "1" long_name: cloud_optical_depth upper_layer_cloud_top_pressure: name: upper_layer_cloud_top_pressure resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 32 units: Pa standard_name: air_pressure_at_cloud_top upper_layer_cloud_effective_radius: name: upper_layer_cloud_effective_radius resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 33 units: m standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top error_in_upper_layer_cloud_optical_depth: name: error_in_upper_layer_cloud_optical_depth resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 34 units: "1" long_name: cloud_optical_depth error_in_upper_layer_cloud_top_pressure: name: error_in_upper_layer_cloud_top_pressure resolution: 3000.403165817 
file_type: grib_seviri_oca parameter_number: 35 units: Pa standard_name: air_pressure_at_cloud_top_standard_error error_in_upper_layer_cloud_effective_radius: name: error_in_upper_layer_cloud_effective_radius resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 36 units: m standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top_standard_error lower_layer_cloud_optical_depth: name: lower_layer_cloud_optical_depth resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 37 units: "1" long_name: cloud_optical_depth lower_layer_cloud_top_pressure: name: lower_layer_cloud_top_pressure resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 38 units: Pa standard_name: air_pressure_at_cloud_top error_in_lower_layer_cloud_optical_depth: name: error_in_lower_layer_cloud_optical_depth resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 39 units: "1" long_name: cloud_optical_depth_standard_error error_in_lower_layer_cloud_top_pressure: name: error_in_lower_layer_cloud_top_pressure resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 40 units: Pa standard_name: air_pressure_at_cloud_top_standard_error fire_probability: name: fire_probability resolution: 3000.403165817 file_type: grib_seviri_fir parameter_number: 192 units: "%" long_name: fire_probability active_fires: name: active_fires resolution: 3000.403165817 file_type: grib_seviri_fir parameter_number: 9 units: "1" long_name: active_fire_classification aerosol_optical_thickness_vis06: name: aerosol_optical_thickness_vis06 resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 20 units: "1" long_name: aerosol_optical_thickness_vis06 aerosol_optical_thickness_vis08: name: aerosol_optical_thickness_vis08 resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 21 units: "1" long_name: aerosol_optical_thickness_vis08 aerosol_optical_thickness_vis16: name: aerosol_optical_thickness_vis16 resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 22 units: "1" long_name: aerosol_optical_thickness_vis16 angstroem_coefficient: name: angstroem_coefficient resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 23 units: "1" long_name: angstroem_coefficient aes_quality: name: aes_quality resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 192 units: "1" long_name: aes_quality cloud_top_height: name: cloud_top_height resolution: 9001.209497451 file_type: grib_seviri_cth parameter_number: 2 units: m long_name: cloud_top_height cloud_top_quality: name: cloud_top_quality resolution: 9001.209497451 file_type: grib_seviri_cth parameter_number: 3 units: "1" long_name: cloud_top_quality vis_refl_06: name: vis_refl_06 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] file_type: grib_seviri_crm parameter_number: 9 units: "%" long_name: vis_refl_06 vis_refl_08: name: vis_refl_08 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] file_type: grib_seviri_crm parameter_number: 10 units: "%" long_name: vis_refl_08 vis_refl_16: name: vis_refl_16 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] file_type: grib_seviri_crm parameter_number: 11 units: "%" long_name: vis_refl_16 nir_refl_39: name: nir_refl_39 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] file_type: grib_seviri_crm parameter_number: 12 units: "%" long_name: nir_refl_39 num_accumulations: name: num_accumulations resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 6 
units: "1" long_name: num_accumulations solar_zenith_angle: name: solar_zenith_angle resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 7 units: degrees long_name: solar_zenith_angle relative_azimuth_angle: name: relative_azimuth_angle resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 8 units: degrees long_name: relative_azimuth_angle instantaneous_rain_rate: name: instantaneous_rain_rate resolution: 3000.403165817 file_type: grib_seviri_mpe parameter_number: 1 units: "kg m-2 s-1" long_name: instantaneous_rain_rate satpy-0.34.0/satpy/etc/readers/slstr_l1b.yaml000066400000000000000000000241421420401153000210600ustar00rootroot00000000000000reader: description: NC Reader for SLSTR data name: slstr_l1b sensors: [slstr] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader data_identification_keys: name: required: true wavelength: type: !!python/name:satpy.dataset.dataid.WavelengthRange resolution: transitive: false calibration: enum: - reflectance - brightness_temperature - radiance - counts transitive: true view: enum: - nadir - oblique transitive: true stripe: enum: - a - b - i - f modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple coord_identification_keys: name: required: true resolution: transitive: false view: enum: - nadir - oblique transitive: true stripe: enum: - a - b - i - f file_types: esa_l1b_refl: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance_{stripe:1s}{view:1s}.nc'] esa_l1b_tir: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_BT_{stripe:1s}{view:1s}.nc'] esa_angles: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRAngles file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geometry_t{view:1s}.nc'] esa_geo: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRGeo file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geodetic_{stripe:1s}{view:1s}.nc'] esa_l1b_flag: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_{stripe:1s}{view:1s}.nc'] datasets: longitude: name: longitude resolution: [500, 1000] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_geo file_key: longitude_{stripe:1s}{view:1s} standard_name: longitude units: degree 
latitude: name: latitude resolution: [500, 1000] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_geo file_key: latitude_{stripe:1s}{view:1s} standard_name: latitude units: degree elevation: name: elevation resolution: [500, 1000] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_geo file_key: elevation_{stripe:1s}{view:1s} standard_name: elevation units: m # The channels S1-S3 are available in nadir (default) and oblique view. S1: name: S1 sensor: slstr wavelength: [0.545,0.555,0.565] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl S2: name: S2 sensor: slstr wavelength: [0.649, 0.659, 0.669] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl S3: name: S3 sensor: slstr wavelength: [0.855, 0.865, 0.875] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl # The channels S4-S6 are available in nadir (default) and oblique view and for both in the # a,b and c stripes. S4: name: S4 sensor: slstr wavelength: [1.3675, 1.375, 1.3825] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl S5: name: S5 sensor: slstr wavelength: [1.58, 1.61, 1.64] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl S6: name: S6 sensor: slstr wavelength: [2.225, 2.25, 2.275] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl # The channels S7-S9, F1 and F2 are available in nadir (default) and oblique view. 
S7: name: S7 sensor: slstr wavelength: [3.55, 3.74, 3.93] resolution: 1000 view: [nadir, oblique] stripe: i calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir S8: name: S8 sensor: slstr wavelength: [10.4, 10.85, 11.3] resolution: 1000 view: [nadir, oblique] stripe: i calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir S9: name: S9 sensor: slstr wavelength: [11.57, 12.0225, 12.475] resolution: 1000 view: [nadir, oblique] stripe: i calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir F1: name: F1 sensor: slstr wavelength: [3.55, 3.74, 3.93] resolution: 1000 view: [nadir, oblique] stripe: f calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir F2: name: F2 sensor: slstr wavelength: [10.4, 10.85, 11.3] resolution: 1000 view: [nadir, oblique] stripe: i calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir solar_zenith_angle: name: solar_zenith_angle sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] standard_name: solar_zenith_angle file_type: esa_angles file_key: solar_zenith_t{view:1s} solar_azimuth_angle: name: solar_azimuth_angle sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] standard_name: solar_azimuth_angle file_type: esa_angles file_key: solar_azimuth_t{view:1s} satellite_zenith_angle: name: satellite_zenith_angle sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] standard_name: satellite_zenith_angle file_type: esa_angles file_key: sat_zenith_t{view:1s} satellite_azimuth_angle: name: satellite_azimuth_angle sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] standard_name: satellite_azimuth_angle file_type: esa_angles file_key: sat_azimuth_t{view:1s} # CloudFlags are all bitfields. They are available in nadir (default) and oblique view for # each of the a,b,c,i stripes. 
cloud: name: cloud sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_l1b_flag file_key: cloud_{stripe:1s}{view:1s} confidence: name: confidence sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_l1b_flag file_key: confidence_{stripe:1s}{view:1s} pointing: name: pointing sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_l1b_flag file_key: pointing_{stripe:1s}{view:1s} bayes: name: bayes sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_l1b_flag file_key: bayes_{stripe:1s}{view:1s} satpy-0.34.0/satpy/etc/readers/slstr_l2.yaml000066400000000000000000000032701420401153000207160ustar00rootroot00000000000000reader: description: NC Reader for Sentinel-3 SLSTR Level 2 data name: slstr_l2 sensors: [slstr_l2] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: SLSTRB: file_reader: !!python/name:satpy.readers.slstr_l2.SLSTRL2FileHandler file_patterns: ['{start_time:%Y%m%d%H%M%S}-{generating_centre:3s}-{type_id:3s}_GHRSST-SSTskin-SLSTR{something:1s}-{end_time:%Y%m%d%H%M%S}-{version}.nc', '{mission_id:3s}_SL_{processing_level:1s}_WST____{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3.tar'] datasets: longitude: name: longitude resolution: 1000 view: nadir file_type: SLSTRB standard_name: lon units: degree latitude: name: latitude resolution: 1000 view: nadir file_type: SLSTRB standard_name: lat units: degree sea_surface_temperature: name: sea_surface_temperature sensor: slstr_l2 coordinates: [longitude, latitude] file_type: SLSTRB resolution: 1000 view: nadir units: kelvin standard_name: sea_surface_temperature sea_ice_fraction: name: sea_ice_fraction sensor: slstr_l2 coordinates: [longitude, latitude] file_type: SLSTRB resolution: 1000 view: nadir units: "%" standard_name: sea_ice_fraction # Quality estimation 0-5: no data, cloud, worst, low, acceptable, best quality_level: name: quality_level sensor: slstr_l2 coordinates: [longitude, latitude] file_type: SLSTRB resolution: 1000 view: nadir standard_name: quality_level satpy-0.34.0/satpy/etc/readers/smos_l2_wind.yaml000066400000000000000000000011001420401153000215370ustar00rootroot00000000000000reader: description: SMOS Level 2 Wind NetCDF reader name: smos_l2_wind reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [smos] file_types: smos_l2_wind: # Ex: SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc file_reader: !!python/name:satpy.readers.smos_l2_wind.SMOSL2WINDFileHandler file_patterns: - '{platform_shortname:2s}_{file_class:4s}_{file_category:4s}{semantic_descriptor:6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{version:3s}_{counter:3s}_{site_id:1s}.nc' satpy-0.34.0/satpy/etc/readers/tropomi_l2.yaml000066400000000000000000000046421420401153000212440ustar00rootroot00000000000000reader: description: TROPOMI Level 2 NetCDF reader name: tropomi_l2 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [tropomi] file_types: tropomi_l2: # Ex: S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc file_reader: 
!!python/name:satpy.readers.tropomi_l2.TROPOMIL2FileHandler file_patterns: - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}.nc' datasets: latitude: name: 'latitude' file_type: tropomi_l2 file_key: 'PRODUCT/latitude' standard_name: latitude longitude: name: 'longitude' file_type: tropomi_l2 file_key: 'PRODUCT/longitude' standard_name: longitude latitude_bounds: name: 'latitude_bounds' file_type: tropomi_l2 file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds' standard_name: latitude_bounds longitude_bounds: name: 'longitude_bounds' file_type: tropomi_l2 file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds' standard_name: longitude_bounds assembled_lat_bounds: name: 'assembled_lat_bounds' file_type: tropomi_l2 file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds' standard_name: assembled_latitude_bounds assembled_lon_bounds: name: 'assembled_lon_bounds' file_type: tropomi_l2 file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds' standard_name: assembled_longitude_bounds offset_time: name: 'offset_time' file_type: tropomi_l2 file_key: 'PRODUCT/delta_time' standard_name: offset_time ref_time: name: 'ref_time' file_type: tropomi_l2 file_key: 'PRODUCT/time' standard_name: ref_time tm5_constant_a: name: 'tm5_constant_a' file_type: tropomi_l2 file_key: 'PRODUCT/tm5_constant_a' standard_name: tm5_constant_a tm5_constant_b: name: 'tm5_constant_b' file_type: tropomi_l2 file_key: 'PRODUCT/tm5_constant_b' standard_name: tm5_constant_b time_utc: name: 'time_utc' file_type: tropomi_l2 file_key: 'PRODUCT/time_utc' standard_name: time_utc satpy-0.34.0/satpy/etc/readers/vaisala_gld360.yaml000066400000000000000000000017061420401153000216530ustar00rootroot00000000000000reader: description: Vaisala Global Lightning Dataset 360 reader name: vaisala_gld360 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [vaisala_gld360] file_types: vaisala_gld360: file_reader: !!python/name:satpy.readers.vaisala_gld360.VaisalaGLD360TextFileHandler file_patterns: ['flashes_{start_time:%Y%m%d}.txt'] datasets: time: name: time sensor: vaisala_gld360 resolution: 2000 file_type: vaisala_gld360 latitude: name: latitude sensor: vaisala_gld360 resolution: 2000 file_type: vaisala_gld360 standard_name: latitude units: degree_north longitude: name: longitude sensor: vaisala_gld360 resolution: 2000 file_type: vaisala_gld360 standard_name: longitude units: degree_east power: name: power sensor: vaisala_gld360 resolution: 2000 file_type: vaisala_gld360 coordinates: - longitude - latitude units: kA satpy-0.34.0/satpy/etc/readers/vii_l1b_nc.yaml000066400000000000000000000303501420401153000211560ustar00rootroot00000000000000reader: name: vii_l1b_nc short_name: VII L1B RAD NetCDF4 long_name: EPS-SG VII L1B Radiance (NetCDF4) description: > Reader for EUMETSAT EPSG-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format. 
sensors: [vii] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: # EUMETSAT EPSG-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format nc_vii_l1b_rad: file_reader: !!python/name:satpy.readers.vii_l1b_nc.ViiL1bNCFileHandler file_patterns: ['W_DE-AIRBUSDS-Friedrichshafen,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc', 'W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc', 'W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude datasets: # --- Coordinates --- lon_tie_points: name: lon_tie_points file_type: nc_vii_l1b_rad file_key: data/measurement_data/longitude standard_name: longitude lat_tie_points: name: lat_tie_points file_type: nc_vii_l1b_rad file_key: data/measurement_data/latitude standard_name: latitude lon_pixels: name: lon_pixels file_type: nc_vii_l1b_rad file_key: cached_longitude orthorect_data: data/measurement_data/delta_lon_E_dem standard_name: longitude lat_pixels: name: lat_pixels file_type: nc_vii_l1b_rad file_key: cached_latitude orthorect_data: data/measurement_data/delta_lat_N_dem standard_name: latitude # --- Measurement data --- vii_443: name: vii_443 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_443 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 0 wavelength: [0.428, 0.443, 0.458] vii_555: name: vii_555 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_555 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 1 wavelength: [0.545, 0.555, 0.565] vii_668: name: vii_668 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_668 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 2 wavelength: [0.658, 0.668, 0.678] vii_752: name: vii_752 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_752 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 3 wavelength: [0.7465, 0.7515, 0.7565] vii_763: name: vii_763 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_763 coordinates: [lon_pixels, lat_pixels] calibration: [reflectance, radiance] chan_solar_index: 4 wavelength: [0.75695, 0.7627, 0.76845] vii_865: name: vii_865 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_865 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: 
toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 5 wavelength: [0.855, 0.865, 0.875] vii_914: name: vii_914 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_914 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 6 wavelength: [0.904, 0.914, 0.924] vii_1240: name: vii_1240 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1240 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 7 wavelength: [1.230, 1.240, 1.250] vii_1375: name: vii_1375 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1375 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 8 wavelength: [1.355, 1.375, 1.395] vii_1630: name: vii_1630 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1630 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 9 wavelength: [1.620, 1.630, 1.640] vii_2250: name: vii_2250 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_2250 coordinates: [lon_pixels, lat_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 10 wavelength: [2.225, 2.250, 2.275] vii_3740: name: vii_3740 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_3740 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 0 wavelength: [3.650, 3.740, 3.830] vii_3959: name: vii_3959 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_3959 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 1 wavelength: [3.929, 3.959, 3.989] vii_4050: name: vii_4050 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_4050 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 2 wavelength: [4.020, 4.050, 4.080] vii_6725: name: vii_6725 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_6725 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 3 wavelength: [6.540, 6.725, 6.910] vii_7325: name: vii_7325 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_7325 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 4 wavelength: [7.180, 7.325, 7.470] vii_8540: name: vii_8540 file_type: nc_vii_l1b_rad file_key: 
data/measurement_data/vii_8540 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 5 wavelength: [8.395, 8.540, 8.685] vii_10690: name: vii_10690 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_10690 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 6 wavelength: [10.440, 10.690, 10.940] vii_12020: name: vii_12020 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_12020 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 7 wavelength: [11.770, 12.020, 12.270] vii_13345: name: vii_13345 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_13345 coordinates: [lon_pixels, lat_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 8 wavelength: [13.190, 13.345, 13.500] # --- Geometric data --- # TODO Geometric data on tie points are kept for test purposes solar_zenith_tie_points: name: solar_zenith_tie_points standard_name: solar_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_zenith coordinates: [lon_tie_points, lat_tie_points] solar_azimuth_tie_points: name: solar_azimuth_tie_points standard_name: solar_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_azimuth coordinates: [lon_tie_points, lat_tie_points] observation_zenith_tie_points: name: observation_zenith_tie_points standard_name: sensor_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_zenith coordinates: [lon_tie_points, lat_tie_points] observation_azimuth_tie_points: name: observation_azimuth_tie_points standard_name: sensor_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_azimuth coordinates: [lon_tie_points, lat_tie_points] solar_zenith: name: solar_zenith standard_name: solar_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_zenith interpolate: True coordinates: [lon_pixels, lat_pixels] solar_azimuth: name: solar_azimuth standard_name: solar_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_azimuth interpolate: True coordinates: [lon_pixels, lat_pixels] observation_zenith: name: observation_zenith standard_name: sensor_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_zenith interpolate: True coordinates: [lon_pixels, lat_pixels] observation_azimuth: name: observation_azimuth standard_name: sensor_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_azimuth interpolate: True coordinates: [lon_pixels, lat_pixels] # --- Orthorectification data --- delta_lat_N_dem: name: delta_lat_N_dem file_type: nc_vii_l1b_rad file_key: data/measurement_data/delta_lat_N_dem coordinates: [lon_pixels, lat_pixels] standard_name: parallax_delta_latitude delta_lon_N_dem: name: delta_lon_N_dem file_type: nc_vii_l1b_rad file_key: data/measurement_data/delta_lon_N_dem coordinates: [lon_pixels, lat_pixels] standard_name: parallax_delta_longitude 
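The vii_l1b_nc YAML above only declares file patterns, channel names, and calibration metadata; the actual loading happens through satpy's `Scene` class, which is handed the reader name defined in the file. Below is a minimal, untested sketch of that usage: the directory and glob pattern are placeholders, and `vii_668` is one of the channel names declared in the datasets section above.

```python
from glob import glob

from satpy import Scene

# Placeholder path: point the glob at files matching one of the
# file_patterns listed in the vii_l1b_nc YAML above (*.nc files).
filenames = glob("/path/to/vii_l1b/*VII-1B-RAD*.nc")

scn = Scene(reader="vii_l1b_nc", filenames=filenames)
# "vii_668" is the 0.668 um solar channel from the datasets section;
# its standard_name and units attributes come from that YAML entry.
scn.load(["vii_668"])
print(scn["vii_668"].attrs.get("units"))
```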
satpy-0.34.0/satpy/etc/readers/vii_l2_nc.yaml000066400000000000000000000371751420401153000210310ustar00rootroot00000000000000reader: name: vii_l2_nc short_name: VII L2 NetCDF4 long_name: EPS-SG VII L2 (NetCDF4) description: > Reader for EUMETSAT EPSG-SG Visual Infrared Imager Level 2 files in NetCDF4 format. sensors: [vii] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Mask files in NetCDF4 format nc_vii_l2_cld: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-CLD_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude orthorect: False # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Top Pressure (using the Oxygen-A Band) files in NetCDF4 format nc_vii_l2_ctp: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-CTP_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Mask and First Guess Cloud Properties files in NetCDF4 format nc_vii_l2_icm: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-ICM_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Optimal Cloud Analysis files in NetCDF4 format nc_vii_l2_oca: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-OCA_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Total Precipitable Water (from VII visible/near-infrared) files in NetCDF4 format nc_vii_l2_wvv: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-WVV_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude interpolate: False orthorect: False # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Total Precipitable Water (from VII thermal infra-red) files in NetCDF4 format nc_vii_l2_wvi: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: 
['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-WVI_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude interpolate: False orthorect: False datasets: # --- Coordinates --- # TODO Coordinates on tie points are kept for test purposes lon_tie_points: name: lon_tie_points file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca] file_key: data/measurement_data/longitude standard_name: longitude lat_tie_points: name: lat_tie_points file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca] file_key: data/measurement_data/latitude standard_name: latitude lon_pixels_no_orthorect: name: lon_pixels_no_orthorect file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: cached_longitude standard_name: longitude lat_pixels_no_orthorect: name: lat_pixels_no_orthorect file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: cached_latitude standard_name: latitude lon_pixels: name: lon_pixels file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: cached_longitude orthorect_data: data/measurement_data/delta_lon standard_name: longitude lat_pixels: name: lat_pixels file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: cached_latitude orthorect_data: data/measurement_data/delta_lat standard_name: latitude lon_pixels2: name: lon_pixels2 file_type: nc_vii_l2_oca file_key: cached_longitude orthorect_data: data/measurement_data/delta_lon_cloud2 standard_name: longitude lat_pixels2: name: lat_pixels2 file_type: nc_vii_l2_oca file_key: cached_latitude orthorect_data: data/measurement_data/delta_lat_cloud2 standard_name: latitude # --- Measurement data --- cs_confidence: name: cs_confidence file_type: [nc_vii_l2_cld, nc_vii_l2_icm] file_key: data/measurement_data/cs_confidence coordinates: [lon_pixels, lat_pixels] standard_name: cloud_area_fraction flag_cm: name: flag_cm file_type: [nc_vii_l2_cld, nc_vii_l2_icm] file_key: data/measurement_data/flag_cm coordinates: [lon_pixels, lat_pixels] standard_name: cloud_mask_classification surface_type: name: surface_type file_type: [nc_vii_l2_cld, nc_vii_l2_icm] file_key: data/measurement_data/surface_type coordinates: [lon_pixels, lat_pixels] standard_name: surface_type ctp_o2: name: ctp_o2 file_type: nc_vii_l2_ctp file_key: data/measurement_data/ctp_o2 coordinates: [lon_pixels, lat_pixels] standard_name: air_pressure_at_cloud_top log10_ctp_o2_err: name: log10_ctp_o2_err file_type: nc_vii_l2_ctp file_key: data/measurement_data/log10_ctp_o2_err coordinates: [lon_pixels, lat_pixels] standard_name: air_pressure_at_cloud_top log10_cot_o2: name: log10_cot_o2 file_type: nc_vii_l2_ctp file_key: data/measurement_data/log10_cot_o2 coordinates: [lon_pixels, lat_pixels] standard_name: cloud_optical_depth log10_cot_o2_err: name: log10_cot_o2_err file_type: nc_vii_l2_ctp file_key: data/measurement_data/log10_cot_o2_err coordinates: [lon_pixels, lat_pixels] standard_name: cloud_optical_depth vii_ch_sel1: name: vii_ch_sel1 file_type: nc_vii_l2_icm file_key: data/measurement_data/vii_ch_sel1 coordinates: [lon_pixels, lat_pixels] standard_name: toa_outgoing_radiance_per_unit_wavelength vii_ch_sel2: name: 
vii_ch_sel2 file_type: nc_vii_l2_icm file_key: data/measurement_data/vii_ch_sel2 coordinates: [lon_pixels, lat_pixels] standard_name: toa_outgoing_radiance_per_unit_wavelength vii_ch_sel3: name: vii_ch_sel3 file_type: nc_vii_l2_icm file_key: data/measurement_data/vii_ch_sel3 coordinates: [lon_pixels, lat_pixels] standard_name: toa_outgoing_radiance_per_unit_wavelength flag_cph: name: flag_cph file_type: nc_vii_l2_icm file_key: data/measurement_data/flag_cph coordinates: [lon_pixels, lat_pixels] standard_name: thermodynamic_phase_of_cloud_water_particles_at_cloud_top log10_cot_fg: name: log10_cot_fg file_type: nc_vii_l2_icm file_key: data/measurement_data/log10_cot_fg coordinates: [lon_pixels, lat_pixels] standard_name: cloud_optical_depth log10_err_cot_fg: name: log10_err_cot_fg file_type: nc_vii_l2_icm file_key: data/measurement_data/log10_err_cot_fg coordinates: [lon_pixels, lat_pixels] standard_name: cloud_optical_depth cth_fg: name: cth_fg file_type: nc_vii_l2_icm file_key: data/measurement_data/cth_fg coordinates: [lon_pixels, lat_pixels] standard_name: height_at_cloud_top err_cth_fg: name: err_cth_fg file_type: nc_vii_l2_icm file_key: data/measurement_data/err_cth_fg coordinates: [lon_pixels, lat_pixels] standard_name: height_at_cloud_top moca_model_final: name: moca_model_final file_type: nc_vii_l2_oca file_key: data/measurement_data/moca_model_final coordinates: [lon_pixels, lat_pixels] standard_name: scene_classification log10_cot: name: log10_cot file_type: nc_vii_l2_oca file_key: data/measurement_data/log10_cot coordinates: [lon_pixels, lat_pixels] standard_name: cloud_optical_depth log10_err_cot: name: log10_err_cot file_type: nc_vii_l2_oca file_key: data/measurement_data/log10_err_cot coordinates: [lon_pixels, lat_pixels] standard_name: cloud_optical_depth cre: name: cre file_type: nc_vii_l2_oca file_key: data/measurement_data/cre coordinates: [lon_pixels, lat_pixels] standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top log10_err_cre: name: log10_err_cre file_type: nc_vii_l2_oca file_key: data/measurement_data/log10_err_cre coordinates: [lon_pixels, lat_pixels] standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top ctp: name: ctp file_type: nc_vii_l2_oca file_key: data/measurement_data/ctp coordinates: [lon_pixels, lat_pixels] standard_name: air_pressure_at_cloud_top log10_err_ctp: name: log10_err_ctp file_type: nc_vii_l2_oca file_key: data/measurement_data/log10_err_ctp coordinates: [lon_pixels, lat_pixels] standard_name: air_pressure_at_cloud_top ctt: name: ctt file_type: nc_vii_l2_oca file_key: data/measurement_data/ctt coordinates: [lon_pixels, lat_pixels] standard_name: air_temperature_at_cloud_top log10_cot2: name: log10_cot2 file_type: nc_vii_l2_oca file_key: data/measurement_data/log10_cot2 coordinates: [lon_pixels2, lat_pixels2] standard_name: cloud_optical_depth log10_err_cot2: name: log10_err_cot2 file_type: nc_vii_l2_oca file_key: data/measurement_data/log10_err_cot2 coordinates: [lon_pixels2, lat_pixels2] standard_name: cloud_optical_depth ctp2: name: ctp2 file_type: nc_vii_l2_oca file_key: data/measurement_data/ctp2 coordinates: [lon_pixels2, lat_pixels2] standard_name: air_pressure_at_cloud_top log10_err_ctp2: name: log10_err_ctp2 file_type: nc_vii_l2_oca file_key: data/measurement_data/log10_err_ctp2 coordinates: [lon_pixels2, lat_pixels2] standard_name: air_pressure_at_cloud_top ctt2: name: ctt2 file_type: nc_vii_l2_oca file_key: data/measurement_data/ctt2 coordinates: [lon_pixels2, lat_pixels2] 
standard_name: air_temperature_at_cloud_top tpw: name: tpw file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: data/measurement_data/tpw coordinates: [lon_pixels, lat_pixels] standard_name: mass_of_water_in_air tpw_err: name: tpw_err file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: data/measurement_data/tpw_err coordinates: [lon_pixels, lat_pixels] standard_name: mass_of_water_in_air # --- Geometric data --- # TODO Geometric data on tie points are kept for test purposes solar_zenith_tie_points: name: solar_zenith_tie_points standard_name: solar_zenith_angle file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca] file_key: data/measurement_data/solar_zenith coordinates: [lon_tie_points, lat_tie_points] solar_azimuth_tie_points: name: solar_azimuth_tie_points standard_name: solar_azimuth_angle file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca] file_key: data/measurement_data/solar_azimuth coordinates: [lon_tie_points, lat_tie_points] observation_zenith_tie_points: name: observation_zenith_tie_points standard_name: sensor_zenith_angle file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca] file_key: data/measurement_data/observation_zenith coordinates: [lon_tie_points, lat_tie_points] observation_azimuth_tie_points: name: observation_azimuth_tie_points standard_name: sensor_azimuth_angle file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca] file_key: data/measurement_data/observation_azimuth coordinates: [lon_tie_points, lat_tie_points] solar_zenith: name: solar_zenith standard_name: solar_zenith_angle file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: data/measurement_data/solar_zenith interpolate: True coordinates: [lon_pixels_no_orthorect, lat_pixels_no_orthorect] solar_azimuth: name: solar_azimuth standard_name: solar_azimuth_angle file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: data/measurement_data/solar_azimuth interpolate: True coordinates: [lon_pixels_no_orthorect, lat_pixels_no_orthorect] observation_zenith: name: observation_zenith standard_name: sensor_zenith_angle file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: data/measurement_data/observation_zenith interpolate: True coordinates: [lon_pixels_no_orthorect, lat_pixels_no_orthorect] observation_azimuth: name: observation_azimuth standard_name: sensor_azimuth_angle file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: data/measurement_data/observation_azimuth interpolate: True coordinates: [lon_pixels_no_orthorect, lat_pixels_no_orthorect] # --- Orthorectification data --- delta_lat: name: delta_lat file_type: [nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca] file_key: data/measurement_data/delta_lat coordinates: [lon_pixels, lat_pixels] standard_name: parallax_delta_latitude delta_lon: name: delta_lon file_type: [nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca] file_key: data/measurement_data/delta_lon coordinates: [lon_pixels, lat_pixels] standard_name: parallax_delta_longitude delta_lat_cloud2: name: delta_lat_cloud2 file_type: nc_vii_l2_oca file_key: data/measurement_data/delta_lat_cloud2 coordinates: [lon_pixels, lat_pixels] standard_name: parallax_delta_latitude delta_lon_cloud2: name: delta_lon_cloud2 file_type: nc_vii_l2_oca file_key: data/measurement_data/delta_lon_cloud2 coordinates: [lon_pixels, lat_pixels] 
standard_name: parallax_delta_longitude # --- Quality Information data --- log10_j: name: log10_j file_type: [nc_vii_l2_ctp, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: data/quality_information/log10_j coordinates: [lon_pixels, lat_pixels] standard_name: cost_function flag_ml: name: flag_ml file_type: nc_vii_l2_ctp file_key: data/quality_information/flag_ml coordinates: [lon_pixels, lat_pixels] standard_name: cloud_multilayer_classification qi_forecast: name: qi_forecast file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv] file_key: data/quality_information/qi_forecast coordinates: [lon_pixels, lat_pixels] standard_name: mass_of_water_in_air satpy-0.34.0/satpy/etc/readers/viirs_compact.yaml000066400000000000000000000236271420401153000220240ustar00rootroot00000000000000reader: description: Generic Eumetsat Compact VIIRS Reader name: viirs_compact reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] default_datasets: datasets: longitude_m: name: longitude_m resolution: 742 file_type: compact_m standard_name: longitude units: degree latitude_m: name: latitude_m resolution: 742 file_type: compact_m standard_name: latitude units: degree longitude_dnb: name: longitude_dnb resolution: 743 file_type: compact_dnb standard_name: longitude units: degree latitude_dnb: name: latitude_dnb resolution: 743 file_type: compact_dnb standard_name: latitude units: degree M01: name: M01 sensor: viirs wavelength: [0.402,0.412,0.422] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M02: name: M02 sensor: viirs wavelength: [0.436,0.445,0.454] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M03: name: M03 sensor: viirs wavelength: [0.478,0.488,0.498] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M04: name: M04 sensor: viirs wavelength: [0.545,0.555,0.565] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M05: name: M05 sensor: viirs wavelength: [0.662,0.672,0.682] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M06: name: M06 sensor: viirs wavelength: [0.739,0.746,0.754] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M07: name: M07 sensor: viirs wavelength: [0.846,0.865,0.885] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: 
compact_m M08: name: M08 sensor: viirs wavelength: [1.230,1.240,1.250] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M09: name: M09 sensor: viirs resolution: 742 wavelength: [1.371,1.378,1.386] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M10: name: M10 sensor: viirs wavelength: [1.580,1.610,1.640] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M11: name: M11 sensor: viirs resolution: 742 wavelength: [2.225,2.250,2.275] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M12: name: M12 sensor: viirs wavelength: [3.610,3.700,3.790] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M13: name: M13 sensor: viirs wavelength: [3.973,4.050,4.128] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M14: name: M14 sensor: viirs resolution: 742 wavelength: [8.400,8.550,8.700] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M15: name: M15 sensor: viirs resolution: 742 wavelength: [10.263,10.763,11.263] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M16: name: M16 sensor: viirs wavelength: [11.538,12.013,12.489] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m DNB: name: DNB sensor: viirs wavelength: [0.500,0.700,0.900] resolution: 743 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W cm-2 sr-1 coordinates: [longitude_dnb, latitude_dnb] file_type: compact_dnb satellite_azimuth_angle: name: satellite_azimuth_angle sensor: viirs resolution: 742 file_type: compact_m units: degree coordinates: [longitude_m, latitude_m] standard_name: platform_azimuth_angle solar_azimuth_angle: name: solar_azimuth_angle sensor: viirs resolution: 742 file_type: compact_m units: degree coordinates: [longitude_m, latitude_m] standard_name: solar_azimuth_angle satellite_zenith_angle: name: satellite_zenith_angle sensor: viirs resolution: 742 
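# The band entries above each declare a wavelength range and one or more calibration
# levels, so data can be requested by name, by wavelength, or with an explicit
# calibration. An illustrative sketch (file paths and the chosen bands are examples
# only; passing a calibration keyword to Scene.load is assumed to behave as in
# recent Satpy versions):
#
#   from glob import glob
#   from satpy import Scene
#
#   scn = Scene(reader="viirs_compact", filenames=glob("/data/viirs/SVMC_*_eum_ops.h5"))
#   scn.load(["M12"], calibration="radiance")   # instead of the default brightness_temperature
#   scn.load([0.67])                            # selects M05 via its wavelength range above
#   print(scn["M05"].attrs["units"])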
file_type: compact_m units: degree coordinates: [longitude_m, latitude_m] standard_name: platform_zenith_angle solar_zenith_angle: name: solar_zenith_angle sensor: viirs resolution: 742 file_type: compact_m units: degree coordinates: [longitude_m, latitude_m] standard_name: solar_zenith_angle satellite_azimuth_angle_dnb: name: dnb_satellite_azimuth_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: platform_azimuth_angle solar_azimuth_angle_dnb: name: dnb_solar_azimuth_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: solar_azimuth_angle satellite_zenith_angle_dnb: name: dnb_satellite_zenith_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: platform_zenith_angle solar_zenith_angle_dnb: name: dnb_solar_zenith_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: solar_zenith_angle lunar_zenith_angle_dnb: name: dnb_lunar_zenith_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: lunar_zenith_angle lunar_azimuth_angle_dnb: name: dnb_lunar_azimuth_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: lunar_azimuth_angle moon_illumination_fraction_dnb: name: dnb_moon_illumination_fraction resolution: 743 file_type: compact_dnb file_types: compact_m: file_reader: !!python/name:satpy.readers.viirs_compact.VIIRSCompactFileHandler file_patterns: ['SVMC_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_eum_ops.h5'] compact_dnb: file_reader: !!python/name:satpy.readers.viirs_compact.VIIRSCompactFileHandler file_patterns: ['SVDNBC_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_eum_ops.h5'] satpy-0.34.0/satpy/etc/readers/viirs_edr_active_fires.yaml000066400000000000000000000072611420401153000236670ustar00rootroot00000000000000reader: description: VIIRS Active Fires Reader name: viirs_edr_active_fires reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] file_types: fires_netcdf_img: file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresFileHandler variable_prefix: "" file_patterns: - 'AFIMG_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc' fires_netcdf: file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresFileHandler variable_prefix: "Fire Pixels/" file_patterns: - 'AFEDR_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc' - 'AFMOD_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc' fires_text_img: file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresTextFileHandler skip_rows: 15 columns: ["latitude", "longitude", "T4", "Along-scan", "Along-track", "confidence_cat", "power"] file_patterns: - 'AFIMG_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt' fires_text: file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresTextFileHandler 
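# The file_patterns strings used throughout these reader definitions are trollsift
# format specifications; Satpy uses them both to match file names and to extract
# metadata such as platform, orbit and start time. A small sketch with the compact
# M-band pattern from above (the file name is made up; trollsift is a Satpy
# dependency, and the exact parsing behaviour is trollsift's):
#
#   from trollsift import Parser
#
#   p = Parser("SVMC_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_"
#              "e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_eum_ops.h5")
#   info = p.parse("SVMC_npp_d20300101_t1200000_e1201400_b00001_c20300101120500000000_eum_ops.h5")
#   # info["platform_shortname"] == "npp", info["orbit"] == 1, info["start_time"] is a datetime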
skip_rows: 15 columns: ["latitude", "longitude", "T13", "Along-scan", "Along-track", "confidence_pct", "power"] file_patterns: - 'AFMOD_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt' - 'AFEDR_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt' datasets: confidence_cat: name: confidence_cat file_type: [fires_netcdf_img, fires_text_img] file_key: "{variable_prefix}FP_confidence" coordinates: [longitude, latitude] units: '1' flag_meanings: ['low', 'medium', 'high'] flag_values: [7, 8, 9] _FillValue: 0 confidence_pct: name: confidence_pct file_type: [fires_netcdf, fires_text] file_key: "{variable_prefix}FP_confidence" coordinates: [longitude, latitude] units: '%' # this is not a category product but we should define a fill value # since we aren't going to scale the data to a float data type in # the python code _FillValue: 255 longitude: name: longitude standard_name: longitude file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text] file_key: "{variable_prefix}FP_longitude" units: 'degrees_east' latitude: name: latitude standard_name: latitude file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text] file_key: "{variable_prefix}FP_latitude" units: 'degrees_north' power: name: power file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text] file_key: "{variable_prefix:s}FP_power" coordinates: [longitude, latitude] units: 'MW' T13: name: T13 file_type: [fires_netcdf, fires_text] file_key: "{variable_prefix}FP_T13" coordinates: [longitude, latitude] standard_name: toa_brightness_temperature units: 'K' T4: name: T4 file_type: [fires_netcdf_img, fires_text_img] file_key: "{variable_prefix}FP_T4" coordinates: [longitude, latitude] standard_name: toa_brightness_temperature units: 'K' satpy-0.34.0/satpy/etc/readers/viirs_edr_flood.yaml000066400000000000000000000015331420401153000223230ustar00rootroot00000000000000reader: description: VIIRS flood HDF4 reader name: viirs_edr_flood reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] file_types: viirs_edr: file_reader: !!python/name:satpy.readers.viirs_edr_flood.VIIRSEDRFlood file_patterns: - 'WATER_VIIRS_Prj_SVI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_{source:8s}_{dim0:d}_{dim1:d}_01.hdf' - 'WATER_VIIRS_Prj_SVI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_{source:8s}_{aoi:3s}_{dim0:d}_{dim1:d}_01.hdf' - 'WATER_COM_VIIRS_Prj_SVI_d{start_time:%Y%m%d}_d{end_time:%Y%m%d}_{dim0:d}_{dim1:d}_{unknown1:2d}_{total_days:3d}day_{tile_num:3d}.hdf' datasets: water_detection: name: 'WaterDetection' file_type: viirs_edr satpy-0.34.0/satpy/etc/readers/viirs_l1b.yaml000066400000000000000000000357041420401153000210530ustar00rootroot00000000000000reader: description: Generic NASA VIIRS L1B Reader name: viirs_l1b reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] default_datasets: navigations: vgeoi: description: VIIRS L1B I-band Navigation file_type: vgeoi latitude_key: geolocation_data/latitude longitude_key: geolocation_data/longitude nadir_resolution: [371] rows_per_scan: 32 vgeom: description: VIIRS L1B M-band Navigation file_type: vgeom latitude_key: geolocation_data/latitude longitude_key: geolocation_data/longitude nadir_resolution: [742] rows_per_scan: 16 vgeod: description: VIIRS L1B DNB Navigation file_type: vgeod latitude_key: geolocation_data/latitude 
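# Readers defined in these files are not used directly; they are selected by name
# through the Scene API. A minimal sketch using the active-fires reader above
# (the file name is only illustrative and must match one of its file_patterns):
#
#   from satpy import Scene
#
#   scn = Scene(reader="viirs_edr_active_fires",
#               filenames=["AFIMG_j01_d20300101_t1200000_e1201400_b00001_c20300101121500000000_cspp_dev.nc"])
#   print(scn.available_dataset_names())   # names come from the 'datasets' sections
#   scn.load(["T4", "power", "confidence_cat"])
#   fires = scn["power"]                   # xarray.DataArray with longitude/latitude coordinates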
longitude_key: geolocation_data/longitude nadir_resolution: [742] rows_per_scan: 16 file_types: vgeoi: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VGEOI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}03IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' vgeom: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VGEOM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}03MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' vgeod: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VGEOD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}03DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' vl1bi: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}02IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}02IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' vl1bm: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}02MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}02MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' vl1bd: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}02DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}02DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' datasets: i_lon: name: i_lon resolution: 371 file_type: vgeoi file_key: geolocation_data/longitude units: degrees standard_name: longitude i_lat: name: i_lat resolution: 371 file_type: vgeoi file_key: geolocation_data/latitude units: degrees standard_name: latitude m_lon: name: m_lon resolution: 742 file_type: vgeom file_key: geolocation_data/longitude units: degrees standard_name: longitude m_lat: name: m_lat resolution: 742 file_type: vgeom file_key: geolocation_data/latitude units: degrees standard_name: latitude dnb_lon: name: dnb_lon resolution: 743 file_type: vgeod file_key: geolocation_data/longitude units: degrees standard_name: longitude dnb_lat: name: dnb_lat resolution: 743 file_type: vgeod file_key: geolocation_data/latitude units: degrees standard_name: latitude I01: name: I01 wavelength: [0.600, 0.640, 0.680] resolution: 371 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I02: name: I02 wavelength: 
[0.845, 0.865, 0.884] resolution: 371 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I03: name: I03 wavelength: [1.580, 1.610, 1.640] resolution: 371 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I04: name: I04 wavelength: [3.580, 3.740, 3.900] resolution: 371 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I05: name: I05 wavelength: [10.500, 11.450, 12.300] resolution: 371 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I_SOLZ: name: i_solar_zenith_angle standard_name: solar_zenith_angle resolution: 371 units: degrees coordinates: [i_lon, i_lat] file_type: vgeoi file_key: geolocation_data/solar_zenith I_SOLA: name: i_solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 371 units: degrees coordinates: [i_lon, i_lat] file_type: vgeoi file_key: geolocation_data/solar_azimuth I_SENZ: name: i_satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 371 units: degrees coordinates: [i_lon, i_lat] file_type: vgeoi file_key: geolocation_data/sensor_zenith I_SENA: name: i_satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 371 units: degrees coordinates: [i_lon, i_lat] file_type: vgeoi file_key: geolocation_data/sensor_azimuth M01: name: M01 wavelength: [0.402, 0.412, 0.422] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M02: name: M02 wavelength: [0.436, 0.445, 0.454] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M03: name: M03 wavelength: [0.478, 0.488, 0.498] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M04: name: M04 wavelength: [0.545, 0.555, 0.565] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M05: name: M05 wavelength: [0.662, 0.672, 0.682] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M06: name: M06 wavelength: [0.739, 0.746, 0.754] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] 
file_type: vl1bm M07: name: M07 wavelength: [0.846, 0.865, 0.885] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M08: name: M08 wavelength: [1.230, 1.240, 1.250] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M09: name: M09 wavelength: [1.371, 1.378, 1.386] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M10: name: M10 wavelength: [1.580, 1.610, 1.640] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M11: name: M11 wavelength: [2.225, 2.250, 2.275] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M12: name: M12 wavelength: [3.610, 3.700, 3.790] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M13: name: M13 wavelength: [3.973, 4.050, 4.128] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M14: name: M14 wavelength: [8.400, 8.550, 8.700] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M15: name: M15 wavelength: [10.263, 10.763, 11.263] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M16: name: M16 wavelength: [11.538, 12.013, 12.489] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M_SOLZ: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 742 units: degrees coordinates: [m_lon, m_lat] file_type: vgeom file_key: geolocation_data/solar_zenith M_SOLA: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 742 units: degrees coordinates: [m_lon, m_lat] file_type: vgeom file_key: geolocation_data/solar_azimuth M_SENZ: name: satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 742 units: degrees coordinates: [m_lon, m_lat] file_type: vgeom file_key: geolocation_data/sensor_zenith M_SENA: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 742 units: degrees coordinates: 
[m_lon, m_lat] file_type: vgeom file_key: geolocation_data/sensor_azimuth DNB: name: DNB wavelength: [0.500, 0.700, 0.900] resolution: 743 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 sr-1 file_units: W cm-2 sr-1 coordinates: [dnb_lon, dnb_lat] file_type: vl1bd file_key: observation_data/DNB_observations DNB_SZA: name: dnb_solar_zenith_angle standard_name: solar_zenith_angle resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod file_key: geolocation_data/solar_zenith DNB_SENZ: name: dnb_satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod file_key: geolocation_data/sensor_zenith DNB_LZA: name: dnb_lunar_zenith_angle standard_name: lunar_zenith_angle resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod file_key: geolocation_data/lunar_zenith DNB_SAA: name: dnb_solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod file_key: geolocation_data/solar_azimuth DNB_SENA: name: dnb_satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod file_key: geolocation_data/sensor_azimuth DNB_LAA: name: dnb_lunar_azimuth_angle standard_name: lunar_azimuth_angle resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod file_key: geolocation_data/lunar_azimuth dnb_moon_illumination_fraction: name: dnb_moon_illumination_fraction resolution: 743 file_type: vgeod file_key: geolocation_data/moon_illumination_fraction coordinates: [dnb_lon, dnb_lat] satpy-0.34.0/satpy/etc/readers/viirs_sdr.yaml000066400000000000000000000405321420401153000211600ustar00rootroot00000000000000reader: name: viirs_sdr description: VIIRS SDR Reader reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRReader sensors: [viirs] # file pattern keys to sort files by with 'satpy.utils.group_files' # by default, don't use start_time group files (only orbit and platform) group_keys: ['orbit', 'platform_shortname'] datasets: i_lon: name: i_longitude resolution: 371 file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: All_Data/{dataset_group}_All/Longitude file_units: "degrees_east" standard_name: longitude coordinates: [i_longitude, i_latitude] i_lat: name: i_latitude resolution: 371 file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: All_Data/{dataset_group}_All/Latitude file_units: "degrees_north" standard_name: latitude coordinates: [i_longitude, i_latitude] m_lon: name: m_longitude resolution: 742 file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: All_Data/{dataset_group}_All/Longitude file_units: "degrees_east" standard_name: longitude coordinates: [m_longitude, m_latitude] m_lat: name: m_latitude resolution: 742 file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: All_Data/{dataset_group}_All/Latitude file_units: "degrees_north" standard_name: latitude coordinates: [m_longitude, m_latitude] dnb_lon: name: dnb_longitude resolution: 743 file_type: generic_file dataset_groups: [GDNBO] file_key: All_Data/{dataset_group}_All/Longitude file_units: "degrees_east" standard_name: longitude coordinates: [dnb_longitude, dnb_latitude] dnb_lat: name: dnb_latitude resolution: 743 file_type: generic_file dataset_groups: [GDNBO] file_key: All_Data/{dataset_group}_All/Latitude file_units: "degrees_north" standard_name: latitude coordinates: [dnb_longitude, dnb_latitude] I01: name: I01 wavelength: [0.600, 0.640, 0.680] modifiers:
[sunz_corrected_iband] dataset_groups: [SVI01] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 I02: name: I02 wavelength: [0.845, 0.865, 0.884] modifiers: [sunz_corrected_iband] dataset_groups: [SVI02] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 I03: name: I03 wavelength: [1.580, 1.610, 1.640] modifiers: [sunz_corrected_iband] dataset_groups: [SVI03] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 I04: name: I04 wavelength: [3.580, 3.740, 3.900] file_type: generic_file dataset_groups: [SVI04] resolution: 371 coordinates: [i_longitude, i_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 I05: name: I05 wavelength: [10.500, 11.450, 12.300] dataset_groups: [SVI05] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M01: name: M01 wavelength: [0.402, 0.412, 0.422] modifiers: [sunz_corrected] dataset_groups: [SVM01] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M02: name: M02 wavelength: [0.436, 0.445, 0.454] modifiers: [sunz_corrected] dataset_groups: [SVM02] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M03: name: M03 wavelength: [0.478, 0.488, 0.498] modifiers: [sunz_corrected] dataset_groups: [SVM03] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M04: name: M04 wavelength: [0.545, 0.555, 0.565] modifiers: [sunz_corrected] dataset_groups: [SVM04] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M05: name: M05 wavelength: [0.662, 0.672, 0.682] modifiers: [sunz_corrected] dataset_groups: [SVM05] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: 
toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M06: name: M06 wavelength: [0.739, 0.746, 0.754] modifiers: [sunz_corrected] dataset_groups: [SVM06] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M07: name: M07 wavelength: [0.846, 0.865, 0.885] modifiers: [sunz_corrected] dataset_groups: [SVM07] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M08: name: M08 wavelength: [1.230, 1.240, 1.250] modifiers: [sunz_corrected] dataset_groups: [SVM08] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M09: name: M09 wavelength: [1.371, 1.378, 1.386] modifiers: [sunz_corrected] dataset_groups: [SVM09] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M10: name: M10 wavelength: [1.580, 1.610, 1.640] modifiers: [sunz_corrected] dataset_groups: [SVM10] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M11: name: M11 wavelength: [2.225, 2.250, 2.275] modifiers: [sunz_corrected] dataset_groups: [SVM11] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M12: name: M12 wavelength: [3.610, 3.700, 3.790] dataset_groups: [SVM12] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M13: name: M13 wavelength: [3.973, 4.050, 4.128] dataset_groups: [SVM13] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M14: name: M14 wavelength: [8.400, 8.550, 8.700] dataset_groups: [SVM14] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M15: name: M15 wavelength: [10.263, 10.763, 11.263] dataset_groups: [SVM15] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: 
toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M16: name: M16 wavelength: [11.538, 12.013, 12.489] dataset_groups: [SVM16] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 I_SOLZ: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 371 coordinates: [i_longitude, i_latitude] file_units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' I_SOLA: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 371 coordinates: [i_longitude, i_latitude] file_units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' I_SENZ: name: satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 371 coordinates: [i_longitude, i_latitude] file_units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' I_SENA: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 371 coordinates: [i_longitude, i_latitude] file_units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' M_SOLZ: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 742 coordinates: [m_longitude, m_latitude] file_units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' M_SOLA: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 742 coordinates: [m_longitude, m_latitude] file_units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' M_SENZ: name: satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 742 coordinates: [m_longitude, m_latitude] file_units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' M_SENA: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 742 coordinates: [m_longitude, m_latitude] file_units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' DNB: name: DNB wavelength: [0.500, 0.700, 0.900] resolution: 743 coordinates: [dnb_longitude, dnb_latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 sr-1 file_units: W cm-2 sr-1 dataset_groups: [SVDNB] file_type: generic_file DNB_SZA: name: dnb_solar_zenith_angle standard_name: solar_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' DNB_LZA: name: dnb_lunar_zenith_angle standard_name: lunar_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/LunarZenithAngle' DNB_SENZ: name: dnb_satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file 
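# The group_keys option near the top of this file tells Satpy how to bundle the many
# per-granule SDR files (see the example file names at the end of this file) into one
# Scene per orbit and platform. A sketch of that workflow (paths are illustrative;
# group_files is available from satpy.readers in current Satpy versions):
#
#   from glob import glob
#   from satpy import Scene
#   from satpy.readers import group_files
#
#   groups = group_files(glob("/data/sdr/*.h5"), reader="viirs_sdr")
#   for group in groups:                 # each group is a {reader_name: [files]} mapping
#       scn = Scene(filenames=group)
#       scn.load(["I04", "I05"])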
dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' DNB_SAA: name: dnb_solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' DNB_LAA: name: dnb_lunar_azimuth_angle standard_name: lunar_azimuth_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/LunarAzimuthAngle' DNB_SENA: name: dnb_satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' dnb_moon_illumination_fraction: name: dnb_moon_illumination_fraction file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/MoonIllumFraction' file_units: '1' file_types: generic_file: file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler file_patterns: ['{datasets}_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'] # Example filenames # GMODO-SVM01-SVM02-SVM03-SVM04-SVM05-SVM06-SVM07-SVM08-SVM09-SVM10-SVM11-SVM12-SVM13-SVM14-SVM15-SVM16_j01_d20190304_t1103049_e1108449_b06684_c20190304213641984108_nobc_ops.h5 # GMTCO_j01_d20190304_t1103049_e1108449_b06684_c20190304150845549693_nobc_ops.h5 # GDNBO-SVDNB_j01_d20190304_t1057236_e1103036_b06684_c20190304213641088765_nobc_ops.h5 # SVM15_npp_d20150311_t1126366_e1128008_b17451_c20150311113344455225_cspp_dev.h5 satpy-0.34.0/satpy/etc/readers/virr_l1b.yaml000066400000000000000000000106111420401153000206670ustar00rootroot00000000000000reader: description: reader for VIRR data name: virr_l1b sensors: [virr] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: virr_l1b: file_reader: !!python/name:satpy.readers.virr_l1b.VIRR_L1B file_patterns: - 'tf{creation_time:%Y%j%H%M%S}.{platform_id}-L_VIRRX_L1B.HDF' geolocation_prefix: '' virr_geoxx: file_reader: !!python/name:satpy.readers.virr_l1b.VIRR_L1B file_patterns: - 'tf{creation_time:%Y%j%H%M%S}.{platform_id}-L_VIRRX_GEOXX.HDF' geolocation_prefix: 'Geolocation/' datasets: R1: name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 0 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R2: name: '2' wavelength: [0.84, 0.865, 0.89] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 1 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance E1: name: '3' wavelength: [3.55, 3.74, 3.93] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 0 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: brightness_temperature E2: name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 1 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: brightness_temperature E3: name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 2 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: 
brightness_temperature R3: name: '6' wavelength: [1.55, 1.6, 1.64] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 2 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R4: name: '7' wavelength: [0.43, 0.455, 0.48] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 3 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R5: name: '8' wavelength: [0.48, 0.505, 0.53] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 4 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R6: name: '9' wavelength: [0.53, 0.555, 0.58] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 5 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R7: name: '10' wavelength: [1.325, 1.36, 1.395] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 6 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance satellite_azimuth_angle: name: satellite_azimuth_angle file_type: [virr_geoxx, virr_l1b] file_key: SensorAzimuth standard_name: sensor_azimuth_angle coordinates: [longitude, latitude] satellite_zenith_angle: name: satellite_zenith_angle file_type: [virr_geoxx, virr_l1b] file_key: SensorZenith standard_name: sensor_zenith_angle coordinates: [longitude, latitude] solar_azimuth_angle: name: solar_azimuth_angle file_type: [virr_geoxx, virr_l1b] file_key: SolarAzimuth standard_name: solar_azimuth_angle coordinates: [longitude, latitude] solar_zenith_angle: name: solar_zenith_angle file_type: [virr_geoxx, virr_l1b] file_key: SolarZenith standard_name: solar_zenith_angle coordinates: [longitude, latitude] longitude: name: longitude resolution: 1000 file_type: [virr_l1b, virr_geoxx] file_key: Longitude standard_name: longitude units: degrees_east coordinates: [longitude, latitude] latitude: name: latitude resolution: 1000 file_type: [virr_l1b, virr_geoxx] file_key: Latitude units: degrees_north standard_name: latitude coordinates: [longitude, latitude] satpy-0.34.0/satpy/etc/writers/000077500000000000000000000000001420401153000163365ustar00rootroot00000000000000satpy-0.34.0/satpy/etc/writers/awips_tiled.yaml000066400000000000000000000767301420401153000215430ustar00rootroot00000000000000# Originally converted from the CSPP Polar2Grid SCMI Writer # Some datasets are named differently and have not been converted to # Satpy-style naming yet. These config entries are commented out. 
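# All of the imager readers above produce swath data on longitude/latitude
# coordinates, so a typical workflow resamples to a gridded area matching one of the
# sectors defined below before writing AWIPS tiles. An illustrative sketch ('my_area'
# stands for any area defined in your areas.yaml or a pyresample AreaDefinition):
#
#   from satpy import Scene
#
#   scn = Scene(reader="virr_l1b", filenames=my_virr_files)   # my_virr_files: list of L1B paths
#   scn.load(["1", "4"])                                      # 0.63 um reflectance and 10.8 um BT
#   local_scn = scn.resample("my_area")
#   # local_scn is then passed to save_datasets with this writer, as sketched further below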
writer: name: awips_tiled description: AWIPS-compatible Tiled NetCDF4 Writer writer: !!python/name:satpy.writers.awips_tiled.AWIPSTiledWriter compress: True sectors: LCC: lower_left_lonlat: [-135, 20] upper_right_lonlat: [-60, 60] resolution: [1300000, 1300000] # y, x projection: '+proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs' Polar: lower_left_lonlat: [-180, 33] upper_right_lonlat: [-40.5, 78] resolution: [1400000, 1400000] projection: '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=60.0 +lon_0=-150 +units=m' Mercator: lower_left_lonlat: [-135, 0] upper_right_lonlat: [-30, 50] resolution: [2150000, 2150000] projection: '+proj=merc +datum=WGS84 +ellps=WGS84 +lon_0=-95 +lat_0=0 +units=m +no_defs' Pacific: lower_left_lonlat: [120, 0] upper_right_lonlat: [-135, 50] resolution: [2150000, 2150000] projection: '+proj=merc +datum=WGS84 +ellps=WGS84 +lon_0=170 +lat_0=0 +units=m +no_defs' GOES_TEST: lower_left_xy: [-5434894.8851, -5434894.8851] upper_right_xy: [5434894.8851, 5434894.8851] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-89.5 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_EAST: lower_left_xy: [-5434894.8851, -5434894.8851] upper_right_xy: [5434894.8851, 5434894.8851] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-75.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_WEST: lower_left_xy: [-5434894.8851, -5434894.8851] upper_right_xy: [5434894.8851, 5434894.8851] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-137.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_STORE: lower_left_xy: [-5434894.8851, -5434894.8851] upper_right_xy: [5434894.8851, 5434894.8851] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-105.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' AHI Full Disk: lower_left_xy: [-5499999.901174725, -5499999.901174725] upper_right_xy: [5499999.901174725, 5499999.901174725] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=140.7 +h=35785863.0 +a=6378137.0 +b=6356752.3 +sweep=y +units=m +no_defs' templates: polar: single_variable: true add_sector_id_global: true filename: '{source_name}_AII_{platform_name}_{sensor}_{name}_{sector_id}_{tile_id}_{start_time:%Y%m%d_%H%M}.nc' global_attributes: start_date_time: {} # special handler for debugging in awips_tiled.py # value: "{start_time:%Y-%m-%dT%H:%M:%S}" product_name: value: "{name}" production_location: {} # value: "${ORGANIZATION}" awips_id: {} # value: "{awips_id}" # special variable created by awips_tiled.py physical_element: {} # value: "{physical_element}" #special variable created by awips_tiled.py satellite_id: value: "{platform_name!u}-{sensor!u}" coordinates: x: attributes: units: value: "{units}" encoding: dtype: "int16" _Unsigned: "true" y: attributes: units: value: "{units}" encoding: dtype: "int16" _Unsigned: "true" # XXX: Variable attributes *CAN NOT* be tile-specific. 
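# Writing the resampled scene with this writer is done through Scene.save_datasets.
# The keyword names below are assumptions based on the filename template and sector
# definitions above (check the AWIPS tiled writer documentation for the authoritative
# list); the values are examples only:
#
#   local_scn.save_datasets(
#       writer="awips_tiled",
#       sector_id="LCC",          # one of the sector names defined above
#       source_name="SSEC",       # fills {source_name} in the output filename template
#       tile_count=(8, 8),        # number of tiles in each dimension
#   )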
variables: # if no other section matches, we use this default: var_name: 'data' attributes: physical_element: value: '{name}' units: {} standard_name: value: '{standard_name}' encoding: dtype: int16 _Unsigned: "true" # ACSPO Products acspo_sst: reader: acspo name: sst var_name: data attributes: physical_element: raw_value: ACSPO SST # CLAVR-x Products default_clavrx: reader: clavrx var_name: data attributes: units: {} physical_element: value: 'CLAVR-x {name}' clavrx_cloud_type: reader: clavrx name: cloud_type var_name: data attributes: physical_element: raw_value: CLAVR-x Cloud Type units: {} clavrx_cld_temp_acha: reader: clavrx name: cld_temp_acha var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Top Temperature (ACHA) clavrx_cld_height_acha: reader: clavrx name: cld_height_acha var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Top Height (ACHA) clavrx_cloud_phase: reader: clavrx name: cloud_phase var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Phase clavrx_cld_opd_dcomp: reader: clavrx name: cld_opd_dcomp var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Optical Depth (dcomp) clavrx_clld_opd_nlcomp: reader: clavrx name: cloud_opd_nlcomp var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Optical Depth (nlcomp) clavrx_cld_reff_dcomp: reader: clavrx name: cld_reff_dcomp var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Effective Radius (dcomp) clavrx_cld_reff_nlcomp: reader: clavrx name: cld_reff_nlcomp var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Effective Radius (nlcomp) clavrx_cld_emiss_acha: reader: clavrx name: cld_emiss_acha var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Emissivity (ACHA) clavrx_refl_lunar_dnb_nom: reader: clavrx name: refl_lunar_dnb_nom var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Lunar Reflectance clavrx_rain_rate: reader: clavrx name: rain_rate var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Rain Rate # AVHRR L1B products avhrr_band1_vis: name: band1_vis var_name: data attributes: physical_element: raw_value: 0.63 um avhrr_band2_vis: name: band2_vis var_name: data attributes: physical_element: raw_value: 0.86 um avhrr_band3a_vis: name: band3a_vis var_name: data attributes: physical_element: raw_value: 1.61 um avhrr_band3b_bt: name: band3b_bt var_name: data attributes: physical_element: raw_value: 3.74 um avhrr_band4_bt: name: band4_bt var_name: data attributes: physical_element: raw_value: 10.8 um avhrr_band5_bt: name: band5_bt var_name: data attributes: physical_element: raw_value: 12.0 um # VIIRS SDRs viirs_i01: name: I01 var_name: data attributes: physical_element: raw_value: 0.64 um viirs_i02: name: I02 var_name: data attributes: physical_element: raw_value: 0.86 um viirs_i03: name: I03 var_name: data attributes: physical_element: raw_value: 1.61 um viirs_i04: name: I04 var_name: data attributes: physical_element: raw_value: 3.74 um viirs_i05: name: I05 var_name: data attributes: physical_element: raw_value: 11.5 um viirs_histogram_dnb: name: histogram_dnb var_name: data attributes: physical_element: raw_value: Histogram DNB viirs_adaptive_dnb: name: adaptive_dnb var_name: data attributes: physical_element: raw_value: Adaptive DNB viirs_dynamic_dnb: name: dynamic_dnb var_name: data attributes: physical_element: raw_value: Dynamic DNB viirs_hncc_dnb: name: 
hncc_dnb var_name: data attributes: physical_element: raw_value: HNCC DNB viirs_ifog: name: ssec_fog var_name: data attributes: physical_element: raw_value: Fog viirs_m01: name: M01 var_name: data attributes: physical_element: raw_value: 0.41 um viirs_m02: name: M02 var_name: data attributes: physical_element: raw_value: 0.45 um viirs_m03: name: M03 var_name: data attributes: physical_element: raw_value: 0.49 um viirs_m04: name: M04 var_name: data attributes: physical_element: raw_value: 0.56 um viirs_m05: name: M05 var_name: data attributes: physical_element: raw_value: 0.67 um viirs_m06: name: M06 var_name: data attributes: physical_element: raw_value: 0.75 um viirs_m07: name: M07 var_name: data attributes: physical_element: raw_value: 0.86 um viirs_m08: name: M08 var_name: data attributes: physical_element: raw_value: 1.24 um viirs_m09: name: M09 var_name: data attributes: physical_element: raw_value: 1.38 um viirs_m10: name: M10 var_name: data attributes: physical_element: raw_value: 1.61 um viirs_m11: name: M11 var_name: data attributes: physical_element: raw_value: 2.25 um viirs_m12: name: M12 var_name: data attributes: physical_element: raw_value: 3.70 um viirs_m13: name: M13 var_name: data attributes: physical_element: raw_value: 4.05 um viirs_m14: name: M14 var_name: data attributes: physical_element: raw_value: 8.6 um viirs_m15: name: M15 var_name: data attributes: physical_element: raw_value: 10.8 um viirs_m16: name: M16 var_name: data attributes: physical_element: raw_value: 12.0 um # VIIRS Corrected Reflectance # viirs_viirs_crefl01: # name: viirs_crefl01 # attributes: # physical_element: # raw_value: 0.67 um CR # viirs_viirs_crefl02: # name: viirs_crefl02 # attributes: # physical_element: # raw_value: 0.87 um CR # viirs_viirs_crefl03: # name: viirs_crefl03 # attributes: # physical_element: # raw_value: 0.49 um CR # viirs_viirs_crefl04: # name: viirs_crefl04 # attributes: # physical_element: # raw_value: 0.56 um CR # viirs_viirs_crefl05: # name: viirs_crefl05 # attributes: # physical_element: # raw_value: 1.24 um CR # viirs_viirs_crefl06: # name: viirs_crefl06 # attributes: # physical_element: # raw_value: 1.61 um CR # viirs_crefl07: # name: viirs_crefl07 # attributes: # physical_element: # raw_value: 2.25 um CR # viirs_crefl08: # name: viirs_crefl08 # attributes: # physical_element: # raw_value: 0.64 um CR # viirs_crefl09: # name: viirs_crefl09 # attributes: # physical_element: # raw_value: 0.87 um CR # viirs_crefl10: # name: viirs_crefl10 # attributes: # physical_element: # raw_value: 1.61 um CR # MODIS L1B Products # modis_vis01: # name: vis01 # physical_element: 0.65 um # modis_vis02: # name: vis02 # physical_element: 0.86 um # modis_vis03: # name: vis03 # physical_element: 0.47 um # modis_vis04: # name: vis04 # physical_element: 0.56 um # modis_vis05: # name: vis05 # physical_element: 1.24 um # modis_vis06: # name: vis06 # physical_element: 1.64 um # modis_vis07: # name: vis07 # physical_element: 2.13 um # modis_vis26: # name: vis26 # physical_element: 1.38 um # modis_bt20: # name: bt20 # physical_element: 3.75 um # modis_bt21: # name: bt21 # physical_element: Fire # modis_bt22: # name: bt22 # physical_element: 3.96 um # modis_bt23: # name: bt23 # physical_element: 4.05 um # modis_bt24: # name: bt24 # physical_element: 4.47 um # modis_bt25: # name: bt25 # physical_element: 4.52 um # modis_bt27: # name: bt27 # physical_element: 6.7 um # modis_bt28: # name: bt28 # physical_element: 7.3 um # modis_bt29: # name: bt29 # physical_element: 8.6 um # modis_bt30: # name: bt30 # 
physical_element: 9.7 um # modis_bt31: # name: bt31 # physical_element: 11.0 um # modis_bt32: # name: bt32 # physical_element: 12.0 um # modis_bt33: # name: bt33 # physical_element: 13.3 um # modis_bt34: # name: bt34 # physical_element: 13.6 um # modis_bt35: # name: bt35 # physical_element: 13.9 um # modis_bt36: # name: bt36 # physical_element: 14.2 um # modis_sst: # name: sst # physical_element: SST # modis_lst: # name: lst # physical_element: LST # modis_slst: # name: slst # physical_element: LSTSUM # modis_fog: # name: ssec_fog # physical_element: Fog # modis_ctt: # name: ctt # physical_element: CTT # modis_ndvi: # name: ndvi # physical_element: NDVI # modis_tpw: # name: tpw # physical_element: TPW # modis_ice_concentration: # name: ice_concentration # physical_element: Ice Concentration # modis_ist: # name: ist # physical_element: Ice Surface Temperature # MODIS L1B Corrected Reflectances # modis_crefl01_250m: # name: modis_crefl01_250m # physical_element: 0.65 um CR # modis_crefl01_500m: # name: modis_crefl01_250m # physical_element: 0.65 um CR # modis_crefl01_1000m: # name: modis_crefl01_1000m # physical_element: 0.65 um CR # modis_crefl02_250m: # name: modis_crefl02_250m # physical_element: 0.86 um CR # modis_crefl02_500m: # name: modis_crefl02_500m # physical_element: 0.86 um CR # modis_crefl02_1000m: # name: modis_crefl02_1000m # physical_element: 0.86 um CR # modis_crefl03_250m: # name: modis_crefl03_250m # physical_element: 0.47 um CR # modis_crefl03_500m: # name: modis_crefl03_500m # physical_element: 0.47 um CR # modis_crefl03_1000m: # name: modis_crefl03_1000m # physical_element: 0.47 um CR # modis_crefl04_250m: # name: modis_crefl04_250m # physical_element: 0.56 um CR # modis_crefl04_500m: # name: modis_crefl04_500m # physical_element: 0.56 um CR # modis_crefl04_1000m: # name: modis_crefl04_1000m # physical_element: 0.56 um CR # modis_crefl05_500m: # name: modis_crefl05_500m # physical_element: 1.24 um CR # modis_crefl05_1000m: # name: modis_crefl05_1000m # physical_element: 1.24 um CR # modis_crefl06_500m: # name: modis_crefl06_500m # physical_element: 1.64 um CR # modis_crefl06_1000m: # name: modis_crefl06_1000m # physical_element: 1.64 um CR # modis_crefl07_500m: # name: modis_crefl07_500m # physical_element: 2.13 um CR # modis_crefl07_1000m: # name: modis_crefl07_1000m # physical_element: 2.13 um CR # MIRS Products # mirs_btemp_23v: # name: btemp_23v # physical_element: MIRS 23 GHZ V # mirs_btemp_31v: # name: btemp_31v # physical_element: MIRS 31 GHZ V # mirs_btemp_50h: # name: btemp_50h # physical_element: MIRS 50 GHZ H # mirs_btemp_51h: # name: btemp_51h # physical_element: MIRS 51 GHZ H # mirs_btemp_52h: # name: btemp_52h # physical_element: MIRS 52 GHZ H # mirs_btemp_53h: # name: btemp_53h # physical_element: MIRS 53 GHZ H # mirs_btemp_54h1: # name: btemp_54h1 # physical_element: MIRS 54 GHZ H-1 # mirs_btemp_54h2: # name: btemp_54h2 # physical_element: MIRS 54 GHZ H-2 # mirs_btemp_55h: # name: btemp_55h # physical_element: MIRS 55 GHZ H # mirs_btemp_57h1: # name: btemp_57h1 # physical_element: MIRS 57 GHZ H-1 # mirs_btemp_57h2: # name: btemp_57h2 # physical_element: MIRS 57 GHZ H-2 # mirs_btemp_57h3: # name: btemp_57h3 # physical_element: MIRS 57 GHZ H-3 # mirs_btemp_57h4: # name: btemp_57h4 # physical_element: MIRS 57 GHZ H-4 # mirs_btemp_57h5: # name: btemp_57h5 # physical_element: MIRS 57 GHZ H-5 # mirs_btemp_57h6: # name: btemp_57h6 # physical_element: MIRS 57 GHZ H-6 # mirs_btemp_88v: # name: btemp_88v # physical_element: MIRS 88 GHZ V # mirs_btemp_165h: # name: 
btemp_165h # physical_element: MIRS 165 GHZ H # mirs_btemp_183h1: # name: btemp_183h1 # physical_element: MIRS 183 GHZ H-1 # mirs_btemp_183h2: # name: btemp_183h2 # physical_element: MIRS 183 GHZ H-2 # mirs_btemp_183h3: # name: btemp_183h3 # physical_element: MIRS 183 GHZ H-3 # mirs_btemp_183h4: # name: btemp_183h4 # physical_element: MIRS 183 GHZ H-4 # mirs_btemp_183h5: # name: btemp_183h5 # physical_element: MIRS 183 GHZ H-5 # MIRS BTs - NOAA-18 - AMSU-A MHS # MIRS BTs - NOAA-19 - AMSU-A MHS # MIRS BTs - M1 (metopb) - AMSU-A MHS # MIRS BTs - M2 (metopa) - AMSU-A MHS # mirs_btemp_50v: # name: btemp_50v # physical_element: MIRS 50 GHZ V # mirs_btemp_52v: # name: btemp_52v # physical_element: MIRS 52 GHZ V # mirs_btemp_54h: # name: btemp_54h # physical_element: MIRS 54 GHZ H # mirs_btemp_54v: # name: btemp_54v # physical_element: MIRS 54 GHZ V # mirs_btemp_89v1: # name: btemp_89v1 # physical_element: MIRS 89 GHZ V-1 # mirs_btemp_89v2: # name: btemp_89v2 # physical_element: MIRS 89 GHZ V-2 # 157h on OPSO NOAA site # mirs_btemp_157v: # name: btemp_157v # physical_element: MIRS 157 GHZ V # mirs_btemp_190v: # name: btemp_190v # physical_element: MIRS 190 GHZ V # mirs_rain_rate: # reader: mirs # name: rain_rate # physical_element: MIRS Rain Rate # mirs_snow_cover: # reader: mirs # name: snow_cover # physical_element: MIRS Snow Cover # mirs_sea_ice: # reader: mirs # name: sea_ice # physical_element: MIRS Sea Ice # mirs_swe: # reader: mirs # name: swe # physical_element: MIRS SWE # mirs_clw: # reader: mirs # name: clw # physical_element: MIRS CLW # mirs_tpw: # reader: mirs # name: tpw # physical_element: MIRS TPW # mirs_tskin: # reader: mirs # name: tskin # physical_element: MIRS Skin Temperature # AMSR-2 L1B amsr2_btemp_36.5h: name: btemp_36.5h var_name: data attributes: physical_element: raw_value: 36.5 GHz H amsr2_btemp_36.5v: name: btemp_36.5v var_name: data attributes: physical_element: raw_value: 36.5 GHz V amsr2_btemp_89.0ah: name: btemp_89.0ah var_name: data attributes: physical_element: raw_value: 89.0 GHz AH amsr2_btemp_89.0av: name: btemp_89.0av var_name: data attributes: physical_element: raw_value: 89.0 GHz AV amsr2_btemp_89.0bh: name: btemp_89.0bh var_name: data attributes: physical_element: raw_value: 89.0 GHz BH amsr2_btemp_89.0bv: name: btemp_89.0bv var_name: data attributes: physical_element: raw_value: 89.0 GHz BV # GEOCAT Level 1 Products geocat_surface_type: name: pixel_surface_type var_name: data attributes: physical_element: raw_value: Surface Type # GEOCAT Level 2 Products glm_l2_radc: single_variable: false # OR_GLM-L2-GLMF-M6_G16_T10_e20201105150300.nc filename: '{environment_prefix}_GLM-L2-GLM{scene_abbr}-{scan_mode}_{platform_shortname}_T{tile_number:02d}_{end_time:%Y%m%d%H%M%S}.nc' global_attributes: # FIXME: This should come from the reader's metadata dataset_name: value: 'OR_GLM-L2-GLM{scene_abbr}-{scan_mode}_{platform_shortname}_s{start_time:%Y%j%H%M%S0}_e{end_time:%Y%j%H%M%S0}_c{creation_time:%Y%j%H%M%S0}.nc' time_coverage_end: value: "{end_time:%Y-%m-%dT%H:%M:%S.%fZ}" time_coverage_start: value: "{start_time:%Y-%m-%dT%H:%M:%S.%fZ}" production_site: {} # special handler in awips_tiled.py platform_ID: value: "{platform_shortname}" cdm_data_type: raw_value: "Image" spatial_resolution: value: "{spatial_resolution}" orbital_slot: value: "{orbital_slot}" # This is used by AWIPS as sectorID scene_id: value: "{scene_id}" coordinates: x: attributes: units: value: "{units}" encoding: dtype: "int16" # _Unsigned: "true" y: attributes: units: value: "{units}" encoding: dtype: 
"int16" # _Unsigned: "true" variables: DQF: # Not currently viewable by AWIPS name: DQF attributes: # physical_element: # raw_value: "GLM_DQF" units: raw_value: "1" long_name: value: "{long_name}" standard_name: value: "{standard_name}" flag_values: raw_key: "flag_values" flag_meanings: raw_key: "flag_meanings" encoding: dtype: int8 _Unsigned: "true" flash_extent_density: name: "flash_extent_density" var_name: "Flash_extent_density" attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" flash_extent_density_window: name: "flash_extent_density_window" var_name: "Flash_extent_density_window" attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density_Window" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" average_flash_area: name: "average_flash_area" var_name: "Average_flash_area" attributes: # physical_element: # raw_value: "GLM_Average_Flash_Area" units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" average_flash_area_window: name: "average_flash_area_window" var_name: "Average_flash_area_window" attributes: # physical_element: # raw_value: "GLM_Average_Flash_Area_Window" units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" minimum_flash_area: name: "minimum_flash_area" var_name: "Minimum_flash_area" attributes: units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" minimum_flash_area_window: name: "minimum_flash_area_window" var_name: "Minimum_flash_area_window" attributes: units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" total_optical_energy: name: "total_energy" var_name: "Total_Optical_energy" attributes: # physical_element: # raw_value: "GLM_Total_Optical_Energy" units: value: "{units}" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" total_optical_energy_window: name: "total_energy_window" var_name: "Total_optical_energy_window" attributes: # physical_element: # raw_value: "GLM_Total_Optical_Energy_Window" units: value: "{units}" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" glm_l2_radf: single_variable: false # OR_GLM-L2-GLMF-M6_G16_T10_e20201105150300.nc filename: '{environment_prefix}_GLM-L2-GLM{scene_abbr}-{scan_mode}_{platform_shortname}_T{tile_number:02d}_{end_time:%Y%m%d%H%M%S}.nc' global_attributes: # FIXME: This should come from the reader's metadata dataset_name: # XXX: global attributes don't currently know things about tiles so we hardcode this to T00 value: 'OR_GLM-L2-GLM{scene_abbr}-{scan_mode}_{platform_shortname}_T00_e{end_time:%Y%m%d%H%M%S}.nc' time_coverage_end: value: "{end_time:%Y-%m-%dT%H:%M:%SZ}" time_coverage_start: value: "{start_time:%Y-%m-%dT%H:%M:%SZ}" production_site: {} # special handler in awips_tiled.py platform_ID: value: "{platform_shortname}" cdm_data_type: raw_value: "Image" spatial_resolution: value: "{spatial_resolution}" orbital_slot: value: "{orbital_slot}" # This is used by AWIPS as sectorID scene_id: value: "{scene_id}" coordinates: x: attributes: units: value: "{units}" 
encoding: dtype: "int16" # _Unsigned: "true" y: attributes: units: value: "{units}" encoding: dtype: "int16" # _Unsigned: "true" variables: DQF: # Not currently viewable by AWIPS name: DQF attributes: # physical_element: # raw_value: "GLM_DQF" units: raw_value: "1" long_name: value: "{long_name}" standard_name: value: "{standard_name}" flag_values: raw_key: "flag_values" flag_meanings: raw_key: "flag_meanings" encoding: dtype: int8 _Unsigned: "true" flash_extent_density: name: "flash_extent_density" var_name: "Flash_extent_density" attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" scale_factor: 1.0000001 add_offset: 0.0 _FillValue: -1 flash_extent_density_window: name: "flash_extent_density_window" var_name: "Flash_extent_density_w5u1" attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density_Window" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" average_flash_area: name: "average_flash_area" var_name: "Average_flash_area" attributes: # physical_element: # raw_value: "GLM_Average_Flash_Area" units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" average_flash_area_window: name: "average_flash_area_window" var_name: "Average_flash_area_w5u1" attributes: # physical_element: # raw_value: "GLM_Average_Flash_Area_Window" units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" minimum_flash_area: name: "minimum_flash_area" var_name: "Minimum_flash_area" attributes: units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" minimum_flash_area_window: name: "minimum_flash_area_window" var_name: "Minimum_flash_area_w5u1" attributes: units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" total_optical_energy: name: "total_energy" var_name: "Total_Optical_energy" attributes: # physical_element: # raw_value: "GLM_Total_Optical_Energy" units: value: "{units}" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" total_optical_energy_window: name: "total_energy_window" var_name: "Total_optical_energy_w5u1" attributes: # physical_element: # raw_value: "GLM_Total_Optical_Energy_Window" units: value: "{units}" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" satpy-0.34.0/satpy/etc/writers/cf.yaml000066400000000000000000000003101420401153000176040ustar00rootroot00000000000000writer: name: cf description: Generic netCDF4/CF Writer writer: !!python/name:satpy.writers.cf_writer.CFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.nc' compress: DEFLATE zlevel: 6 satpy-0.34.0/satpy/etc/writers/geotiff.yaml000066400000000000000000000003161420401153000206450ustar00rootroot00000000000000writer: name: geotiff description: Generic GeoTIFF Writer writer: !!python/name:satpy.writers.geotiff.GeoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE zlevel: 6 satpy-0.34.0/satpy/etc/writers/mitiff.yaml000066400000000000000000000003231420401153000204760ustar00rootroot00000000000000--- writer: name: mitiff description: Generic MITIFF Writer writer: 
!!python/name:satpy.writers.mitiff.MITIFFWriter filename: '{name:s}_{start_time:%Y%m%d_%H%M%S}.mitiff' compress: DEFLATE zlevel: 6 satpy-0.34.0/satpy/etc/writers/ninjogeotiff.yaml000066400000000000000000000003651420401153000217070ustar00rootroot00000000000000writer: name: ninjogeotiff description: GeoTIFF Writer with NinJo tags in GDALMetaData writer: !!python/name:satpy.writers.ninjogeotiff.NinJoGeoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE zlevel: 6 satpy-0.34.0/satpy/etc/writers/ninjotiff.yaml000066400000000000000000000003161420401153000212100ustar00rootroot00000000000000writer: name: ninjotiff description: NinjoTIFF Writer writer: !!python/name:satpy.writers.ninjotiff.NinjoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE zlevel: 6 satpy-0.34.0/satpy/etc/writers/simple_image.yaml000066400000000000000000000002651420401153000216600ustar00rootroot00000000000000writer: name: simple_image description: Generic Image Writer writer: !!python/name:satpy.writers.simple_image.PillowWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.png' satpy-0.34.0/satpy/modifiers/000077500000000000000000000000001420401153000160455ustar00rootroot00000000000000satpy-0.34.0/satpy/modifiers/__init__.py000066400000000000000000000024601420401153000201600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modifier classes and other related utilities.""" # file deepcode ignore W0611: Ignore unused imports in init module from .base import ModifierBase # noqa: F401, isort: skip from .atmosphere import CO2Corrector # noqa: F401 from .atmosphere import PSPAtmosphericalCorrection # noqa: F401 from .atmosphere import PSPRayleighReflectance # noqa: F401 from .geometry import EffectiveSolarPathLengthCorrector # noqa: F401 from .geometry import SunZenithCorrector # noqa: F401 from .spectral import NIREmissivePartFromReflectance # noqa: F401 from .spectral import NIRReflectance # noqa: F401 satpy-0.34.0/satpy/modifiers/_crefl.py000066400000000000000000000150461420401153000176570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
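# A hedged configuration sketch (YAML, as it would appear in an etc/composites file)
# showing how the ReflectanceCorrector modifier defined below is typically wired up;
# the modifier key, DEM URL, checksum and prerequisite names are illustrative
# placeholders, not values taken from this repository:
#   rayleigh_corrected_crefl:
#     modifier: !!python/name:satpy.modifiers._crefl.ReflectanceCorrector
#     url: "https://example.com/CMGDEM.hdf"
#     known_hash: "sha256:<checksum of the DEM file>"
#     optional_prerequisites:
#       - satellite_azimuth_angle
#       - satellite_zenith_angle
#       - solar_azimuth_angle
#       - solar_zenith_angle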
"""Classes related to the CREFL (corrected reflectance) modifier.""" import logging import warnings import numpy as np from satpy.aux_download import DataDownloadMixin, retrieve from satpy.modifiers import ModifierBase from satpy.modifiers.angles import get_angles LOG = logging.getLogger(__name__) class ReflectanceCorrector(ModifierBase, DataDownloadMixin): """Corrected Reflectance (crefl) modifier. Uses a python rewrite of the C CREFL code written for VIIRS and MODIS. """ def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation", url=None, known_hash=None, **kwargs): """Initialize the compositor with values from the user or from the configuration file. If `dem_filename` can't be found or opened then correction is done assuming TOA or sealevel options. Args: dem_filename (str): DEPRECATED url (str): URL or local path to the Digital Elevation Model (DEM) HDF4 file. If unset (None or empty string), then elevation is assumed to be 0 everywhere. known_hash (str): Optional SHA256 checksum to verify the download of ``url``. dem_sds (str): Name of the variable in the elevation file to load. """ if dem_filename is not None: warnings.warn("'dem_filename' for 'ReflectanceCorrector' is " "deprecated. Use 'url' instead.", DeprecationWarning) super(ReflectanceCorrector, self).__init__(*args, **kwargs) self.dem_sds = dem_sds self.url = url self.known_hash = known_hash self.dem_cache_key = self._get_registered_dem_cache_key() def _get_registered_dem_cache_key(self): if not self.url: return reg_files = self.register_data_files([{ 'url': self.url, 'known_hash': self.known_hash} ]) return reg_files[0] def __call__(self, datasets, optional_datasets, **info): """Create modified DataArray object by applying the crefl algorithm.""" from satpy.modifiers._crefl_utils import get_coefficients refl_data, angles = self._extract_angle_data_arrays(datasets, optional_datasets) coefficients = get_coefficients(refl_data.attrs["sensor"], refl_data.attrs["wavelength"], refl_data.attrs["resolution"]) results = self._call_crefl(refl_data, coefficients, angles) info.update(refl_data.attrs) info["rayleigh_corrected"] = True results.attrs = info self.apply_modifier_info(refl_data, results) return results def _call_crefl(self, refl_data, coefficients, angles): from satpy.modifiers._crefl_utils import run_crefl avg_elevation = self._get_average_elevation() lons, lats = refl_data.attrs['area'].get_lonlats(chunks=refl_data.chunks) is_percent = refl_data.attrs["units"] == "%" use_abi = refl_data.attrs['sensor'] == 'abi' results = run_crefl(refl_data, coefficients, lons, lats, *angles, avg_elevation=avg_elevation, percent=is_percent, use_abi=use_abi) factor = 100. if is_percent else 1. 
results = results * factor return results def _get_average_elevation(self): if self.dem_cache_key is None: return LOG.debug("Loading CREFL averaged elevation information from: %s", self.dem_cache_key) local_filename = retrieve(self.dem_cache_key) avg_elevation = self._read_var_from_hdf4_file(local_filename, self.dem_sds).astype(np.float64) if isinstance(avg_elevation, np.ma.MaskedArray): avg_elevation = avg_elevation.filled(np.nan) return avg_elevation @staticmethod def _read_var_from_hdf4_file(local_filename, var_name): try: return ReflectanceCorrector._read_var_from_hdf4_file_pyhdf(local_filename, var_name) except (ImportError, OSError): return ReflectanceCorrector._read_var_from_hdf4_file_netcdf4(local_filename, var_name) @staticmethod def _read_var_from_hdf4_file_netcdf4(local_filename, var_name): from netCDF4 import Dataset as NCDataset # HDF4 file, NetCDF library needs to be compiled with HDF4 support nc = NCDataset(local_filename, "r") # average elevation is stored as a 16-bit signed integer but with # scale factor 1 and offset 0, convert it to float here return nc.variables[var_name][:] @staticmethod def _read_var_from_hdf4_file_pyhdf(local_filename, var_name): from pyhdf.SD import SD, SDC f = SD(local_filename, SDC.READ) var = f.select(var_name) data = var[:] fill = ReflectanceCorrector._read_fill_value_from_hdf4(var, data.dtype) return np.ma.MaskedArray(data, data == fill) @staticmethod def _read_fill_value_from_hdf4(var, dtype): from pyhdf.error import HDF4Error try: return var.getfillvalue() except HDF4Error: return np.iinfo(dtype).min def _extract_angle_data_arrays(self, datasets, optional_datasets): all_datasets = datasets + optional_datasets if len(all_datasets) == 1: vis = self.match_data_arrays(datasets)[0] return vis, get_angles(vis) if len(all_datasets) == 5: vis, *angles = self.match_data_arrays( datasets + optional_datasets) return vis, angles raise ValueError("Not sure how to handle provided dependencies. " "Either all 4 angles must be provided or none of " "of them.") satpy-0.34.0/satpy/modifiers/_crefl_utils.py000066400000000000000000000441561420401153000211030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Shared utilities for correcting reflectance data using the 'crefl' algorithm. Original code written by Ralph Kuehn with modifications by David Hoese and Martin Raspaud. Ralph's code was originally based on the C crefl code distributed for VIIRS and MODIS. 
""" import logging import dask.array as da import numpy as np import xarray as xr LOG = logging.getLogger(__name__) bUseV171 = False if bUseV171: UO3 = 0.319 UH2O = 2.93 else: UO3 = 0.285 UH2O = 2.93 MAXSOLZ = 86.5 MAXAIRMASS = 18 SCALEHEIGHT = 8000 FILL_INT16 = 32767 TAUSTEP4SPHALB_ABI = .0003 TAUSTEP4SPHALB = .0001 MAXNUMSPHALBVALUES = 4000 # with no aerosol taur <= 0.4 in all bands everywhere REFLMIN = -0.01 REFLMAX = 1.6 def _csalbr(tau): # Previously 3 functions csalbr fintexp1, fintexp3 a = [-.57721566, 0.99999193, -0.24991055, 0.05519968, -0.00976004, 0.00107857] # xx = a[0] + a[1] * tau + a[2] * tau**2 + a[3] * tau**3 + a[4] * tau**4 + a[5] * tau**5 # xx = np.polyval(a[::-1], tau) # xx = a[0] # xftau = 1.0 # for i in xrange(5): # xftau = xftau*tau # xx = xx + a[i] * xftau fintexp1 = np.polyval(a[::-1], tau) - np.log(tau) fintexp3 = (np.exp(-tau) * (1.0 - tau) + tau**2 * fintexp1) / 2.0 return (3.0 * tau - fintexp3 * (4.0 + 2.0 * tau) + 2.0 * np.exp(-tau)) / (4.0 + 3.0 * tau) # From crefl.1.7.1 if bUseV171: aH2O = np.array([-5.60723, -5.25251, 0, 0, -6.29824, -7.70944, -3.91877, 0, 0, 0, 0, 0, 0, 0, 0, 0]) bH2O = np.array([0.820175, 0.725159, 0, 0, 0.865732, 0.966947, 0.745342, 0, 0, 0, 0, 0, 0, 0, 0, 0]) # const float aO3[Nbands]={ 0.0711, 0.00313, 0.0104, 0.0930, 0, # 0, 0, 0.00244, 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, # 0.00263};*/ aO3 = np.array( [0.0715289, 0, 0.00743232, 0.089691, 0, 0, 0, 0.001, 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263]) # const float taur0[Nbands] = { 0.0507, 0.0164, 0.1915, 0.0948, # 0.0036, 0.0012, 0.0004, 0.3109, 0.2375, 0.1596, 0.1131, 0.0994, # 0.0446, 0.0416, 0.0286, 0.0155};*/ taur0 = np.array( [0.05100, 0.01631, 0.19325, 0.09536, 0.00366, 0.00123, 0.00043, 0.3139, 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]) else: # From polar2grid cviirs.c # This number is what Ralph says "looks good" rg_fudge = .55 aH2O = np.array( [0.000406601, 0.0015933, 0, 1.78644e-05, 0.00296457, 0.000617252, 0.000996563, 0.00222253, 0.00094005, 0.000563288, 0, 0, 0, 0, 0, 0, 2.4111e-003, 7.8454e-003*rg_fudge, 7.9258e-3, 9.3392e-003, 2.53e-2]) bH2O = np.array([0.812659, 0.832931, 1., 0.8677850, 0.806816, 0.944958, 0.78812, 0.791204, 0.900564, 0.942907, 0, 0, 0, 0, 0, 0, # These are actually aO2 values for abi calculations 1.2360e-003, 3.7296e-003, 177.7161e-006, 10.4899e-003, 1.63e-2]) # /*const float aO3[Nbands]={ 0.0711, 0.00313, 0.0104, 0.0930, 0, 0, 0, 0.00244, # 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263};*/ aO3 = np.array([0.0433461, 0.0, 0.0178299, 0.0853012, 0, 0, 0, 0.0813531, 0, 0, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263, 4.2869e-003, 25.6509e-003*rg_fudge, 802.4319e-006, 0.0000e+000, 2e-5]) # /*const float taur0[Nbands] = { 0.0507, 0.0164, 0.1915, 0.0948, 0.0036, 0.0012, 0.0004, # 0.3109, 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155};*/ taur0 = np.array([0.04350, 0.01582, 0.16176, 0.09740, 0.00369, 0.00132, 0.00033, 0.05373, 0.01561, 0.00129, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155, 184.7200e-003, 52.3490e-003, 15.8450e-003, 1.3074e-003, 311.2900e-006]) # add last 5 from bH2O to aO2 aO2 = 0 # Map of pixel resolutions -> wavelength -> coefficient index # Map of pixel resolutions -> band name -> coefficient index # Index is used in aH2O, bH2O, aO3, and taur0 arrays above MODIS_COEFF_INDEX_MAP = { 1000: { (0.620, 0.6450, 0.670): 0, "1": 0, (0.841, 0.8585, 0.876): 1, "2": 1, (0.459, 0.4690, 0.479): 2, "3": 2, (0.545, 0.5550, 0.565): 3, "4": 3, (1.230, 
1.2400, 1.250): 4, "5": 4, (1.628, 1.6400, 1.652): 5, "6": 5, (2.105, 2.1300, 2.155): 6, "7": 6, } } MODIS_COEFF_INDEX_MAP[500] = MODIS_COEFF_INDEX_MAP[1000] MODIS_COEFF_INDEX_MAP[250] = MODIS_COEFF_INDEX_MAP[1000] # resolution -> wavelength -> coefficient index # resolution -> band name -> coefficient index VIIRS_COEFF_INDEX_MAP = { 1000: { (0.662, 0.6720, 0.682): 0, # M05 "M05": 0, (0.846, 0.8650, 0.885): 1, # M07 "M07": 1, (0.478, 0.4880, 0.498): 2, # M03 "M03": 2, (0.545, 0.5550, 0.565): 3, # M04 "M04": 3, (1.230, 1.2400, 1.250): 4, # M08 "M08": 4, (1.580, 1.6100, 1.640): 5, # M10 "M10": 5, (2.225, 2.2500, 2.275): 6, # M11 "M11": 6, }, 500: { (0.600, 0.6400, 0.680): 7, # I01 "I01": 7, (0.845, 0.8650, 0.884): 8, # I02 "I02": 8, (1.580, 1.6100, 1.640): 9, # I03 "I03": 9, }, } # resolution -> wavelength -> coefficient index # resolution -> band name -> coefficient index ABI_COEFF_INDEX_MAP = { 2000: { (0.450, 0.470, 0.490): 16, # C01 "C01": 16, (0.590, 0.640, 0.690): 17, # C02 "C02": 17, (0.8455, 0.865, 0.8845): 18, # C03 "C03": 18, # (1.3705, 1.378, 1.3855): None, # C04 # "C04": None, (1.580, 1.610, 1.640): 19, # C05 "C05": 19, (2.225, 2.250, 2.275): 20, # C06 "C06": 20 }, } COEFF_INDEX_MAP = { "viirs": VIIRS_COEFF_INDEX_MAP, "modis": MODIS_COEFF_INDEX_MAP, "abi": ABI_COEFF_INDEX_MAP, } def find_coefficient_index(sensor, wavelength_range, resolution=0): """Return index in to coefficient arrays for this band's wavelength. This function search through the `COEFF_INDEX_MAP` dictionary and finds the first key where the nominal wavelength of `wavelength_range` falls between the minimum wavelength and maximum wavelength of the key. `wavelength_range` can also be the standard name of the band. For example, "M05" for VIIRS or "1" for MODIS. :param sensor: sensor of band to be corrected :param wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength) :param resolution: resolution of the band to be corrected :return: index in to coefficient arrays like `aH2O`, `aO3`, etc. None is returned if no matching wavelength is found """ index_map = COEFF_INDEX_MAP[sensor.lower()] # Find the best resolution of coefficients for res in sorted(index_map.keys()): if resolution <= res: index_map = index_map[res] break else: raise ValueError("Unrecognized data resolution: {}", resolution) # Find the best wavelength of coefficients if isinstance(wavelength_range, str): # wavelength range is actually a band name return index_map[wavelength_range] for k, v in index_map.items(): if isinstance(k, str): # we are analyzing wavelengths and ignoring dataset names continue if k[0] <= wavelength_range[1] <= k[2]: return v def get_coefficients(sensor, wavelength_range, resolution=0): """Get coefficients used in CREFL correction. 
Args: sensor: sensor of the band to be corrected wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength) resolution: resolution of the band to be corrected Returns: aH2O, bH2O, aO3, taur0 coefficient values """ idx = find_coefficient_index(sensor, wavelength_range, resolution=resolution) return aH2O[idx], bH2O[idx], aO3[idx], taur0[idx] def _chand(phi, muv, mus, taur): # FROM FUNCTION CHAND # phi: azimuthal difference between sun and observation in degree # (phi=0 in backscattering direction) # mus: cosine of the sun zenith angle # muv: cosine of the observation zenith angle # taur: molecular optical depth # rhoray: molecular path reflectance # constant xdep: depolarization factor (0.0279) # xfd = (1-xdep/(2-xdep)) / (1 + 2*xdep/(2-xdep)) = 2 * (1 - xdep) / (2 + xdep) = 0.958725775 # */ xfd = 0.958725775 xbeta2 = 0.5 # float pl[5]; # double fs01, fs02, fs0, fs1, fs2; as0 = [0.33243832, 0.16285370, -0.30924818, -0.10324388, 0.11493334, -6.777104e-02, 1.577425e-03, -1.240906e-02, 3.241678e-02, -3.503695e-02] as1 = [0.19666292, -5.439061e-02] as2 = [0.14545937, -2.910845e-02] # float phios, xcos1, xcos2, xcos3; # float xph1, xph2, xph3, xitm1, xitm2; # float xlntaur, xitot1, xitot2, xitot3; # int i, ib; xph1 = 1.0 + (3.0 * mus * mus - 1.0) * (3.0 * muv * muv - 1.0) * xfd / 8.0 xph2 = -xfd * xbeta2 * 1.5 * mus * muv * np.sqrt( 1.0 - mus * mus) * np.sqrt(1.0 - muv * muv) xph3 = xfd * xbeta2 * 0.375 * (1.0 - mus * mus) * (1.0 - muv * muv) # pl[0] = 1.0 # pl[1] = mus + muv # pl[2] = mus * muv # pl[3] = mus * mus + muv * muv # pl[4] = mus * mus * muv * muv fs01 = as0[0] + (mus + muv) * as0[1] + (mus * muv) * as0[2] + ( mus * mus + muv * muv) * as0[3] + (mus * mus * muv * muv) * as0[4] fs02 = as0[5] + (mus + muv) * as0[6] + (mus * muv) * as0[7] + ( mus * mus + muv * muv) * as0[8] + (mus * mus * muv * muv) * as0[9] # for (i = 0; i < 5; i++) { # fs01 += (double) (pl[i] * as0[i]); # fs02 += (double) (pl[i] * as0[5 + i]); # } # for refl, (ah2o, bh2o, ao3, tau) in zip(reflectance_bands, coefficients): # ib = find_coefficient_index(center_wl) # if ib is None: # raise ValueError("Can't handle band with wavelength '{}'".format(center_wl)) xlntaur = np.log(taur) fs0 = fs01 + fs02 * xlntaur fs1 = as1[0] + xlntaur * as1[1] fs2 = as2[0] + xlntaur * as2[1] del xlntaur, fs01, fs02 trdown = np.exp(-taur / mus) trup = np.exp(-taur / muv) xitm1 = (1.0 - trdown * trup) / 4.0 / (mus + muv) xitm2 = (1.0 - trdown) * (1.0 - trup) xitot1 = xph1 * (xitm1 + xitm2 * fs0) xitot2 = xph2 * (xitm1 + xitm2 * fs1) xitot3 = xph3 * (xitm1 + xitm2 * fs2) del xph1, xph2, xph3, xitm1, xitm2, fs0, fs1, fs2 phios = np.deg2rad(phi + 180.0) xcos1 = 1.0 xcos2 = np.cos(phios) xcos3 = np.cos(2.0 * phios) del phios rhoray = xitot1 * xcos1 + xitot2 * xcos2 * 2.0 + xitot3 * xcos3 * 2.0 return rhoray, trdown, trup def _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, taustep4sphalb, tO2=1.0): tau_step = np.linspace(taustep4sphalb, MAXNUMSPHALBVALUES * taustep4sphalb, MAXNUMSPHALBVALUES) sphalb0 = _csalbr(tau_step) taur = tau * np.exp(-height / SCALEHEIGHT) rhoray, trdown, trup = _chand(phi, muv, mus, taur) sphalb = sphalb0[(taur / taustep4sphalb + 0.5).astype(np.int32)] Ttotrayu = ((2 / 3. + muv) + (2 / 3. - muv) * trup) / (4 / 3. + taur) Ttotrayd = ((2 / 3. + mus) + (2 / 3. - mus) * trdown) / (4 / 3. 
+ taur) TtotraytH2O = Ttotrayu * Ttotrayd * tH2O tOG = tO3 * tO2 return sphalb, rhoray, TtotraytH2O, tOG def get_atm_variables(mus, muv, phi, height, ah2o, bh2o, ao3, tau): """Get atmospheric variables for non-ABI instruments.""" air_mass = 1.0 / mus + 1 / muv air_mass[air_mass > MAXAIRMASS] = -1.0 tO3 = 1.0 tH2O = 1.0 if ao3 != 0: tO3 = np.exp(-air_mass * UO3 * ao3) if bh2o != 0: if bUseV171: tH2O = np.exp(-np.exp(ah2o + bh2o * np.log(air_mass * UH2O))) else: tH2O = np.exp(-(ah2o * ((air_mass * UH2O) ** bh2o))) # Returns sphalb, rhoray, TtotraytH2O, tOG return _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, TAUSTEP4SPHALB) def get_atm_variables_abi(mus, muv, phi, height, G_O3, G_H2O, G_O2, ah2o, ao2, ao3, tau): """Get atmospheric variables for ABI.""" tO3 = 1.0 tH2O = 1.0 if ao3 != 0: tO3 = np.exp(-G_O3 * ao3) if ah2o != 0: tH2O = np.exp(-G_H2O * ah2o) tO2 = np.exp(-G_O2 * ao2) # Returns sphalb, rhoray, TtotraytH2O, tOG. return _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, TAUSTEP4SPHALB_ABI, tO2=tO2) def _G_calc(zenith, a_coeff): return (np.cos(np.deg2rad(zenith))+(a_coeff[0]*(zenith**a_coeff[1])*(a_coeff[2]-zenith)**a_coeff[3]))**-1 def _avg_elevation_index(avg_elevation, row, col): return avg_elevation[row, col] def run_crefl(refl, coeffs, lon, lat, sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation=None, percent=False, use_abi=False): """Run main crefl algorithm. All input parameters are per-pixel values meaning they are the same size and shape as the input reflectance data, unless otherwise stated. :param reflectance_bands: tuple of reflectance band arrays :param coefficients: tuple of coefficients for each band (see `get_coefficients`) :param lon: input swath longitude array :param lat: input swath latitude array :param sensor_azimuth: input swath sensor azimuth angle array :param sensor_zenith: input swath sensor zenith angle array :param solar_azimuth: input swath solar azimuth angle array :param solar_zenith: input swath solar zenith angle array :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf) :param percent: True if input reflectances are on a 0-100 scale instead of 0-1 scale (default: False) """ # FUTURE: Find a way to compute the average elevation before hand # Get digital elevation map data for our granule, set ocean fill value to 0 if avg_elevation is None: LOG.debug("No average elevation information provided in CREFL") # height = np.zeros(lon.shape, dtype=np.float64) height = 0. 
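# without a DEM, the correction assumes every pixel is at sea level (0 m)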
else: LOG.debug("Using average elevation information provided to CREFL") height = da.map_blocks(_space_mask_height, lon, lat, avg_elevation, chunks=lon.chunks, dtype=avg_elevation.dtype) mus = np.cos(np.deg2rad(solar_zenith)) mus = mus.where(mus >= 0) muv = np.cos(np.deg2rad(sensor_zenith)) phi = solar_azimuth - sensor_azimuth if use_abi: LOG.debug("Using ABI CREFL algorithm") corr_refl = da.map_blocks(_run_crefl_abi, refl.data, mus.data, muv.data, phi.data, solar_zenith.data, sensor_zenith.data, height, *coeffs, meta=np.ndarray((), dtype=refl.dtype), chunks=refl.chunks, dtype=refl.dtype, percent=percent) else: LOG.debug("Using original VIIRS CREFL algorithm") corr_refl = da.map_blocks(_run_crefl, refl.data, mus.data, muv.data, phi.data, height, *coeffs, meta=np.ndarray((), dtype=refl.dtype), chunks=refl.chunks, dtype=refl.dtype, percent=percent) return xr.DataArray(corr_refl, dims=refl.dims, coords=refl.coords, attrs=refl.attrs) def _space_mask_height(lon, lat, avg_elevation): lat[(lat <= -90) | (lat >= 90)] = np.nan lon[(lon <= -180) | (lon >= 180)] = np.nan row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0).astype(np.int32) col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0).astype(np.int32) space_mask = np.isnan(lon) | np.isnan(lat) row[space_mask] = 0 col[space_mask] = 0 height = avg_elevation[row, col] # negative heights aren't allowed, clip to 0 height[(height < 0.0) | np.isnan(height) | space_mask] = 0.0 return height def _run_crefl(refl, mus, muv, phi, height, *coeffs, percent=True, computing_meta=False): if computing_meta: return refl sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables(mus, muv, phi, height, *coeffs) return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb, percent) def _run_crefl_abi(refl, mus, muv, phi, solar_zenith, sensor_zenith, height, *coeffs, percent=True, computing_meta=False): if computing_meta: return refl a_O3 = [268.45, 0.5, 115.42, -3.2922] a_H2O = [0.0311, 0.1, 92.471, -1.3814] a_O2 = [0.4567, 0.007, 96.4884, -1.6970] G_O3 = _G_calc(solar_zenith, a_O3) + _G_calc(sensor_zenith, a_O3) G_H2O = _G_calc(solar_zenith, a_H2O) + _G_calc(sensor_zenith, a_H2O) G_O2 = _G_calc(solar_zenith, a_O2) + _G_calc(sensor_zenith, a_O2) # Note: bh2o values are actually ao2 values for abi sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables_abi(mus, muv, phi, height, G_O3, G_H2O, G_O2, *coeffs) return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb, percent) def _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb, percent): if percent: corr_refl = ((refl / 100.) / tOG - rhoray) / TtotraytH2O else: corr_refl = (refl / tOG - rhoray) / TtotraytH2O corr_refl /= (1.0 + corr_refl * sphalb) return corr_refl.clip(REFLMIN, REFLMAX) satpy-0.34.0/satpy/modifiers/angles.py000066400000000000000000000424121420401153000176730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Utilties for getting various angles for a dataset..""" from __future__ import annotations import hashlib import os import shutil from datetime import datetime from functools import update_wrapper from glob import glob from typing import Any, Callable, Optional, Union import dask import numpy as np import xarray as xr from dask import array as da from pyorbital.astronomy import cos_zen as pyob_cos_zen from pyorbital.astronomy import get_alt_az from pyorbital.orbital import get_observer_look from pyresample.geometry import AreaDefinition, StackedAreaDefinition, SwathDefinition import satpy from satpy.utils import get_satpos, ignore_invalid_float_warnings PRGeometry = Union[SwathDefinition, AreaDefinition, StackedAreaDefinition] # Arbitrary time used when computing sensor angles that is passed to # pyorbital's get_observer_look function. # The difference is on the order of 1e-10 at most as time changes so we force # it to a single time for easier caching. It is *only* used if caching. STATIC_EARTH_INERTIAL_DATETIME = datetime(2000, 1, 1, 12, 0, 0) DEFAULT_UNCACHE_TYPES = (SwathDefinition, xr.DataArray, da.Array) HASHABLE_GEOMETRIES = (AreaDefinition, StackedAreaDefinition) class ZarrCacheHelper: """Helper for caching function results to on-disk zarr arrays. It is recommended to use this class through the :func:`cache_to_zarr_if` decorator rather than using it directly. Currently the cache does not perform any limiting or removal of cache content. That is left up to the user to manage. Caching is based on arguments passed to the decorated function but will only be performed if the arguments are of a certain type (see ``uncacheable_arg_types``). The cache value to use is purely based on the hash value of all of the provided arguments along with the "cache version" (see below). Args: func: Function that will be called to generate the value to cache. cache_config_key: Name of the boolean ``satpy.config`` parameter to use to determine if caching should be done. uncacheable_arg_types: Types that if present in the passed arguments should trigger caching to *not* happen. By default this includes ``SwathDefinition``, ``xr.DataArray``, and ``da.Array`` objects. sanitize_args_func: Optional function to call to sanitize provided arguments before they are considered for caching. This can be used to make arguments more "cacheable" by replacing them with similar values that will result in more cache hits. Note that the sanitized arguments are only passed to the underlying function if caching will be performed, otherwise the original arguments are passed. cache_version: Version number used to distinguish one version of a decorated function from future versions. Notes: * Caching only supports dask array values. * This helper allows for an additional ``cache_dir`` parameter to override the use of the ``satpy.config`` ``cache_dir`` parameter. 
Examples: To use through the :func:`cache_to_zarr_if` decorator:: @cache_to_zarr_if("cache_my_stuff") def generate_my_stuff(area_def: AreaDefinition, some_factor: int) -> da.Array: # Generate return my_dask_arr To use the decorated function:: with satpy.config.set(cache_my_stuff=True): my_stuff = generate_my_stuff(area_def, 5) """ def __init__(self, func: Callable, cache_config_key: str, uncacheable_arg_types=DEFAULT_UNCACHE_TYPES, sanitize_args_func: Callable = None, cache_version: int = 1, ): """Hold on to provided arguments for future use.""" self._func = func self._cache_config_key = cache_config_key self._uncacheable_arg_types = uncacheable_arg_types self._sanitize_args_func = sanitize_args_func self._cache_version = cache_version def cache_clear(self, cache_dir: Optional[str] = None): """Remove all on-disk files associated with this function. Intended to mimic the :func:`functools.cache` behavior. """ if cache_dir is None: cache_dir = satpy.config.get("cache_dir") if cache_dir is None: raise RuntimeError("No 'cache_dir' configured.") zarr_pattern = self._zarr_pattern("*", cache_version="*").format("*") for zarr_dir in glob(os.path.join(cache_dir, zarr_pattern)): try: shutil.rmtree(zarr_dir) except OSError: continue def _zarr_pattern(self, arg_hash, cache_version: Union[int, str] = None) -> str: if cache_version is None: cache_version = self._cache_version return f"{self._func.__name__}_v{cache_version}" + "_{}_" + f"{arg_hash}.zarr" def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: """Call the decorated function.""" new_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args arg_hash = _hash_args(*new_args, unhashable_types=self._uncacheable_arg_types) should_cache, cache_dir = self._get_should_cache_and_cache_dir(new_args, cache_dir) zarr_fn = self._zarr_pattern(arg_hash) zarr_format = os.path.join(cache_dir, zarr_fn) zarr_paths = glob(zarr_format.format("*")) if not should_cache or not zarr_paths: # use sanitized arguments if we are caching, otherwise use original arguments args = new_args if should_cache else args res = self._func(*args) if should_cache and not zarr_paths: self._cache_results(res, zarr_format) # if we did any caching, let's load from the zarr files if should_cache: # re-calculate the cached paths zarr_paths = sorted(glob(zarr_format.format("*"))) if not zarr_paths: raise RuntimeError("Data was cached to disk but no files were found") res = tuple(da.from_zarr(zarr_path) for zarr_path in zarr_paths) return res def _get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]: should_cache: bool = satpy.config.get(self._cache_config_key, False) can_cache = not any(isinstance(arg, self._uncacheable_arg_types) for arg in args) should_cache = should_cache and can_cache if cache_dir is None: cache_dir = satpy.config.get("cache_dir") return should_cache, cache_dir def _cache_results(self, res, zarr_format): os.makedirs(os.path.dirname(zarr_format), exist_ok=True) new_res = [] for idx, sub_res in enumerate(res): if not isinstance(sub_res, da.Array): raise ValueError("Zarr caching currently only supports dask " f"arrays. 
Got {type(sub_res)}") zarr_path = zarr_format.format(idx) # See https://github.com/dask/dask/issues/8380 with dask.config.set({"optimization.fuse.active": False}): new_sub_res = sub_res.to_zarr(zarr_path, return_stored=True, compute=False) new_res.append(new_sub_res) # actually compute the storage to zarr da.compute(new_res) def cache_to_zarr_if( cache_config_key: str, uncacheable_arg_types=DEFAULT_UNCACHE_TYPES, sanitize_args_func: Callable = None, ) -> Callable: """Decorate a function and cache the results as a zarr array on disk. This only happens if the ``satpy.config`` boolean value for the provided key is ``True`` as well as some other conditions. See :class:`ZarrCacheHelper` for more information. Most importantly, this decorator does not limit how many items can be cached and does not clear out old entries. It is up to the user to manage the size of the cache. """ def _decorator(func: Callable) -> Callable: zarr_cacher = ZarrCacheHelper(func, cache_config_key, uncacheable_arg_types, sanitize_args_func) wrapper = update_wrapper(zarr_cacher, func) return wrapper return _decorator def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): import json hashable_args = [] for arg in args: if isinstance(arg, unhashable_types): continue if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, datetime): arg = arg.isoformat(" ") hashable_args.append(arg) arg_hash = hashlib.sha1() # nosec arg_hash.update(json.dumps(tuple(hashable_args)).encode('utf8')) return arg_hash.hexdigest() def _sanitize_observer_look_args(*args): new_args = [] for arg in args: if isinstance(arg, datetime): new_args.append(STATIC_EARTH_INERTIAL_DATETIME) elif isinstance(arg, (float, np.float64, np.float32)): # round floating point numbers to nearest tenth new_args.append(round(arg, 1)) else: new_args.append(arg) return new_args def _geo_dask_to_data_array(arr: da.Array) -> xr.DataArray: return xr.DataArray(arr, dims=('y', 'x')) def get_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray, xr.DataArray, xr.DataArray]: """Get sun and satellite azimuth and zenith angles. Note that this function can benefit from the ``satpy.config`` parameters :ref:`cache_lonlats ` and :ref:`cache_sensor_angles ` being set to ``True``. Args: data_arr: DataArray to get angles for. Information extracted from this object are ``.attrs["area"]``,``.attrs["start_time"]``, and ``.attrs["orbital_parameters"]``. See :func:`satpy.utils.get_satpos` and :ref:`dataset_metadata` for more information. Additionally, the dask array chunk size is used when generating new arrays. The actual data of the object is not used. Returns: Four DataArrays representing sensor azimuth angle, sensor zenith angle, solar azimuth angle, and solar zenith angle. All values are in degrees. Sensor angles are provided in the [0, 360] degree range. Solar angles are provided in the [-180, 180] degree range. """ sata, satz = _get_sensor_angles(data_arr) suna, sunz = _get_sun_angles(data_arr) return sata, satz, suna, sunz def get_satellite_zenith_angle(data_arr: xr.DataArray) -> xr.DataArray: """Generate satellite zenith angle for the provided data. Note that this function can benefit from the ``satpy.config`` parameters :ref:`cache_lonlats ` and :ref:`cache_sensor_angles ` being set to ``True``. Values are in degrees. """ satz = _get_sensor_angles(data_arr)[1] return satz def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: """Generate the cosine of the solar zenith angle for the provided data. 
Returns: DataArray with the same shape as ``data_arr``. """ lons, lats = _get_valid_lonlats(data_arr.attrs["area"], data_arr.chunks) cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) return _geo_dask_to_data_array(cos_sza) @cache_to_zarr_if("cache_lonlats") def _get_valid_lonlats(area: PRGeometry, chunks: Union[int, str, tuple] = "auto") -> tuple[da.Array, da.Array]: with ignore_invalid_float_warnings(): lons, lats = area.get_lonlats(chunks=chunks) lons = da.where(lons >= 1e30, np.nan, lons) lats = da.where(lats >= 1e30, np.nan, lats) return lons, lats def _get_sun_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: lons, lats = _get_valid_lonlats(data_arr.attrs["area"], data_arr.data.chunks) suna = da.map_blocks(_get_sun_azimuth_ndarray, lons, lats, data_arr.attrs["start_time"], dtype=lons.dtype, meta=np.array((), dtype=lons.dtype), chunks=lons.chunks) cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) sunz = np.rad2deg(np.arccos(cos_sza)) suna = _geo_dask_to_data_array(suna) sunz = _geo_dask_to_data_array(sunz) return suna, sunz def _get_cos_sza(utc_time, lons, lats): cos_sza = da.map_blocks(_cos_zen_ndarray, lons, lats, utc_time, meta=np.array((), dtype=lons.dtype), dtype=lons.dtype, chunks=lons.chunks) return cos_sza def _cos_zen_ndarray(lons, lats, utc_time): with ignore_invalid_float_warnings(): return pyob_cos_zen(utc_time, lons, lats) def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: datetime) -> np.ndarray: with ignore_invalid_float_warnings(): suna = get_alt_az(start_time, lons, lats)[1] suna = np.rad2deg(suna) return suna def _get_sensor_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: sat_lon, sat_lat, sat_alt = get_satpos(data_arr) area_def = data_arr.attrs["area"] sata, satz = _get_sensor_angles_from_sat_pos(sat_lon, sat_lat, sat_alt, data_arr.attrs["start_time"], area_def, data_arr.data.chunks) sata = _geo_dask_to_data_array(sata) satz = _geo_dask_to_data_array(satz) return sata, satz @cache_to_zarr_if("cache_sensor_angles", sanitize_args_func=_sanitize_observer_look_args) def _get_sensor_angles_from_sat_pos(sat_lon, sat_lat, sat_alt, start_time, area_def, chunks): lons, lats = _get_valid_lonlats(area_def, chunks) res = da.map_blocks(_get_sensor_angles_ndarray, lons, lats, start_time, sat_lon, sat_lat, sat_alt, dtype=lons.dtype, meta=np.array((), dtype=lons.dtype), new_axis=[0], chunks=(2,) + lons.chunks) return res[0], res[1] def _get_sensor_angles_ndarray(lons, lats, start_time, sat_lon, sat_lat, sat_alt) -> np.ndarray: with ignore_invalid_float_warnings(): sata, satel = get_observer_look( sat_lon, sat_lat, sat_alt / 1000.0, # km start_time, lons, lats, 0) satz = 90 - satel return np.stack([sata, satz]) def sunzen_corr_cos(data: da.Array, cos_zen: da.Array, limit: float = 88., max_sza: Optional[float] = 95.) -> da.Array: """Perform Sun zenith angle correction. The correction is based on the provided cosine of the zenith angle (``cos_zen``). The correction is limited to ``limit`` degrees (default: 88.0 degrees). For larger zenith angles, the correction is the same as at the ``limit`` if ``max_sza`` is `None`. The default behavior is to gradually reduce the correction past ``limit`` degrees up to ``max_sza`` where the correction becomes 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape. 
""" return da.map_blocks(_sunzen_corr_cos_ndarray, data, cos_zen, limit, max_sza, meta=np.array((), dtype=data.dtype), chunks=data.chunks) def _sunzen_corr_cos_ndarray(data: np.ndarray, cos_zen: np.ndarray, limit: float, max_sza: Optional[float]) -> np.ndarray: # Convert the zenith angle limit to cosine of zenith angle limit_rad = np.deg2rad(limit) limit_cos = np.cos(limit_rad) max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza # Cosine correction corr = 1. / cos_zen if max_sza is not None: # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) # invert the factor so maximum correction is done at `limit` and falls off later grad_factor = 1. - np.log(grad_factor + 1) / np.log(2) # make sure we don't make anything negative grad_factor = grad_factor.clip(0.) else: # Use constant value (the limit) for larger zenith angles grad_factor = 1. corr = np.where(cos_zen > limit_cos, corr, grad_factor / limit_cos) # Force "night" pixels to 0 (where SZA is invalid) corr[np.isnan(cos_zen)] = 0 return data * corr satpy-0.34.0/satpy/modifiers/atmosphere.py000066400000000000000000000143571420401153000206000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modifiers related to atmospheric corrections or adjustments.""" import logging from weakref import WeakValueDictionary import dask.array as da import numpy as np import xarray as xr from satpy.modifiers import ModifierBase from satpy.modifiers._crefl import ReflectanceCorrector # noqa from satpy.modifiers.angles import get_angles, get_satellite_zenith_angle logger = logging.getLogger(__name__) class PSPRayleighReflectance(ModifierBase): """Pyspectral-based rayleigh corrector for visible channels.""" _rayleigh_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() def __call__(self, projectables, optional_datasets=None, **info): """Get the corrected reflectance when removing Rayleigh scattering. Uses pyspectral. """ from pyspectral.rayleigh import Rayleigh if not optional_datasets or len(optional_datasets) != 4: vis, red = self.match_data_arrays(projectables) sata, satz, suna, sunz = get_angles(vis) else: vis, red, sata, satz, suna, sunz = self.match_data_arrays( projectables + optional_datasets) # get the dask array underneath sata = sata.data satz = satz.data suna = suna.data sunz = sunz.data # First make sure the two azimuth angles are in the range 0-360: sata = sata % 360. suna = suna % 360. 
ssadiff = da.absolute(suna - sata) ssadiff = da.minimum(ssadiff, 360 - ssadiff) del sata, suna atmosphere = self.attrs.get('atmosphere', 'us-standard') aerosol_type = self.attrs.get('aerosol_type', 'marine_clean_aerosol') rayleigh_key = (vis.attrs['platform_name'], vis.attrs['sensor'], atmosphere, aerosol_type) logger.info("Removing Rayleigh scattering with atmosphere '%s' and " "aerosol type '%s' for '%s'", atmosphere, aerosol_type, vis.attrs['name']) if rayleigh_key not in self._rayleigh_cache: corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'], atmosphere=atmosphere, aerosol_type=aerosol_type) self._rayleigh_cache[rayleigh_key] = corrector else: corrector = self._rayleigh_cache[rayleigh_key] try: refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, vis.attrs['name'], red.data) except (KeyError, IOError): logger.warning("Could not get the reflectance correction using band name: %s", vis.attrs['name']) logger.warning("Will try use the wavelength, however, this may be ambiguous!") refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, vis.attrs['wavelength'][1], red.data) proj = vis - refl_cor_band proj.attrs = vis.attrs self.apply_modifier_info(vis, proj) return proj def _call_mapped_correction(satz, band_data, corrector, band_name): # need to convert to masked array orig_dtype = band_data.dtype band_data = np.ma.masked_where(np.isnan(band_data), band_data) res = corrector.get_correction(satz, band_name, band_data) return res.filled(np.nan).astype(orig_dtype, copy=False) class PSPAtmosphericalCorrection(ModifierBase): """Correct for atmospheric effects.""" def __call__(self, projectables, optional_datasets=None, **info): """Get the atmospherical correction. Uses pyspectral. """ from pyspectral.atm_correction_ir import AtmosphericalCorrection band = projectables[0] if optional_datasets: satz = optional_datasets[0] else: satz = get_satellite_zenith_angle(band) satz = satz.data # get dask array underneath logger.info('Correction for limb cooling') corrector = AtmosphericalCorrection(band.attrs['platform_name'], band.attrs['sensor']) atm_corr = da.map_blocks(_call_mapped_correction, satz, band.data, corrector=corrector, band_name=band.attrs['name'], meta=np.array((), dtype=band.dtype)) proj = xr.DataArray(atm_corr, attrs=band.attrs, dims=band.dims, coords=band.coords) self.apply_modifier_info(band, proj) return proj class CO2Corrector(ModifierBase): """CO2 correction of the brightness temperature of the MSG 3.9um channel. .. math:: T4_CO2corr = (BT(IR3.9)^4 + Rcorr)^0.25 Rcorr = BT(IR10.8)^4 - (BT(IR10.8)-dt_CO2)^4 dt_CO2 = (BT(IR10.8)-BT(IR13.4))/4.0 Derived from D. Rosenfeld, "CO2 Correction of Brightness Temperature of Channel IR3.9" References: - http://www.eumetrain.org/IntGuide/PowerPoints/Channels/conversion.ppt """ def __call__(self, projectables, optional_datasets=None, **info): """Apply correction.""" ir_039, ir_108, ir_134 = projectables logger.info('Applying CO2 correction') dt_co2 = (ir_108 - ir_134) / 4.0 rcorr = ir_108 ** 4 - (ir_108 - dt_co2) ** 4 t4_co2corr = (ir_039 ** 4 + rcorr).clip(0.0) ** 0.25 t4_co2corr.attrs = ir_039.attrs.copy() self.apply_modifier_info(ir_039, t4_co2corr) return t4_co2corr satpy-0.34.0/satpy/modifiers/base.py000066400000000000000000000031331420401153000173310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Base modifier classes and utilities.""" from satpy.composites import CompositeBase class ModifierBase(CompositeBase): """Base class for all modifiers. A modifier in Satpy is a class that takes one input DataArray to be changed along with zero or more other input DataArrays used to perform these changes. The result of a modifier typically has a lot of the same metadata (name, units, etc) as the original DataArray, but the data is different. A modified DataArray can be differentiated from the original DataArray by the `modifiers` property of its `DataID`. See the :class:`~satpy.composites.CompositeBase` class for information on the similar concept of "compositors". """ def __call__(self, datasets, optional_datasets=None, **info): """Generate a modified copy of the first provided dataset.""" raise NotImplementedError() satpy-0.34.0/satpy/modifiers/geometry.py000066400000000000000000000154111420401153000202540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modifier classes for corrections based on sun and other angles.""" from __future__ import annotations import logging import time from datetime import datetime from typing import Optional from weakref import WeakValueDictionary import numpy as np import xarray as xr from satpy.modifiers import ModifierBase from satpy.modifiers.angles import sunzen_corr_cos from satpy.utils import atmospheric_path_length_correction logger = logging.getLogger(__name__) class SunZenithCorrectorBase(ModifierBase): """Base class for sun zenith correction modifiers.""" coszen_cache: "WeakValueDictionary[tuple[datetime, str], Optional[xr.DataArray]]" = WeakValueDictionary() def __init__(self, max_sza=95.0, **kwargs): """Collect custom configuration values. Args: max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. Default 95.0. 
""" self.max_sza = max_sza self.max_sza_cos = np.cos(np.deg2rad(max_sza)) if max_sza is not None else None super(SunZenithCorrectorBase, self).__init__(**kwargs) def __call__(self, projectables, **info): """Generate the composite.""" projectables = self.match_data_arrays(list(projectables) + list(info.get('optional_datasets', []))) vis = projectables[0] if vis.attrs.get("sunz_corrected"): logger.debug("Sun zen correction already applied") return vis area_name = hash(vis.attrs['area']) key = (vis.attrs["start_time"], area_name) tic = time.time() logger.debug("Applying sun zen correction") coszen = self.coszen_cache.get(key) if coszen is None and not info.get('optional_datasets'): # we were not given SZA, generate cos(SZA) logger.debug("Computing sun zenith angles.") from .angles import get_cos_sza coszen = get_cos_sza(vis) if self.max_sza is not None: coszen = coszen.where(coszen >= self.max_sza_cos) self.coszen_cache[key] = coszen elif coszen is None: # we were given the SZA, calculate the cos(SZA) coszen = np.cos(np.deg2rad(projectables[1])) self.coszen_cache[key] = coszen proj = self._apply_correction(vis, coszen) proj.attrs = vis.attrs.copy() self.apply_modifier_info(vis, proj) logger.debug("Sun-zenith correction applied. Computation time: %5.1f (sec)", time.time() - tic) return proj def _apply_correction(self, proj, coszen): raise NotImplementedError("Correction method shall be defined!") class SunZenithCorrector(SunZenithCorrectorBase): """Standard sun zenith correction using ``1 / cos(sunz)``. In addition to adjusting the provided reflectances by the cosine of the solar zenith angle, this modifier forces all reflectances beyond a solar zenith angle of ``max_sza`` to 0. It also gradually reduces the amount of correction done between ``correction_limit`` and ``max_sza``. If ``max_sza`` is ``None`` then a constant correction is applied to zenith angles beyond ``correction_limit``. To set ``max_sza`` to ``None`` in a YAML configuration file use: .. code-block:: yaml sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector max_sza: !!null optional_prerequisites: - solar_zenith_angle """ def __init__(self, correction_limit=88., **kwargs): """Collect custom configuration values. Args: correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Pixels beyond this limit have a constant correction applied. Default 88. max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. Default 95.0. """ self.correction_limit = correction_limit super(SunZenithCorrector, self).__init__(**kwargs) def _apply_correction(self, proj, coszen): logger.debug("Apply the standard sun-zenith correction [1/cos(sunz)]") res = proj.copy() res.data = sunzen_corr_cos(proj.data, coszen.data, limit=self.correction_limit, max_sza=self.max_sza) return res class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase): """Special sun zenith correction with the method proposed by Li and Shibata. (2006): https://doi.org/10.1175/JAS3682.1 In addition to adjusting the provided reflectances by the cosine of the solar zenith angle, this modifier forces all reflectances beyond a solar zenith angle of `max_sza` to 0 to reduce noise in the final data. It also gradually reduces the amount of correction done between ``correction_limit`` and ``max_sza``. If ``max_sza`` is ``None`` then a constant correction is applied to zenith angles beyond ``correction_limit``. To set ``max_sza`` to ``None`` in a YAML configuration file use: .. 
code-block:: yaml effective_solar_pathlength_corrected: modifier: !!python/name:satpy.modifiers.EffectiveSolarPathLengthCorrector max_sza: !!null optional_prerequisites: - solar_zenith_angle """ def __init__(self, correction_limit=88., **kwargs): """Collect custom configuration values. Args: correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Pixels beyond this limit have a constant correction applied. Default 88. max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. Default 95.0. """ self.correction_limit = correction_limit super(EffectiveSolarPathLengthCorrector, self).__init__(**kwargs) def _apply_correction(self, proj, coszen): logger.debug("Apply the effective solar atmospheric path length correction method by Li and Shibata") return atmospheric_path_length_correction(proj, coszen, limit=self.correction_limit, max_sza=self.max_sza) satpy-0.34.0/satpy/modifiers/spectral.py000066400000000000000000000177151420401153000202470ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modifier classes dealing with spectral domain changes or corrections.""" import logging import xarray as xr from satpy.modifiers import ModifierBase try: from pyspectral.near_infrared_reflectance import Calculator except ImportError: Calculator = None try: from pyorbital.astronomy import sun_zenith_angle except ImportError: sun_zenith_angle = None logger = logging.getLogger(__name__) class NIRReflectance(ModifierBase): """Get the reflective part of NIR bands.""" TERMINATOR_LIMIT = 85.0 MASKING_LIMIT = 88.0 def __init__(self, sunz_threshold=TERMINATOR_LIMIT, masking_limit=MASKING_LIMIT, **kwargs): """Collect custom configuration values. Args: sunz_threshold: The threshold sun zenith angle used when deriving the near infrared reflectance. Above this angle the derivation will assume this sun-zenith everywhere. Unless overridden, the default threshold of 85.0 degrees will be used. masking_limit: Mask the data (set to NaN) above this Sun zenith angle. By default the limit is at 88.0 degrees. If set to `None`, no masking is done. """ self.sun_zenith_threshold = sunz_threshold self.masking_limit = masking_limit super(NIRReflectance, self).__init__(**kwargs) def __call__(self, projectables, optional_datasets=None, **info): """Get the reflectance part of an NIR channel. Not supposed to be used for wavelength outside [3, 4] µm. 
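        This modifier is normally configured through composite/modifier YAML
        files, but a direct call is also possible. A minimal sketch, where
        ``nir39`` and ``tb11`` stand for already-loaded 3.9 µm and 11 µm
        DataArrays (hypothetical variables carrying the usual ``area``,
        ``start_time``, ``platform_name``, ``sensor`` and ``name`` attributes):

        .. code-block:: python

            from satpy.modifiers.spectral import NIRReflectance

            modifier = NIRReflectance(name='nir_reflectance', sunz_threshold=85.0)
            refl_39 = modifier([nir39, tb11], optional_datasets=[])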
""" projectables = self.match_data_arrays(projectables) return self._get_reflectance_as_dataarray(projectables, optional_datasets) def _get_reflectance_as_dataarray(self, projectables, optional_datasets): """Get the reflectance as a dataarray.""" _nir, _tb11 = projectables da_nir = _nir.data da_tb11 = _tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) logger.info('Getting reflective part of %s', _nir.attrs['name']) reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) proj = self._create_modified_dataarray(reflectance, base_dataarray=_nir) proj.attrs['units'] = '%' return proj @staticmethod def _get_tb13_4_from_optionals(optional_datasets): tb13_4 = None for dataset in optional_datasets: wavelengths = dataset.attrs.get('wavelength', [100., 0, 0]) if (dataset.attrs.get('units') == 'K' and wavelengths[0] <= 13.4 <= wavelengths[2]): tb13_4 = dataset.data return tb13_4 @staticmethod def _get_sun_zenith_from_provided_data(projectables, optional_datasets): """Get the sunz from available data or compute it if unavailable.""" sun_zenith = None for dataset in optional_datasets: if dataset.attrs.get("standard_name") == "solar_zenith_angle": sun_zenith = dataset.data if sun_zenith is None: if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") _nir = projectables[0] lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks) sun_zenith = sun_zenith_angle(_nir.attrs['start_time'], lons, lats) return sun_zenith def _create_modified_dataarray(self, reflectance, base_dataarray): proj = xr.DataArray(reflectance, dims=base_dataarray.dims, coords=base_dataarray.coords, attrs=base_dataarray.attrs.copy()) proj.attrs['sun_zenith_threshold'] = self.sun_zenith_threshold proj.attrs['sun_zenith_masking_limit'] = self.masking_limit self.apply_modifier_info(base_dataarray, proj) return proj def _get_reflectance_as_dask(self, da_nir, da_tb11, da_tb13_4, da_sun_zenith, metadata): """Calculate 3.x reflectance in % with pyspectral from dask arrays.""" reflectance_3x_calculator = self._init_reflectance_calculator(metadata) return reflectance_3x_calculator.reflectance_from_tbs(da_sun_zenith, da_nir, da_tb11, tb_ir_co2=da_tb13_4) * 100 def _init_reflectance_calculator(self, metadata): """Initialize the 3.x reflectance derivations.""" if not Calculator: logger.info("Couldn't load pyspectral") raise ImportError("No module named pyspectral.near_infrared_reflectance") reflectance_3x_calculator = Calculator(metadata['platform_name'], metadata['sensor'], metadata['name'], sunz_threshold=self.sun_zenith_threshold, masking_limit=self.masking_limit) return reflectance_3x_calculator class NIREmissivePartFromReflectance(NIRReflectance): """Get the emissive part of NIR bands.""" def __init__(self, sunz_threshold=None, **kwargs): """Collect custom configuration values. Args: sunz_threshold: The threshold sun zenith angle used when deriving the near infrared reflectance. Above this angle the derivation will assume this sun-zenith everywhere. Default None, in which case the default threshold defined in Pyspectral will be used. """ self.sunz_threshold = sunz_threshold super(NIREmissivePartFromReflectance, self).__init__(sunz_threshold=sunz_threshold, **kwargs) def __call__(self, projectables, optional_datasets=None, **info): """Get the emissive part an NIR channel after having derived the reflectance. 
Not supposed to be used for wavelength outside [3, 4] µm. """ projectables = self.match_data_arrays(projectables) return self._get_emissivity_as_dataarray(projectables, optional_datasets) def _get_emissivity_as_dataarray(self, projectables, optional_datasets): """Get the emissivity as a dataarray.""" _nir, _tb11 = projectables da_nir = _nir.data da_tb11 = _tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) logger.info('Getting emissive part of %s', _nir.attrs['name']) emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) proj = self._create_modified_dataarray(emissivity, base_dataarray=_nir) proj.attrs['units'] = 'K' return proj def _get_emissivity_as_dask(self, da_nir, da_tb11, da_tb13_4, da_sun_zenith, metadata): """Get the emissivity from pyspectral.""" reflectance_3x_calculator = self._init_reflectance_calculator(metadata) # Use the nir and thermal ir brightness temperatures and derive the reflectance using # PySpectral. The reflectance is stored internally in PySpectral and # needs to be derived first in order to get the emissive part. reflectance_3x_calculator.reflectance_from_tbs(da_sun_zenith, da_nir, da_tb11, tb_ir_co2=da_tb13_4) return reflectance_3x_calculator.emissive_part_3x() satpy-0.34.0/satpy/multiscene.py000066400000000000000000000665411420401153000166220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""MultiScene object to work with multiple timesteps of satellite data.""" import copy import logging import warnings from queue import Queue from threading import Thread import dask.array as da import numpy as np import pandas as pd import xarray as xr from satpy.dataset import DataID, combine_metadata from satpy.scene import Scene from satpy.writers import get_enhanced_image, split_results try: import imageio except ImportError: imageio = None try: from dask.distributed import get_client except ImportError: get_client = None log = logging.getLogger(__name__) def stack(datasets): """Overlay series of datasets on top of each other.""" base = datasets[0].copy() for dataset in datasets[1:]: base = base.where(dataset.isnull(), dataset) return base def timeseries(datasets): """Expand dataset with and concatenate by time dimension.""" expanded_ds = [] for ds in datasets: if 'time' not in ds.dims: tmp = ds.expand_dims("time") tmp.coords["time"] = pd.DatetimeIndex([ds.attrs["start_time"]]) else: tmp = ds expanded_ds.append(tmp) res = xr.concat(expanded_ds, dim="time") res.attrs = combine_metadata(*[x.attrs for x in expanded_ds]) return res def add_group_aliases(scenes, groups): """Add aliases for the groups datasets belong to.""" for scene in scenes: scene = scene.copy() for group_id, member_names in groups.items(): # Find out whether one of the datasets in this scene belongs # to this group member_ids = [scene[name].attrs['_satpy_id'] for name in member_names if name in scene] # Add an alias for the group it belongs to if len(member_ids) == 1: member_id = member_ids[0] new_ds = scene[member_id].copy() new_ds.attrs.update(group_id.to_dict()) scene[group_id] = new_ds elif len(member_ids) > 1: raise ValueError('Cannot add multiple datasets from the same ' 'scene to a group') else: # Datasets in this scene don't belong to any group pass yield scene class _SceneGenerator(object): """Fancy way of caching Scenes from a generator.""" def __init__(self, scene_gen): self._scene_gen = scene_gen self._scene_cache = [] self._dataset_idx = {} # this class itself is not an iterator, make one self._self_iter = self._create_cached_iter() @property def first(self): """First element in the generator.""" return next(iter(self)) def _create_cached_iter(self): """Iterate over the provided scenes, caching them for later.""" for scn in self._scene_gen: self._scene_cache.append(scn) yield scn def __iter__(self): """Iterate over the provided scenes, caching them for later.""" idx = 0 while True: if idx >= len(self._scene_cache): try: scn = next(self._self_iter) except StopIteration: return else: scn = self._scene_cache[idx] yield scn idx += 1 def __getitem__(self, ds_id): """Get a specific dataset from the scenes.""" for scn in self: yield scn.get(ds_id) class MultiScene(object): """Container for multiple `Scene` objects.""" def __init__(self, scenes=None): """Initialize MultiScene and validate sub-scenes. Args: scenes (iterable): `Scene` objects to operate on (optional) .. note:: If the `scenes` passed to this object are a generator then certain operations performed will try to preserve that generator state. This may limit what properties or methods are available to the user. To avoid this behavior compute the passed generator by converting the passed scenes to a list first: ``MultiScene(list(scenes))``. 
""" self._scenes = scenes or [] scenes = iter(self._scenes) self._scene_gen = _SceneGenerator(iter(scenes)) # if we were originally given a generator-like object then we want to # coordinate the loading between _SceneGenerator and _scenes # otherwise it doesn't really matter and other operations may prefer # a list if not isinstance(scenes, (list, tuple)): self._scenes = iter(self._scene_gen) @property def first_scene(self): """First Scene of this MultiScene object.""" return self._scene_gen.first @classmethod def from_files(cls, files_to_sort, reader=None, ensure_all_readers=False, scene_kwargs=None, **kwargs): """Create multiple Scene objects from multiple files. Args: files_to_sort (Collection[str]): files to read reader (str or Collection[str]): reader or readers to use ensure_all_readers (bool): If True, limit to scenes where all readers have at least one file. If False (default), include all scenes where at least one reader has at least one file. scene_kwargs (Mapping): additional arguments to pass on to :func:`Scene.__init__` for each created scene. This uses the :func:`satpy.readers.group_files` function to group files. See this function for more details on additional possible keyword arguments. In particular, it is strongly recommended to pass `"group_keys"` when using multiple instruments. .. versionadded:: 0.12 """ from satpy.readers import group_files if scene_kwargs is None: scene_kwargs = {} file_groups = group_files(files_to_sort, reader=reader, **kwargs) if ensure_all_readers: warnings.warn( "Argument ensure_all_readers is deprecated. Use " "missing='skip' instead.", DeprecationWarning) file_groups = [fg for fg in file_groups if all(fg.values())] scenes = (Scene(filenames=fg, **scene_kwargs) for fg in file_groups) return cls(scenes) def __iter__(self): """Iterate over the provided Scenes once.""" for scn in self._scenes: yield scn @property def scenes(self): """Get list of Scene objects contained in this MultiScene. .. 
note:: If the Scenes contained in this object are stored in a generator (not list or tuple) then accessing this property will load/iterate through the generator possibly """ if self.is_generator: log.debug("Forcing iteration of generator-like object of Scenes") self._scenes = list(self._scenes) return self._scenes @property def is_generator(self): """Contained Scenes are stored as a generator.""" return not isinstance(self._scenes, (list, tuple)) @property def loaded_dataset_ids(self): """Union of all Dataset IDs loaded by all children.""" return set(ds_id for scene in self.scenes for ds_id in scene.keys()) @property def shared_dataset_ids(self): """Dataset IDs shared by all children.""" shared_ids = set(self.scenes[0].keys()) for scene in self.scenes[1:]: shared_ids &= set(scene.keys()) return shared_ids def _all_same_area(self, dataset_ids): """Return True if all areas for the provided IDs are equal.""" all_areas = [] for ds_id in dataset_ids: for scn in self.scenes: ds = scn.get(ds_id) if ds is None: continue all_areas.append(ds.attrs.get('area')) all_areas = [area for area in all_areas if area is not None] return all(all_areas[0] == area for area in all_areas[1:]) @property def all_same_area(self): """Determine if all contained Scenes have the same 'area'.""" return self._all_same_area(self.loaded_dataset_ids) @staticmethod def _call_scene_func(gen, func_name, create_new_scene, *args, **kwargs): """Abstract method for running a Scene method on each Scene.""" for scn in gen: new_scn = getattr(scn, func_name)(*args, **kwargs) if create_new_scene: yield new_scn else: yield scn def _generate_scene_func(self, gen, func_name, create_new_scene, *args, **kwargs): """Abstract method for running a Scene method on each Scene. Additionally, modifies current MultiScene or creates a new one if needed. """ new_gen = self._call_scene_func(gen, func_name, create_new_scene, *args, **kwargs) new_gen = new_gen if self.is_generator else list(new_gen) if create_new_scene: return self.__class__(new_gen) self._scene_gen = _SceneGenerator(new_gen) self._scenes = iter(self._scene_gen) def load(self, *args, **kwargs): """Load the required datasets from the multiple scenes.""" self._generate_scene_func(self._scenes, 'load', False, *args, **kwargs) def crop(self, *args, **kwargs): """Crop the multiscene and return a new cropped multiscene.""" return self._generate_scene_func(self._scenes, 'crop', True, *args, **kwargs) def resample(self, destination=None, **kwargs): """Resample the multiscene.""" return self._generate_scene_func(self._scenes, 'resample', True, destination=destination, **kwargs) def blend(self, blend_function=stack): """Blend the datasets into one scene. Reduce the :class:`MultiScene` to a single :class:`~satpy.scene.Scene`. Datasets occurring in each scene will be passed to a blending function, which shall take as input a list of datasets (:class:`xarray.DataArray` objects) and shall return a single dataset (:class:`xarray.DataArray` object). The blend method then assigns those datasets to the blended scene. Blending functions provided in this module are :func:`stack` (the default) and :func:`timeseries`, but the Python built-in function :func:`sum` also works and may be appropriate for some types of data. .. note:: Blending is not currently optimized for generator-based MultiScene. 
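        A minimal sketch of the typical flow (the file list, reader name and
        channel name are hypothetical), assuming the scenes are resampled to a
        common area before stacking:

        .. code-block:: python

            from satpy import MultiScene
            from satpy.multiscene import timeseries

            mscn = MultiScene.from_files(my_files, reader='seviri_l1b_hrit')
            mscn.load(['IR_108'])
            resampled = mscn.resample('eurol')
            blended_scn = resampled.blend()                           # stack (default)
            time_series = resampled.blend(blend_function=timeseries)  # concatenate along time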
""" new_scn = Scene() common_datasets = self.shared_dataset_ids for ds_id in common_datasets: datasets = [scn[ds_id] for scn in self.scenes if ds_id in scn] new_scn[ds_id] = blend_function(datasets) return new_scn def group(self, groups): """Group datasets from the multiple scenes. By default, `MultiScene` only operates on dataset IDs shared by all scenes. Using this method you can specify groups of datasets that shall be treated equally by `MultiScene`. Even if their dataset IDs differ (for example because the names or wavelengths are slightly different). Groups can be specified as a dictionary `{group_id: dataset_names}` where the keys must be of type `DataQuery`, for example:: groups={ DataQuery('my_group', wavelength=(10, 11, 12)): ['IR_108', 'B13', 'C13'] } """ self._scenes = add_group_aliases(self._scenes, groups) def _distribute_save_datasets(self, scenes_iter, client, batch_size=1, **kwargs): """Distribute save_datasets across a cluster.""" def load_data(q): idx = 0 while True: future_list = q.get() if future_list is None: break # save_datasets shouldn't be returning anything for future in future_list: future.result() log.info("Finished saving %d scenes", idx) idx += 1 q.task_done() input_q = Queue(batch_size if batch_size is not None else 1) # set threads to daemon so they are killed if error is raised from main thread load_thread = Thread(target=load_data, args=(input_q,), daemon=True) load_thread.start() for scene in scenes_iter: delayeds = scene.save_datasets(compute=False, **kwargs) sources, targets, delayeds = split_results(delayeds) if len(sources) > 0: # TODO Make this work for (source, target) datasets # given a target, source combination raise NotImplementedError("Distributed save_datasets does not support writers " "that return (source, target) combinations at this time. Use " "the non-distributed save_datasets instead.") future = client.compute(delayeds) input_q.put(future) input_q.put(None) log.debug("Waiting for child thread to get saved results...") load_thread.join() log.debug("Child thread died successfully") def _simple_save_datasets(self, scenes_iter, **kwargs): """Run save_datasets on each Scene.""" for scn in scenes_iter: scn.save_datasets(**kwargs) def save_datasets(self, client=True, batch_size=1, **kwargs): """Run save_datasets on each Scene. Note that some writers may not be multi-process friendly and may produce unexpected results or fail by raising an exception. In these cases ``client`` should be set to ``False``. This is currently a known issue for basic 'geotiff' writer work loads. Args: batch_size (int): Number of scenes to compute at the same time. This only has effect if the `dask.distributed` package is installed. This will default to 1. Setting this to 0 or less will attempt to process all scenes at once. This option should be used with care to avoid memory issues when trying to improve performance. client (bool or dask.distributed.Client): Dask distributed client to use for computation. If this is ``True`` (default) then any existing clients will be used. If this is ``False`` or ``None`` then a client will not be created and ``dask.distributed`` will not be used. If this is a dask ``Client`` object then it will be used for distributed computation. kwargs: Additional keyword arguments to pass to :meth:`~satpy.scene.Scene.save_datasets`. Note ``compute`` can not be provided. 
""" if 'compute' in kwargs: raise ValueError("The 'compute' keyword argument can not be provided.") client = self._get_client(client=client) scenes = iter(self._scenes) if client is not None: self._distribute_save_datasets(scenes, client, batch_size=batch_size, **kwargs) else: self._simple_save_datasets(scenes, **kwargs) def _get_animation_info(self, all_datasets, filename, fill_value=None): """Determine filename and shape of animation to be created.""" valid_datasets = [ds for ds in all_datasets if ds is not None] first_dataset = valid_datasets[0] last_dataset = valid_datasets[-1] first_img = get_enhanced_image(first_dataset) first_img_data = first_img.finalize(fill_value=fill_value)[0] shape = tuple(first_img_data.sizes.get(dim_name) for dim_name in ('y', 'x', 'bands')) if fill_value is None and filename.endswith('gif'): log.warning("Forcing fill value to '0' for GIF Luminance images") fill_value = 0 shape = shape[:2] attrs = first_dataset.attrs.copy() if 'end_time' in last_dataset.attrs: attrs['end_time'] = last_dataset.attrs['end_time'] this_fn = filename.format(**attrs) return this_fn, shape, fill_value @staticmethod def _format_decoration(ds, decorate): """Maybe format decoration. If the nested dictionary in decorate (argument to ``save_animation``) contains a text to be added, format those based on dataset parameters. """ if decorate is None or "decorate" not in decorate: return decorate deco_local = copy.deepcopy(decorate) for deco in deco_local["decorate"]: if "text" in deco and "txt" in deco["text"]: deco["text"]["txt"] = deco["text"]["txt"].format(**ds.attrs) return deco_local def _get_single_frame(self, ds, enh_args, fill_value): """Get single frame from dataset. Yet a single image frame from a dataset. """ enh_args = enh_args.copy() # don't change caller's dict! if "decorate" in enh_args: enh_args["decorate"] = self._format_decoration( ds, enh_args["decorate"]) img = get_enhanced_image(ds, **enh_args) data, mode = img.finalize(fill_value=fill_value) if data.ndim == 3: # assume all other shapes are (y, x) # we need arrays grouped by pixel so # transpose if needed data = data.transpose('y', 'x', 'bands') return data def _get_animation_frames(self, all_datasets, shape, fill_value=None, ignore_missing=False, enh_args=None): """Create enhanced image frames to save to a file.""" if enh_args is None: enh_args = {} for idx, ds in enumerate(all_datasets): if ds is None and ignore_missing: continue elif ds is None: log.debug("Missing frame: %d", idx) data = da.zeros(shape, dtype=np.uint8, chunks=shape) data = xr.DataArray(data) else: data = self._get_single_frame(ds, enh_args, fill_value) yield data.data def _get_client(self, client=True): """Determine what dask distributed client to use.""" client = client or None # convert False/None to None if client is True and get_client is None: log.debug("'dask.distributed' library was not found, will " "use simple serial processing.") client = None elif client is True: try: # get existing client client = get_client() except ValueError: log.warning("No dask distributed client was provided or found, " "but distributed features were requested. 
Will use simple serial processing.") client = None return client def _distribute_frame_compute(self, writers, frame_keys, frames_to_write, client, batch_size=1): """Use ``dask.distributed`` to compute multiple frames at a time.""" def load_data(frame_gen, q): for frame_arrays in frame_gen: future_list = client.compute(frame_arrays) for frame_key, arr_future in zip(frame_keys, future_list): q.put({frame_key: arr_future}) q.put(None) input_q = Queue(batch_size if batch_size is not None else 1) load_thread = Thread(target=load_data, args=(frames_to_write, input_q,)) load_thread.start() while True: input_future = input_q.get() future_dict = client.gather(input_future) if future_dict is None: break # write the current frame # this should only be one element in the dictionary, but this is # also the easiest way to get access to the data for frame_key, result in future_dict.items(): # frame_key = rev_future_dict[future] w = writers[frame_key] w.append_data(result) input_q.task_done() log.debug("Waiting for child thread...") load_thread.join(10) if load_thread.is_alive(): import warnings warnings.warn("Background thread still alive after failing to die gracefully") else: log.debug("Child thread died successfully") @staticmethod def _simple_frame_compute(writers, frame_keys, frames_to_write): """Compute frames the plain dask way.""" for frame_arrays in frames_to_write: for frame_key, product_frame in zip(frame_keys, frame_arrays): w = writers[frame_key] w.append_data(product_frame.compute()) def _get_writers_and_frames( self, filename, datasets, fill_value, ignore_missing, enh_args, imio_args): """Get writers and frames. Helper function for save_animation. """ scene_gen = self._scene_gen first_scene = self.first_scene scenes = iter(self._scene_gen) info_scenes = [first_scene] if 'end_time' in filename: # if we need the last scene to generate the filename # then compute all the scenes so we can figure it out log.debug("Generating scenes to compute end_time for filename") scenes = list(scenes) info_scenes.append(scenes[-1]) available_ds = [first_scene.get(ds) for ds in first_scene.wishlist] available_ds = [DataID.from_dataarray(ds) for ds in available_ds if ds is not None] dataset_ids = datasets or available_ds if not dataset_ids: raise RuntimeError("No datasets found for saving (resampling may be needed to generate composites)") writers = {} frames = {} for dataset_id in dataset_ids: if not self.is_generator and not self._all_same_area([dataset_id]): raise ValueError("Sub-scene datasets must all be on the same " "area (see the 'resample' method).") all_datasets = scene_gen[dataset_id] info_datasets = [scn.get(dataset_id) for scn in info_scenes] this_fn, shape, this_fill = self._get_animation_info(info_datasets, filename, fill_value=fill_value) data_to_write = self._get_animation_frames( all_datasets, shape, this_fill, ignore_missing, enh_args) writer = imageio.get_writer(this_fn, **imio_args) frames[dataset_id] = data_to_write writers[dataset_id] = writer return (writers, frames) def save_animation(self, filename, datasets=None, fps=10, fill_value=None, batch_size=1, ignore_missing=False, client=True, enh_args=None, **kwargs): """Save series of Scenes to movie (MP4) or GIF formats. Supported formats are dependent on the `imageio` library and are determined by filename extension by default. .. note:: Starting with ``imageio`` 2.5.0, the use of FFMPEG depends on a separate ``imageio-ffmpeg`` package. 
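        A minimal call might look like the following (the filename pattern is
        only an example; any ``.attrs`` key of the datasets can be used):

        .. code-block:: python

            mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2)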
By default all datasets available will be saved to individual files using the first Scene's datasets metadata to format the filename provided. If a dataset is not available from a Scene then a black array is used instead (np.zeros(shape)). This function can use the ``dask.distributed`` library for improved performance by computing multiple frames at a time (see `batch_size` option below). If the distributed library is not available then frames will be generated one at a time, one product at a time. Args: filename (str): Filename to save to. Can include python string formatting keys from dataset ``.attrs`` (ex. "{name}_{start_time:%Y%m%d_%H%M%S.gif") datasets (list): DataIDs to save (default: all datasets) fps (int): Frames per second for produced animation fill_value (int): Value to use instead creating an alpha band. batch_size (int): Number of frames to compute at the same time. This only has effect if the `dask.distributed` package is installed. This will default to 1. Setting this to 0 or less will attempt to process all frames at once. This option should be used with care to avoid memory issues when trying to improve performance. Note that this is the total number of frames for all datasets, so when saving 2 datasets this will compute ``(batch_size / 2)`` frames for the first dataset and ``(batch_size / 2)`` frames for the second dataset. ignore_missing (bool): Don't include a black frame when a dataset is missing from a child scene. client (bool or dask.distributed.Client): Dask distributed client to use for computation. If this is ``True`` (default) then any existing clients will be used. If this is ``False`` or ``None`` then a client will not be created and ``dask.distributed`` will not be used. If this is a dask ``Client`` object then it will be used for distributed computation. enh_args (Mapping): Optional, arguments passed to :func:`satpy.writers.get_enhanced_image`. If this includes a keyword "decorate", in any text added to the image, string formatting will be applied based on dataset attributes. For example, passing ``enh_args={"decorate": {"decorate": [{"text": {"txt": "{start_time:%H:%M}"}}]}`` will replace the decorated text accordingly. kwargs: Additional keyword arguments to pass to `imageio.get_writer`. """ if imageio is None: raise ImportError("Missing required 'imageio' library") (writers, frames) = self._get_writers_and_frames( filename, datasets, fill_value, ignore_missing, enh_args, imio_args={"fps": fps, **kwargs}) client = self._get_client(client=client) # get an ordered list of frames frame_keys, frames_to_write = list(zip(*frames.items())) frames_to_write = zip(*frames_to_write) if client is not None: self._distribute_frame_compute(writers, frame_keys, frames_to_write, client, batch_size=batch_size) else: self._simple_frame_compute(writers, frame_keys, frames_to_write) for writer in writers.values(): writer.close() satpy-0.34.0/satpy/node.py000066400000000000000000000155651420401153000153770ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Nodes to build trees.""" from satpy.utils import get_logger LOG = get_logger(__name__) # Empty leaf used for marking composites with no prerequisites EMPTY_LEAF_NAME = "__EMPTY_LEAF_SENTINEL__" class MissingDependencies(RuntimeError): """Exception when dependencies are missing.""" def __init__(self, missing_dependencies, *args, **kwargs): """Set up the exception.""" super().__init__(*args, **kwargs) self.missing_dependencies = missing_dependencies def __str__(self): """Return the string representation of the exception.""" prefix = super().__str__() unknown_str = ", ".join(map(str, self.missing_dependencies)) return "{} {}".format(prefix, unknown_str) class Node: """A node object.""" def __init__(self, name, data=None): """Init the node object.""" self.name = name self.data = data self.children = [] self.parents = [] def update_name(self, new_name): """Update 'name' property.""" self.name = new_name @property def is_leaf(self): """Check if the node is a leaf.""" return not self.children def flatten(self, d=None): """Flatten tree structure to a one level dictionary. Args: d (dict, optional): output dictionary to update Returns: dict: Node.name -> Node. The returned dictionary includes the current Node and all its children. """ if d is None: d = {} if self.name is not None: d[self.name] = self for child in self.children: child.flatten(d=d) return d def copy(self, node_cache=None): """Make a copy of the node.""" if node_cache and self.name in node_cache: return node_cache[self.name] if self.name is EMPTY_LEAF_NAME: return self s = self._copy_name_and_data(node_cache) for c in self.children: c = c.copy(node_cache=node_cache) s.add_child(c) if node_cache is not None: node_cache[s.name] = s return s def _copy_name_and_data(self, node_cache=None): return Node(self.name, self.data) def add_child(self, obj): """Add a child to the node.""" self.children.append(obj) obj.parents.append(self) def __str__(self): """Display the node.""" return self.display() def __repr__(self): """Generate a representation of the node.""" return "<{} ({})>".format(self.__class__.__name__, repr(self.name)) def __eq__(self, other): """Check equality.""" return self.name == other.name def __hash__(self): """Generate the hash of the node.""" return hash(self.name) def display(self, previous=0, include_data=False): """Display the node.""" no_data = " (No Data)" if self.data is None else "" return ( (" +" * previous) + str(self.name) + no_data + '\n' + ''.join([child.display(previous + 1) for child in self.children])) def leaves(self, unique=True): """Get the leaves of the tree starting at this root.""" if self.name is EMPTY_LEAF_NAME: return [] elif not self.children: return [self] res = list() for child in self.children: for sub_child in child.leaves(unique=unique): if not unique or sub_child not in res: res.append(sub_child) return res def trunk(self, unique=True, limit_children_to=None): """Get the trunk of the tree starting at this root.""" # FIXME: uniqueness is not correct in `trunk` yet unique = False res = [] if self.children and self.name is not EMPTY_LEAF_NAME: if self.name is not None: res.append(self) if limit_children_to is not None and self.name in limit_children_to: return res for child in self.children: for sub_child in child.trunk(unique=unique, limit_children_to=limit_children_to): if not unique or sub_child not in res: res.append(sub_child) return res 
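# Illustrative sketch (not part of the library API): how a tiny dependency
# tree could be assembled and inspected with ``Node``. The names are made up.
#
#     root = Node("composite")
#     child = Node("channel_1")
#     root.add_child(child)
#     root.flatten()   # {"composite": root, "channel_1": child}
#     root.leaves()    # [child]
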
class CompositorNode(Node): """Implementation of a compositor-specific node.""" def __init__(self, compositor): """Set up the node.""" super().__init__(compositor.id, data=(compositor, [], [])) def add_required_nodes(self, children): """Add nodes to the required field.""" self.data[1].extend(children) @property def required_nodes(self): """Get the required nodes.""" return self.data[1] def add_optional_nodes(self, children): """Add nodes to the optional field.""" self.data[2].extend(children) @property def optional_nodes(self): """Get the optional nodes.""" return self.data[2] @property def compositor(self): """Get the compositor.""" return self.data[0] def _copy_name_and_data(self, node_cache=None): new_node = CompositorNode(self.compositor) new_required_nodes = [node.copy(node_cache) for node in self.required_nodes] new_node.add_required_nodes(new_required_nodes) new_optional_nodes = [node.copy(node_cache) for node in self.optional_nodes] new_node.add_optional_nodes(new_optional_nodes) # `comp.id` uses the compositor's attributes to compute itself # however, this node may have been updated by creation of the # composite. In order to not modify the compositor's attrs, we # overwrite the name here instead. new_node.name = self.name return new_node class ReaderNode(Node): """Implementation of a storage-based node.""" def __init__(self, unique_id, reader_name): """Set up the node.""" super().__init__(unique_id, data={'reader_name': reader_name}) def _copy_name_and_data(self, node_cache): return ReaderNode(self.name, self.data['reader_name']) @property def reader_name(self): """Get the name of the reader.""" return self.data['reader_name'] satpy-0.34.0/satpy/plugin_base.py000066400000000000000000000052311420401153000167270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Classes and utilities for defining generic "plugin" components.""" import logging import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader # type: ignore from satpy._config import config_search_paths from satpy.utils import recursive_dict_update LOG = logging.getLogger(__name__) class Plugin(object): """Base plugin class for all dynamically loaded and configured objects.""" def __init__(self, default_config_filename=None, config_files=None, **kwargs): """Load configuration files related to this plugin. This initializes a `self.config` dictionary that can be used to customize the subclass. Args: default_config_filename (str): Configuration filename to use if no other files have been specified with `config_files`. config_files (list or str): Configuration files to load instead of those automatically found in `SATPY_CONFIG_PATH` and other default configuration locations. kwargs (dict): Unused keyword arguments. 
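        A minimal subclassing sketch (the configuration file name and the
        option key are purely hypothetical):

        .. code-block:: python

            class MyPlugin(Plugin):
                def __init__(self, **kwargs):
                    super().__init__(default_config_filename="my_plugin.yaml", **kwargs)
                    self.my_option = self.config.get("my_option")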
""" self.default_config_filename = default_config_filename self.config_files = config_files if self.config_files is None and self.default_config_filename is not None: # Specify a default self.config_files = config_search_paths(self.default_config_filename) if not isinstance(self.config_files, (list, tuple)): self.config_files = [self.config_files] self.config = {} if self.config_files: for config_file in self.config_files: self.load_yaml_config(config_file) def load_yaml_config(self, conf): """Load a YAML configuration file and recursively update the overall configuration.""" with open(conf, 'r', encoding='utf-8') as fd: self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) satpy-0.34.0/satpy/readers/000077500000000000000000000000001420401153000155115ustar00rootroot00000000000000satpy-0.34.0/satpy/readers/__init__.py000066400000000000000000000734111420401153000176300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Shared objects of the various reader classes.""" from __future__ import annotations import logging import os import pickle import warnings from datetime import datetime, timedelta from functools import total_ordering import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader # type: ignore from satpy._config import config_search_paths, glob_config from .yaml_reader import AbstractYAMLReader from .yaml_reader import load_yaml_configs as load_yaml_reader_configs LOG = logging.getLogger(__name__) # Old Name -> New Name PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc'} OLD_READER_NAMES: dict[str, str] = {} def group_files(files_to_sort, reader=None, time_threshold=10, group_keys=None, reader_kwargs=None, missing="pass"): """Group series of files by file pattern information. By default this will group files by their filename ``start_time`` assuming it exists in the pattern. By passing the individual dictionaries returned by this function to the Scene classes' ``filenames``, a series `Scene` objects can be easily created. Args: files_to_sort (iterable): File paths to sort in to group reader (str or Collection[str]): Reader or readers whose file patterns should be used to sort files. If not given, try all readers (slow, adding a list of readers is strongly recommended). time_threshold (int): Number of seconds used to consider time elements in a group as being equal. For example, if the 'start_time' item is used to group files then any time within `time_threshold` seconds of the first file's 'start_time' will be seen as occurring at the same time. group_keys (list or tuple): File pattern information to use to group files. Keys are sorted in order and only the first key is used when comparing datetime elements with `time_threshold` (see above). 
This means it is recommended that datetime values should only come from the first key in ``group_keys``. Otherwise, there is a good chance that files will not be grouped properly (datetimes being barely unequal). Defaults to a reader's ``group_keys`` configuration (set in YAML), otherwise ``('start_time',)``. When passing multiple readers, passing group_keys is strongly recommended as the behaviour without doing so is undefined. reader_kwargs (dict): Additional keyword arguments to pass to reader creation. missing (str): Parameter to control the behavior in the scenario where multiple readers were passed, but at least one group does not have files associated with every reader. Valid values are ``"pass"`` (the default), ``"skip"``, and ``"raise"``. If set to ``"pass"``, groups are passed as-is. Some groups may have zero files for some readers. If set to ``"skip"``, groups for which one or more readers have zero files are skipped (meaning that some files may not be associated to any group). If set to ``"raise"``, raise a `FileNotFoundError` in case there are any groups for which one or more readers have no files associated. Returns: List of dictionaries mapping 'reader' to a list of filenames. Each of these dictionaries can be passed as ``filenames`` to a `Scene` object. """ if reader is not None and not isinstance(reader, (list, tuple)): reader = [reader] reader_kwargs = reader_kwargs or {} reader_files = _assign_files_to_readers( files_to_sort, reader, reader_kwargs) if reader is None: reader = reader_files.keys() file_keys = _get_file_keys_for_reader_files( reader_files, group_keys=group_keys) file_groups = _get_sorted_file_groups(file_keys, time_threshold) groups = [{rn: file_groups[group_key].get(rn, []) for rn in reader} for group_key in file_groups] return list(_filter_groups(groups, missing=missing)) def _assign_files_to_readers(files_to_sort, reader_names, reader_kwargs): """Assign files to readers. Given a list of file names (paths), match those to reader instances. Internal helper for group_files. Args: files_to_sort (Collection[str]): Files to assign to readers. reader_names (Collection[str]): Readers to consider reader_kwargs (Mapping): Returns: Mapping[str, Tuple[reader, Set[str]]] Mapping where the keys are reader names and the values are tuples of (reader_configs, filenames). """ files_to_sort = set(files_to_sort) reader_dict = {} for reader_configs in configs_for_reader(reader_names): try: reader = load_reader(reader_configs, **reader_kwargs) except yaml.constructor.ConstructorError: LOG.exception( f"ConstructorError loading {reader_configs!s}, " "probably a missing dependency, skipping " "corresponding reader (if you did not explicitly " "specify the reader, Satpy tries all; performance " "will improve if you pass readers explicitly).") continue reader_name = reader.info["name"] files_matching = set(reader.filter_selected_filenames(files_to_sort)) files_to_sort -= files_matching if files_matching or reader_names is not None: reader_dict[reader_name] = (reader, files_matching) if files_to_sort: raise ValueError("No matching readers found for these files: " + ", ".join(files_to_sort)) return reader_dict def _get_file_keys_for_reader_files(reader_files, group_keys=None): """From a mapping from _assign_files_to_readers, get file keys. 
Given a mapping where each key is a reader name and each value is a tuple of reader instance (typically FileYAMLReader) and a collection of files, return a mapping with the same keys, but where the values are lists of tuples of (keys, filename), where keys are extracted from the filenames according to group_keys and filenames are the names those keys were extracted from. Internal helper for group_files. Returns: Mapping[str, List[Tuple[Tuple, str]]], as described. """ file_keys = {} for (reader_name, (reader_instance, files_to_sort)) in reader_files.items(): if group_keys is None: group_keys = reader_instance.info.get('group_keys', ('start_time',)) file_keys[reader_name] = [] # make a copy because filename_items_for_filetype will modify inplace files_to_sort = set(files_to_sort) for _, filetype_info in reader_instance.sorted_filetype_items(): for f, file_info in reader_instance.filename_items_for_filetype(files_to_sort, filetype_info): group_key = tuple(file_info.get(k) for k in group_keys) if all(g is None for g in group_key): warnings.warn( f"Found matching file {f:s} for reader " "{reader_name:s}, but none of group keys found. " "Group keys requested: " + ", ".join(group_keys), UserWarning) file_keys[reader_name].append((group_key, f)) return file_keys def _get_sorted_file_groups(all_file_keys, time_threshold): """Get sorted file groups. Get a list of dictionaries, where each list item consists of a dictionary mapping a tuple of keys to a mapping of reader names to files. The files listed in each list item are considered to be grouped within the same time. Args: all_file_keys, as returned by _get_file_keys_for_reader_files time_threshold: temporal threshold Returns: List[Mapping[Tuple, Mapping[str, List[str]]]], as described Internal helper for group_files. """ # flatten to get an overall sorting; put the name in the middle in the # interest of sorting flat_keys = ((v[0], rn, v[1]) for (rn, vL) in all_file_keys.items() for v in vL) prev_key = None threshold = timedelta(seconds=time_threshold) # file_groups is sorted, because dictionaries are sorted by insertion # order in Python 3.7+ file_groups = {} for gk, rn, f in sorted(flat_keys): # use first element of key as time identifier (if datetime type) if prev_key is None: is_new_group = True prev_key = gk elif isinstance(gk[0], datetime): # datetimes within threshold difference are "the same time" is_new_group = (gk[0] - prev_key[0]) > threshold else: is_new_group = gk[0] != prev_key[0] # compare keys for those that are found for both the key and # this is a generator and is not computed until the if statement below # when we know that `prev_key` is not None vals_not_equal = (this_val != prev_val for this_val, prev_val in zip(gk[1:], prev_key[1:]) if this_val is not None and prev_val is not None) # if this is a new group based on the first element if is_new_group or any(vals_not_equal): file_groups[gk] = {rn: [f]} prev_key = gk else: if rn not in file_groups[prev_key]: file_groups[prev_key][rn] = [f] else: file_groups[prev_key][rn].append(f) return file_groups def _filter_groups(groups, missing="pass"): """Filter multi-reader group-files behavior. Helper for `group_files`. When `group_files` is called with multiple readers, make sure that the desired behaviour for missing files is enforced: if missing is ``"raise"``, raise an exception if at least one group has at least one reader without files; if it is ``"skip"``, remove those. If it is ``"pass"``, do nothing. Yields groups to be kept. 
Args: groups (List[Mapping[str, List[str]]]): groups as found by `group_files`. missing (str): String controlling behaviour, see documentation above. Yields: ``Mapping[str:, List[str]]``: groups to be retained """ if missing == "pass": yield from groups return if missing not in ("raise", "skip"): raise ValueError("Invalid value for ``missing`` argument. Expected " f"'raise', 'skip', or 'pass', got '{missing!s}'") for (i, grp) in enumerate(groups): readers_without_files = _get_keys_with_empty_values(grp) if readers_without_files: if missing == "raise": raise FileNotFoundError( f"when grouping files, group at index {i:d} " "had no files for readers: " + ", ".join(readers_without_files)) else: yield grp def _get_keys_with_empty_values(grp): """Find mapping keys where values have length zero. Helper for `_filter_groups`, which is in turn a helper for `group_files`. Given a mapping key -> Collection[Any], return the keys where the length of the collection is zero. Args: grp (Mapping[Any, Collection[Any]]): dictionary to check Returns: set of keys """ empty = set() for (k, v) in grp.items(): if len(v) == 0: # explicit check to ensure failure if not a collection empty.add(k) return empty def read_reader_config(config_files, loader=UnsafeLoader): """Read the reader `config_files` and return the extracted reader metadata.""" reader_config = load_yaml_reader_configs(*config_files, loader=loader) return reader_config['reader'] def load_reader(reader_configs, **reader_kwargs): """Import and setup the reader from *reader_info*.""" return AbstractYAMLReader.from_config_files(*reader_configs, **reader_kwargs) def configs_for_reader(reader=None): """Generate reader configuration files for one or more readers. Args: reader (Optional[str]): Yield configs only for this reader Returns: Generator of lists of configuration files """ if reader is not None: if not isinstance(reader, (list, tuple)): reader = [reader] reader = get_valid_reader_names(reader) # given a config filename or reader name config_files = [r if r.endswith('.yaml') else r + '.yaml' for r in reader] else: reader_configs = glob_config(os.path.join('readers', '*.yaml')) config_files = set(reader_configs) for config_file in config_files: config_basename = os.path.basename(config_file) reader_name = os.path.splitext(config_basename)[0] reader_configs = config_search_paths( os.path.join("readers", config_basename)) if not reader_configs: # either the reader they asked for does not exist # or satpy is improperly configured and can't find its own readers raise ValueError("No reader named: {}".format(reader_name)) yield reader_configs def get_valid_reader_names(reader): """Check for old reader names or readers pending deprecation.""" new_readers = [] for reader_name in reader: if reader_name in OLD_READER_NAMES: raise ValueError( "Reader name '{}' has been deprecated, " "use '{}' instead.".format(reader_name, OLD_READER_NAMES[reader_name])) if reader_name in PENDING_OLD_READER_NAMES: new_name = PENDING_OLD_READER_NAMES[reader_name] warnings.warn("Reader name '{}' is being deprecated and will be removed soon." "Please use '{}' instead.".format(reader_name, new_name), FutureWarning) new_readers.append(new_name) else: new_readers.append(reader_name) return new_readers def available_readers(as_dict=False): """Available readers based on current configuration. Args: as_dict (bool): Optionally return reader information as a dictionary. Default: False Returns: List of available reader names. 
If `as_dict` is `True` then a list of dictionaries including additionally reader information is returned. """ readers = [] for reader_configs in configs_for_reader(): try: reader_info = read_reader_config(reader_configs) except (KeyError, IOError, yaml.YAMLError): LOG.debug("Could not import reader config from: %s", reader_configs) LOG.debug("Error loading YAML", exc_info=True) continue readers.append(reader_info if as_dict else reader_info['name']) if as_dict: readers = sorted(readers, key=lambda reader_info: reader_info['name']) else: readers = sorted(readers) return readers def find_files_and_readers(start_time=None, end_time=None, base_dir=None, reader=None, sensor=None, filter_parameters=None, reader_kwargs=None, missing_ok=False, fs=None): """Find files matching the provided parameters. Use `start_time` and/or `end_time` to limit found filenames by the times in the filenames (not the internal file metadata). Files are matched if they fall anywhere within the range specified by these parameters. Searching is **NOT** recursive. Files may be either on-disk or on a remote file system. By default, files are searched for locally. Users can search on remote filesystems by passing an instance of an implementation of `fsspec.spec.AbstractFileSystem` (strictly speaking, any object of a class implementing a ``glob`` method works). If locating files on a local file system, the returned dictionary can be passed directly to the `Scene` object through the `filenames` keyword argument. If it points to a remote file system, it is the responsibility of the user to download the files first (directly reading from cloud storage is not currently available in Satpy). The behaviour of time-based filtering depends on whether or not the filename contains information about the end time of the data or not: - if the end time is not present in the filename, the start time of the filename is used and has to fall between (inclusive) the requested start and end times - otherwise, the timespan of the filename has to overlap the requested timespan Example usage for querying a s3 filesystem using the s3fs module: >>> import s3fs, satpy.readers, datetime >>> satpy.readers.find_files_and_readers( ... base_dir="s3://noaa-goes16/ABI-L1b-RadF/2019/321/14/", ... fs=s3fs.S3FileSystem(anon=True), ... reader="abi_l1b", ... start_time=datetime.datetime(2019, 11, 17, 14, 40)) {'abi_l1b': [...]} Args: start_time (datetime): Limit used files by starting time. end_time (datetime): Limit used files by ending time. base_dir (str): The directory to search for files containing the data to load. Defaults to the current directory. reader (str or list): The name of the reader to use for loading the data or a list of names. sensor (str or list): Limit used files by provided sensors. filter_parameters (dict): Filename pattern metadata to filter on. `start_time` and `end_time` are automatically added to this dictionary. Shortcut for `reader_kwargs['filter_parameters']`. reader_kwargs (dict): Keyword arguments to pass to specific reader instances to further configure file searching. missing_ok (bool): If False (default), raise ValueError if no files are found. If True, return empty dictionary if no files are found. fs (FileSystem): Optional, instance of implementation of fsspec.spec.AbstractFileSystem (strictly speaking, any object of a class implementing ``.glob`` is enough). Defaults to searching the local filesystem. 
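A purely local search looks the same apart from ``fs`` (the directory used
here is hypothetical)::

    >>> find_files_and_readers(
    ...     base_dir="/path/to/local/abi/data",
    ...     reader="abi_l1b",
    ...     start_time=datetime.datetime(2019, 11, 17, 14, 40))
    {'abi_l1b': [...]}
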
Returns: Dictionary mapping reader name string to list of filenames """ reader_files = {} reader_kwargs = reader_kwargs or {} filter_parameters = filter_parameters or reader_kwargs.get('filter_parameters', {}) sensor_supported = False if start_time or end_time: filter_parameters['start_time'] = start_time filter_parameters['end_time'] = end_time reader_kwargs['filter_parameters'] = filter_parameters for reader_configs in configs_for_reader(reader): (reader_instance, loadables, this_sensor_supported) = _get_loadables_for_reader_config( base_dir, reader, sensor, reader_configs, reader_kwargs, fs) sensor_supported = sensor_supported or this_sensor_supported if loadables: reader_files[reader_instance.name] = list(loadables) if sensor and not sensor_supported: raise ValueError("Sensor '{}' not supported by any readers".format(sensor)) if not (reader_files or missing_ok): raise ValueError("No supported files found") return reader_files def _get_loadables_for_reader_config(base_dir, reader, sensor, reader_configs, reader_kwargs, fs): """Get loadables for reader configs. Helper for find_files_and_readers. Args: base_dir: as for `find_files_and_readers` reader: as for `find_files_and_readers` sensor: as for `find_files_and_readers` reader_configs: reader metadata such as returned by `configs_for_reader`. reader_kwargs: Keyword arguments to be passed to reader. fs (FileSystem): as for `find_files_and_readers` """ sensor_supported = False try: reader_instance = load_reader(reader_configs, **reader_kwargs) except (KeyError, IOError, yaml.YAMLError) as err: LOG.info('Cannot use %s', str(reader_configs)) LOG.debug(str(err)) if reader and (isinstance(reader, str) or len(reader) == 1): # if it is a single reader then give a more usable error raise return (None, [], False) if not reader_instance.supports_sensor(sensor): return (reader_instance, [], False) if sensor is not None: # sensor was specified and a reader supports it sensor_supported = True loadables = reader_instance.select_files_from_directory(base_dir, fs) if loadables: loadables = list( reader_instance.filter_selected_filenames(loadables)) return (reader_instance, loadables, sensor_supported) def load_readers(filenames=None, reader=None, reader_kwargs=None): """Create specified readers and assign files to them. Args: filenames (iterable or dict): A sequence of files that will be used to load data from. A ``dict`` object should map reader names to a list of filenames for that reader. reader (str or list): The name of the reader to use for loading the data or a list of names. reader_kwargs (dict): Keyword arguments to pass to specific reader instances. This can either be a single dictionary that will be passed to all reader instances, or a mapping of reader names to dictionaries. If the keys of ``reader_kwargs`` match exactly the list of strings in ``reader`` or the keys of filenames, each reader instance will get its own keyword arguments accordingly. 
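As a sketch, per-reader keyword arguments can therefore be given like this
(the reader names and the option shown are purely illustrative)::

    load_readers(filenames={'reader_a': files_a, 'reader_b': files_b},
                 reader_kwargs={'reader_a': {'mask_saturated': True},
                                'reader_b': {}})
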
Returns: Dictionary mapping reader name to reader instance """ reader_instances = {} if _early_exit(filenames, reader): return {} reader, filenames, remaining_filenames = _get_reader_and_filenames(reader, filenames) (reader_kwargs, reader_kwargs_without_filter) = _get_reader_kwargs(reader, reader_kwargs) for idx, reader_configs in enumerate(configs_for_reader(reader)): if isinstance(filenames, dict): readers_files = set(filenames[reader[idx]]) else: readers_files = remaining_filenames try: reader_instance = load_reader( reader_configs, **reader_kwargs[None if reader is None else reader[idx]]) except (KeyError, IOError, yaml.YAMLError) as err: LOG.info('Cannot use %s', str(reader_configs)) LOG.debug(str(err)) continue if not readers_files: # we weren't given any files for this reader continue loadables = reader_instance.select_files_from_pathnames(readers_files) if loadables: reader_instance.create_filehandlers( loadables, fh_kwargs=reader_kwargs_without_filter[None if reader is None else reader[idx]]) reader_instances[reader_instance.name] = reader_instance remaining_filenames -= set(loadables) if not remaining_filenames: break _check_remaining_files(remaining_filenames) _check_reader_instances(reader_instances) return reader_instances def _early_exit(filenames, reader): if not filenames and not reader: # used for an empty Scene return True if reader and filenames is not None and not filenames: # user made a mistake in their glob pattern raise ValueError("'filenames' was provided but is empty.") if not filenames: LOG.warning("'filenames' required to create readers and load data") return True return False def _get_reader_and_filenames(reader, filenames): if reader is None and isinstance(filenames, dict): # filenames is a dictionary of reader_name -> filenames reader = list(filenames.keys()) remaining_filenames = set(f for fl in filenames.values() for f in fl) elif reader and isinstance(filenames, dict): # filenames is a dictionary of reader_name -> filenames # but they only want one of the readers filenames = filenames[reader] remaining_filenames = set(filenames or []) else: remaining_filenames = set(filenames or []) return reader, filenames, remaining_filenames def _check_remaining_files(remaining_filenames): if remaining_filenames: LOG.warning("Don't know how to open the following files: {}".format(str(remaining_filenames))) def _check_reader_instances(reader_instances): if not reader_instances: raise ValueError("No supported files found") if not any(list(r.available_dataset_ids) for r in reader_instances.values()): raise ValueError("No dataset could be loaded. Either missing " "requirements (such as Epilog, Prolog) or none of the " "provided files match the filter parameters.") def _get_reader_kwargs(reader, reader_kwargs): """Help load_readers to form reader_kwargs. Helper for load_readers to get reader_kwargs and reader_kwargs_without_filter in the desirable form. """ reader_kwargs = reader_kwargs or {} # ensure one reader_kwargs per reader, None if not provided if reader is None: reader_kwargs = {None: reader_kwargs} elif reader_kwargs.keys() != set(reader): reader_kwargs = dict.fromkeys(reader, reader_kwargs) reader_kwargs_without_filter = {} for (k, v) in reader_kwargs.items(): reader_kwargs_without_filter[k] = v.copy() reader_kwargs_without_filter[k].pop('filter_parameters', None) return (reader_kwargs, reader_kwargs_without_filter) @total_ordering class FSFile(os.PathLike): """Implementation of a PathLike file object, that can be opened. 
This is made to be used in conjuction with fsspec or s3fs. For example:: from satpy import Scene import fsspec filename = 'noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*' the_files = fsspec.open_files("simplecache::s3://" + filename, s3={'anon': True}) from satpy.readers import FSFile fs_files = [FSFile(open_file) for open_file in the_files] scn = Scene(filenames=fs_files, reader='abi_l1b') scn.load(['true_color_raw']) """ def __init__(self, file, fs=None): """Initialise the FSFile instance. Args: file (str, Pathlike, or OpenFile): String, object implementing the `os.PathLike` protocol, or an `fsspec.OpenFile` instance. If passed an instance of `fsspec.OpenFile`, the following argument ``fs`` has no effect. fs (fsspec filesystem, optional) Object implementing the fsspec filesystem protocol. """ try: self._file = file.path self._fs = file.fs except AttributeError: self._file = file self._fs = fs def __str__(self): """Return the string version of the filename.""" return os.fspath(self._file) def __fspath__(self): """Comply with PathLike.""" return os.fspath(self._file) def __repr__(self): """Representation of the object.""" return '' def open(self): """Open the file. This is read-only. """ try: return self._fs.open(self._file) except AttributeError: return open(self._file) def __lt__(self, other): """Implement ordering. Ordering is defined by the string representation of the filename, without considering the file system. """ return os.fspath(self) < os.fspath(other) def __eq__(self, other): """Implement equality comparisons. Two FSFile instances are considered equal if they have the same filename and the same file system. """ return (isinstance(other, FSFile) and self._file == other._file and self._fs == other._fs) def __hash__(self): """Implement hashing. Make FSFile objects hashable, so that they can be used in sets. Some parts of satpy and perhaps others use sets of filenames (strings or pathlib.Path), or maybe use them as dictionary keys. This requires them to be hashable. To ensure FSFile can work as a drop-in replacement for strings of Path objects to represent the location of blob of data, FSFile should be hashable too. Returns the hash, computed from the hash of the filename and the hash of the filesystem. """ try: fshash = hash(self._fs) except TypeError: # fsspec < 0.8.8 for CachingFileSystem fshash = hash(pickle.dumps(self._fs)) return hash(self._file) ^ fshash def open_file_or_filename(unknown_file_thing): """Try to open the *unknown_file_thing*, otherwise return the filename.""" try: f_obj = unknown_file_thing.open() except AttributeError: f_obj = unknown_file_thing return f_obj satpy-0.34.0/satpy/readers/_geos_area.py000066400000000000000000000173261420401153000201600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Geostationary Projection / Area computations. 
This module computes properties and area definitions for geostationary satellites. It is designed to be a common module that can be called by all geostationary satellite readers and uses commonly-included parameters such as the CFAC/LFAC values, satellite position, etc, to compute the correct area definition. """ import numpy as np from pyresample import geometry def get_xy_from_linecol(line, col, offsets, factors): """Get the intermediate coordinates from line & col. Intermediate coordinates are actually the instruments scanning angles. """ loff, coff = offsets lfac, cfac = factors x__ = float(col - coff) / (float(cfac) / 2 ** 16) y__ = float(line - loff) / (float(lfac) / 2 ** 16) return x__, y__ def make_ext(ll_x, ur_x, ll_y, ur_y, h): """Create the area extent from computed ll and ur. Args: ll_x: The lower left x coordinate (m) ur_x: The upper right x coordinate (m) ll_y: The lower left y coordinate (m) ur_y: The upper right y coordinate (m) h: The satellite altitude above the Earth's surface Returns: aex: An area extent for the scene """ aex = (np.deg2rad(ll_x) * h, np.deg2rad(ll_y) * h, np.deg2rad(ur_x) * h, np.deg2rad(ur_y) * h) return aex def get_area_extent(pdict): """Get the area extent seen by a geostationary satellite. Args: pdict: A dictionary containing common parameters: nlines: Number of lines in image ncols: Number of columns in image cfac: Column scaling factor lfac: Line scaling factor coff: Column offset factor loff: Line offset factor scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N) Returns: aex: An area extent for the scene """ # count starts at 1 cols = 1 - 0.5 if pdict['scandir'] == 'S2N': lines = 0.5 - 1 scanmult = -1 else: lines = 1 - 0.5 scanmult = 1 # Lower left x, y scanning angles in degrees ll_x, ll_y = get_xy_from_linecol(lines * scanmult, cols, (pdict['loff'], pdict['coff']), (pdict['lfac'], pdict['cfac'])) cols += pdict['ncols'] lines += pdict['nlines'] # Upper right x, y scanning angles in degrees ur_x, ur_y = get_xy_from_linecol(lines * scanmult, cols, (pdict['loff'], pdict['coff']), (pdict['lfac'], pdict['cfac'])) if pdict['scandir'] == 'S2N': ll_y *= -1 ur_y *= -1 # Convert degrees to radians and create area extent aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict['h']) return aex def get_area_definition(pdict, a_ext): """Get the area definition for a geo-sat. Args: pdict: A dictionary containing common parameters: nlines: Number of lines in image ncols: Number of columns in image ssp_lon: Subsatellite point longitude (deg) a: Earth equatorial radius (m) b: Earth polar radius (m) h: Platform height (m) a_name: Area name a_desc: Area description p_id: Projection id a_ext: A four element tuple containing the area extent (scan angle) for the scene in radians Returns: a_def: An area definition for the scene .. note:: The AreaDefinition `proj_id` attribute is being deprecated. """ proj_dict = {'a': float(pdict['a']), 'b': float(pdict['b']), 'lon_0': float(pdict['ssp_lon']), 'h': float(pdict['h']), 'proj': 'geos', 'units': 'm'} a_def = geometry.AreaDefinition( pdict['a_name'], pdict['a_desc'], pdict['p_id'], proj_dict, int(pdict['ncols']), int(pdict['nlines']), a_ext) return a_def def sampling_to_lfac_cfac(sampling): """Convert angular sampling to line/column scaling factor (aka LFAC/CFAC). Reference: `MSG Ground Segment LRIT HRIT Mission Specific Implementation`_, Appendix E.2. .. 
_MSG Ground Segment LRIT HRIT Mission Specific Implementation: https://www-cdn.eumetsat.int/files/2020-04/pdf_ten_05057_spe_msg_lrit_hri.pdf Args: sampling: float Angular sampling (rad) Returns: Line/column scaling factor (deg-1) """ return 2.0 ** 16 / np.rad2deg(sampling) def get_geos_area_naming(input_dict): """Get a dictionary containing formatted AreaDefinition naming. Args: input_dict: dict Dictionary with keys `platform_name`, `instrument_name`, `service_name`, `service_desc`, `resolution` . The resolution is expected in meters. Returns: area_naming_dict with `area_id`, `description` keys, values are strings. .. note:: The AreaDefinition `proj_id` attribute is being deprecated and is therefore not formatted here. An empty string is to be used until the attribute is fully removed. """ area_naming_dict = {} resolution_strings = get_resolution_and_unit_strings(input_dict['resolution']) area_naming_dict['area_id'] = '{}_{}_{}_{}{}'.format(input_dict['platform_name'].lower(), input_dict['instrument_name'].lower(), input_dict['service_name'].lower(), resolution_strings['value'], resolution_strings['unit'] ) area_naming_dict['description'] = '{} {} {} area definition ' \ 'with {} {} resolution'.format(input_dict['platform_name'].upper(), input_dict['instrument_name'].upper(), input_dict['service_desc'], resolution_strings['value'], resolution_strings['unit'] ) return area_naming_dict def get_resolution_and_unit_strings(resolution): """Get the resolution value and unit as strings. If the resolution is larger than 1000 m, use kilometer as unit. If lower, use meter. Args: resolution: scalar Resolution in meters. Returns: Dictionary with `value` and `unit` keys, values are strings. """ if resolution >= 1000: return {'value': '{:.0f}'.format(resolution*1e-3), 'unit': 'km'} return {'value': '{:.0f}'.format(resolution), 'unit': 'm'} satpy-0.34.0/satpy/readers/aapp_l1b.py000066400000000000000000000622011420401153000175430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for aapp level 1b data. Options for loading: - pre_launch_coeffs (False): use pre-launch coefficients if True, operational otherwise (if available). 
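A hedged sketch of selecting this option when building a Scene, assuming it
is forwarded through ``reader_kwargs`` (the file list is hypothetical)::

    scn = Scene(filenames=aapp_files, reader='avhrr_l1b_aapp',
                reader_kwargs={'pre_launch_coeffs': True})

Format reference: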
https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf """ import functools import logging from datetime import datetime, timedelta import dask.array as da import numpy as np import xarray as xr from dask import delayed from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler LINE_CHUNK = CHUNK_SIZE ** 2 // 2048 logger = logging.getLogger(__name__) AVHRR_CHANNEL_NAMES = ["1", "2", "3a", "3b", "4", "5"] AVHRR_ANGLE_NAMES = ['sensor_zenith_angle', 'solar_zenith_angle', 'sun_sensor_azimuth_difference_angle'] AVHRR_PLATFORM_IDS2NAMES = {4: 'NOAA-15', 2: 'NOAA-16', 6: 'NOAA-17', 7: 'NOAA-18', 8: 'NOAA-19', 11: 'Metop-B', 12: 'Metop-A', 13: 'Metop-C', 14: 'Metop simulator'} def create_xarray(arr): """Create an `xarray.DataArray`.""" res = xr.DataArray(arr, dims=['y', 'x']) return res class AAPPL1BaseFileHandler(BaseFileHandler): """A base file handler for the AAPP level-1 formats.""" def __init__(self, filename, filename_info, filetype_info): """Initialize AAPP level-1 file handler object.""" super().__init__(filename, filename_info, filetype_info) self.channels = None self.units = None self.sensor = "unknown" self._data = None self._header = None self.area = None self._channel_names = [] self._angle_names = [] def _set_filedata_layout(self): """Set the file data type/layout.""" self._header_offset = 0 self._scan_type = np.dtype([("siteid", " 0: status |= self._header['inststat2'].item() return status @staticmethod def _convert_binary_channel_status_to_activation_dict(status): bits_channels = ((13, '1'), (12, '2'), (11, '3a'), (10, '3b'), (9, '4'), (8, '5')) activated = dict() for bit, channel_name in bits_channels: activated[channel_name] = bool(status >> bit & 1) return activated def available_datasets(self, configured_datasets=None): """Get the available datasets.""" for _, mda in configured_datasets: if mda['name'] in self._channel_names: yield self.active_channels[mda['name']], mda else: yield True, mda def get_angles(self, angle_id): """Get sun-satellite viewing angles.""" sunz, satz, azidiff = self._get_all_interpolated_angles() name_to_variable = dict(zip(self._angle_names, (satz, sunz, azidiff))) return create_xarray(name_to_variable[angle_id]) @functools.lru_cache(maxsize=10) def _get_all_interpolated_angles(self): sunz40km, satz40km, azidiff40km = self._get_tiepoint_angles_in_degrees() return self._interpolate_arrays(sunz40km, satz40km, azidiff40km) def _get_tiepoint_angles_in_degrees(self): sunz40km = self._data["ang"][:, :, 0] * 1e-2 satz40km = self._data["ang"][:, :, 1] * 1e-2 azidiff40km = self._data["ang"][:, :, 2] * 1e-2 return sunz40km, satz40km, azidiff40km def _interpolate_arrays(self, *input_arrays, geolocation=False): lines = input_arrays[0].shape[0] try: interpolator = self._create_40km_interpolator(lines, *input_arrays, geolocation=geolocation) except ImportError: logger.warning("Could not interpolate, python-geotiepoints missing.") output_arrays = input_arrays else: output_delayed = delayed(interpolator.interpolate, nout=3)() output_arrays = [da.from_delayed(out_array, (lines, 2048), in_array.dtype) for in_array, out_array in zip(input_arrays, output_delayed)] return output_arrays @staticmethod def _create_40km_interpolator(lines, *arrays_40km, geolocation=False): if geolocation: # Slower but accurate at datum line from geotiepoints.geointerpolator import GeoInterpolator as Interpolator else: from geotiepoints.interpolator import Interpolator cols40km = np.arange(24, 2048, 40) cols1km = np.arange(2048) 
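# The AAPP level-1b tie-point grid holds 51 samples per scan line (1 km pixel
# indices 24, 64, ..., 2024, i.e. every 40th pixel); the Interpolator expands
# these to the full 2048-pixel swath using the along-track (1st order) and
# cross-track (3rd order) settings defined just below.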
rows40km = np.arange(lines) rows1km = np.arange(lines) along_track_order = 1 cross_track_order = 3 satint = Interpolator( arrays_40km, (rows40km, cols40km), (rows1km, cols1km), along_track_order, cross_track_order) return satint def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_all_interpolated_coordinates() if coordinate_id == 'longitude': return create_xarray(lons) if coordinate_id == 'latitude': return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) @functools.lru_cache(maxsize=10) def _get_all_interpolated_coordinates(self): lons40km, lats40km = self._get_coordinates_in_degrees() return self._interpolate_arrays(lons40km, lats40km, geolocation=True) def _get_coordinates_in_degrees(self): lons40km = self._data["pos"][:, :, 1] * 1e-4 lats40km = self._data["pos"][:, :, 0] * 1e-4 return lons40km, lats40km def calibrate(self, dataset_id, pre_launch_coeffs=False, calib_coeffs=None): """Calibrate the data.""" if calib_coeffs is None: calib_coeffs = {} units = {'reflectance': '%', 'brightness_temperature': 'K', 'counts': '', 'radiance': 'W*m-2*sr-1*cm ?'} if dataset_id['name'] in ("3a", "3b") and self._is3b is None: # Is it 3a or 3b: self._is3a = da.bitwise_and(da.from_array(self._data['scnlinbit'], chunks=LINE_CHUNK), 3) == 0 self._is3b = da.bitwise_and(da.from_array(self._data['scnlinbit'], chunks=LINE_CHUNK), 3) == 1 try: vis_idx = ['1', '2', '3a'].index(dataset_id['name']) ir_idx = None except ValueError: vis_idx = None ir_idx = ['3b', '4', '5'].index(dataset_id['name']) mask = True if vis_idx is not None: coeffs = calib_coeffs.get('ch' + dataset_id['name']) if dataset_id['name'] == '3a': mask = self._is3a[:, None] ds = create_xarray( _vis_calibrate(self._data, vis_idx, dataset_id['calibration'], pre_launch_coeffs, coeffs, mask=mask)) else: if dataset_id['name'] == '3b': mask = self._is3b[:, None] ds = create_xarray( _ir_calibrate(self._header, self._data, ir_idx, dataset_id['calibration'], mask=mask)) ds.attrs['units'] = units[dataset_id['calibration']] ds.attrs.update(dataset_id._asdict()) return ds # AAPP 1b header _HEADERTYPE = np.dtype([("siteid", "S3"), ("blank", "S1"), ("l1bversnb", " 0.0 return da.where(mask, rad, np.nan) # Central wavenumber: cwnum = header['radtempcnv'][0, irchn, 0] if irchn == 0: cwnum = cwnum / 1.0e2 else: cwnum = cwnum / 1.0e3 bandcor_2 = header['radtempcnv'][0, irchn, 1] / 1e5 bandcor_3 = header['radtempcnv'][0, irchn, 2] / 1e6 ir_const_1 = 1.1910659e-5 ir_const_2 = 1.438833 t_planck = (ir_const_2 * cwnum) / \ np.log(1 + ir_const_1 * cwnum * cwnum * cwnum / rad) # Band corrections applied to t_planck to get correct # brightness temperature for channel: if bandcor_2 < 0: # Post AAPP-v4 tb_ = bandcor_2 + bandcor_3 * t_planck else: # AAPP 1 to 4 tb_ = (t_planck - bandcor_2) / bandcor_3 # Mask unnaturally low values return da.where(mask, tb_, np.nan) satpy-0.34.0/satpy/readers/aapp_mhs_amsub_l1c.py000066400000000000000000000406161420401153000216100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020, 2021 Pytroll developers # Author(s): # Adam Dybbroe # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. 
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Reader for the AAPP AMSU-B/MHS level-1c data. https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf """ import logging import numbers from contextlib import suppress from typing import NamedTuple import dask.array as da import numpy as np from satpy import CHUNK_SIZE from satpy.readers.aapp_l1b import AAPPL1BaseFileHandler, create_xarray logger = logging.getLogger(__name__) LINE_CHUNK = CHUNK_SIZE ** 2 // 90 MHS_AMSUB_CHANNEL_NAMES = ['1', '2', '3', '4', '5'] MHS_AMSUB_ANGLE_NAMES = ['sensor_zenith_angle', 'sensor_azimuth_angle', 'solar_zenith_angle', 'solar_azimuth_difference_angle'] MHS_AMSUB_PLATFORM_IDS2NAMES = {15: 'NOAA-15', 16: 'NOAA-16', 17: 'NOAA-17', 18: 'NOAA-18', 19: 'NOAA-19', 1: 'Metop-B', 2: 'Metop-A', 3: 'Metop-C', 4: 'Metop simulator'} MHS_AMSUB_PLATFORMS = ['Metop-A', 'Metop-B', 'Metop-C', 'NOAA-18', 'NOAA-19'] class FrequencyDoubleSideBandBase(NamedTuple): """Base class for a frequency double side band. Frequency Double Side Band is supposed to describe the special type of bands commonly used in humidty sounding from Passive Microwave Sensors. When the absorption band being observed is symmetrical it is advantageous (giving better NeDT) to sense in a band both right and left of the central absorption frequency. This is needed because of this bug: https://bugs.python.org/issue41629 """ central: float side: float bandwidth: float unit: str = "GHz" class FrequencyDoubleSideBand(FrequencyDoubleSideBandBase): """The frequency double side band class. The elements of the double-side-band type frequency band are the central frquency, the relative side band frequency (relative to the center - left and right) and their bandwidths, and optionally a unit (defaults to GHz). No clever unit conversion is done here, it's just used for checking that two ranges are comparable. Frequency Double Side Band is supposed to describe the special type of bands commonly used in humidty sounding from Passive Microwave Sensors. When the absorption band being observed is symmetrical it is advantageous (giving better NeDT) to sense in a band both right and left of the central absorption frequency. """ def __eq__(self, other): """Return if two channel frequencies are equal. Args: other (tuple or scalar): (central frq, side band frq and band width frq) or scalar frq Return: True if other is a scalar and min <= other <= max, or if other is a tuple equal to self, False otherwise. 
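Illustrative sketch (the values are hypothetical, not a real channel
definition)::

    >>> FrequencyDoubleSideBand(183, 7, 2) == 176.5
    True
    >>> FrequencyDoubleSideBand(183, 7, 2) == 183
    False
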
""" if other is None: return False if isinstance(other, numbers.Number): return other in self if isinstance(other, (tuple, list)) and len(other) == 3: return other in self return super().__eq__(other) def __ne__(self, other): """Return the opposite of `__eq__`.""" return not self == other def __lt__(self, other): """Compare to another frequency.""" if other is None: return False return super().__lt__(other) def __gt__(self, other): """Compare to another frequency.""" if other is None: return True return super().__gt__(other) def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) def __str__(self): """Format for print out.""" return "{0.central} {0.unit} ({0.side}_{0.bandwidth} {0.unit})".format(self) def __contains__(self, other): """Check if this double-side-band 'contains' *other*.""" if other is None: return False if isinstance(other, numbers.Number): if (self.central + self.side - self.bandwidth/2. <= other <= self.central + self.side + self.bandwidth/2.): return True if (self.central - self.side - self.bandwidth/2. <= other <= self.central - self.side + self.bandwidth/2.): return True return False if isinstance(other, (tuple, list)) and len(other) == 3: return ((self.central - self.side - self.bandwidth/2. <= other[0] - other[1] - other[2]/2. and self.central - self.side + self.bandwidth/2. >= other[0] - other[1] + other[2]/2.) or (self.central + self.side - self.bandwidth/2. <= other[0] + other[1] - other[2]/2. and self.central + self.side + self.bandwidth/2. >= other[0] + other[1] + other[2]/2.)) with suppress(AttributeError): if self.unit != other.unit: raise NotImplementedError("Can't compare frequency ranges with different units.") return ((self.central - self.side - self.bandwidth/2. <= other.central - other.side - other.bandwidth/2. and self.central - self.side + self.bandwidth/2. >= other.central - other.side + other.bandwidth/2.) or (self.central + self.side - self.bandwidth/2. <= other.central + other.side - other.bandwidth/2. and self.central + self.side + self.bandwidth/2. >= other.central + other.side + other.bandwidth/2.)) return False def distance(self, value): """Get the distance from value.""" if self == value: try: left_side_dist = abs(value.central - value.side - (self.central - self.side)) right_side_dist = abs(value.central + value.side - (self.central + self.side)) return min(left_side_dist, right_side_dist) except AttributeError: if isinstance(value, (tuple, list)): return abs((value[0] - value[1]) - (self.central - self.side)) left_side_dist = abs(value - (self.central - self.side)) right_side_dist = abs(value - (self.central + self.side)) return min(left_side_dist, right_side_dist) else: return np.inf @classmethod def convert(cls, frq): """Convert `frq` to this type if possible.""" if isinstance(frq, dict): return cls(**frq) return frq class FrequencyRangeBase(NamedTuple): """Base class for frequency ranges. This is needed because of this bug: https://bugs.python.org/issue41629 """ central: float bandwidth: float unit: str = "GHz" class FrequencyRange(FrequencyRangeBase): """The Frequency range class. The elements of the range are central and bandwidth values, and optionally a unit (defaults to GHz). No clever unit conversion is done here, it's just used for checking that two ranges are comparable. This type is used for passive microwave sensors. """ def __eq__(self, other): """Return if two channel frequencies are equal. 
Args: other (tuple or scalar): (central frq, band width frq) or scalar frq Return: True if other is a scalar and min <= other <= max, or if other is a tuple equal to self, False otherwise. """ if other is None: return False if isinstance(other, numbers.Number): return other in self if isinstance(other, (tuple, list)) and len(other) == 2: return self[:2] == other return super().__eq__(other) def __ne__(self, other): """Return the opposite of `__eq__`.""" return not self == other def __lt__(self, other): """Compare to another frequency.""" if other is None: return False return super().__lt__(other) def __gt__(self, other): """Compare to another frequency.""" if other is None: return True return super().__gt__(other) def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) def __str__(self): """Format for print out.""" return "{0.central} {0.unit} ({0.bandwidth} {0.unit})".format(self) def __contains__(self, other): """Check if this range contains *other*.""" if other is None: return False if isinstance(other, numbers.Number): return self.central - self.bandwidth/2. <= other <= self.central + self.bandwidth/2. with suppress(AttributeError): if self.unit != other.unit: raise NotImplementedError("Can't compare frequency ranges with different units.") return (self.central - self.bandwidth/2. <= other.central - other.bandwidth/2. and self.central + self.bandwidth/2. >= other.central + other.bandwidth/2.) return False def distance(self, value): """Get the distance from value.""" if self == value: try: return abs(value.central - self.central) except AttributeError: if isinstance(value, (tuple, list)): return abs(value[0] - self.central) return abs(value - self.central) else: return np.inf @classmethod def convert(cls, frq): """Convert `frq` to this type if possible.""" if isinstance(frq, dict): return cls(**frq) return frq class MHS_AMSUB_AAPPL1CFile(AAPPL1BaseFileHandler): """Reader for AMSU-B/MHS L1C files created from the AAPP software.""" def __init__(self, filename, filename_info, filetype_info): """Initialize object information by reading the input file.""" super(MHS_AMSUB_AAPPL1CFile, self).__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in MHS_AMSUB_CHANNEL_NAMES} self.units = {i: 'brightness_temperature' for i in MHS_AMSUB_CHANNEL_NAMES} self._channel_names = MHS_AMSUB_CHANNEL_NAMES self._angle_names = MHS_AMSUB_ANGLE_NAMES self._set_filedata_layout() self.read() self._get_platform_name(MHS_AMSUB_PLATFORM_IDS2NAMES) self._get_sensorname() def _set_filedata_layout(self): """Set the file data type/layout.""" self._header_offset = HEADER_LENGTH self._scan_type = _SCANTYPE self._header_type = _HEADERTYPE def _get_sensorname(self): """Get the sensor name from the header.""" if self._header['instrument'][0] == 11: self.sensor = 'amsub' elif self._header['instrument'][0] == 12: self.sensor = 'mhs' else: raise IOError("Sensor neither MHS nor AMSU-B!") def get_angles(self, angle_id): """Get sun-satellite viewing angles.""" satz = self._data["angles"][:, :, 0] * 1e-2 sata = self._data["angles"][:, :, 1] * 1e-2 sunz = self._data["angles"][:, :, 2] * 1e-2 suna = self._data["angles"][:, :, 3] * 1e-2 name_to_variable = dict(zip(MHS_AMSUB_ANGLE_NAMES, (satz, sata, sunz, suna))) return create_xarray(name_to_variable[angle_id]) def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_coordinates_in_degrees() if coordinate_id == 'longitude': return create_xarray(lons) if coordinate_id == 'latitude': return 
create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) def _get_coordinates_in_degrees(self): lons = self._data["latlon"][:, :, 1] * 1e-4 lats = self._data["latlon"][:, :, 0] * 1e-4 return lons, lats def _calibrate_active_channel_data(self, key): """Calibrate active channel data only.""" return self.calibrate(key) def calibrate(self, dataset_id): """Calibrate the data.""" units = {'brightness_temperature': 'K'} mask = True idx = ['1', '2', '3', '4', '5'].index(dataset_id['name']) ds = create_xarray( _calibrate(self._data, idx, dataset_id['calibration'], mask=mask)) ds.attrs['units'] = units[dataset_id['calibration']] ds.attrs.update(dataset_id._asdict()) return ds def _calibrate(data, chn, calib_type, mask=True): """Calibrate channel data. *calib_type* in brightness_temperature. """ if calib_type not in ['brightness_temperature']: raise ValueError('Calibration ' + calib_type + ' unknown!') channel = da.from_array(data["btemps"][:, :, chn] / 100., chunks=(LINE_CHUNK, 90)) mask &= channel != 0 if calib_type == 'counts': return channel channel = channel.astype(np.float) return da.where(mask, channel, np.nan) HEADER_LENGTH = 1152*4 _HEADERTYPE = np.dtype([("siteid", "S3"), ("cfill_1", "S1"), ("l1bsite", "S3"), ("cfill_2", "S1"), ("versnb", ". """Advance Baseline Imager reader base class for the Level 1b and l2+ reader.""" import logging from datetime import datetime import numpy as np import xarray as xr from pyresample import geometry from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) PLATFORM_NAMES = { 'G16': 'GOES-16', 'G17': 'GOES-17', 'G18': 'GOES-18', 'G19': 'GOES-19', } class NC_ABI_BASE(BaseFileHandler): """Base reader for ABI L1B L2+ NetCDF4 files.""" def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info) platform_shortname = filename_info['platform_shortname'] self.platform_name = PLATFORM_NAMES.get(platform_shortname) self.nlines = self.nc['y'].size self.ncols = self.nc['x'].size self.coords = {} @cached_property def nc(self): """Get the xarray dataset for this file.""" f_obj = open_file_or_filename(self.filename) try: nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, chunks={'x': CHUNK_SIZE, 'y': CHUNK_SIZE}, ) except ValueError: nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, chunks={'lon': CHUNK_SIZE, 'lat': CHUNK_SIZE}, ) nc = self._rename_dims(nc) return nc @staticmethod def _rename_dims(nc): if 't' in nc.dims or 't' in nc.coords: nc = nc.rename({'t': 'time'}) if 'goes_lat_lon_projection' in nc: nc = nc.rename({'lon': 'x', 'lat': 'y'}) return nc @property def sensor(self): """Get sensor name for current file handler.""" return 'abi' def __getitem__(self, item): """Wrap `self.nc[item]` for better floating point precision. Some datasets use a 32-bit float scaling factor like the 'x' and 'y' variables which causes inaccurate unscaled data values. This method forces the scale factor to a 64-bit float first. 
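As a rough illustration of the magnitude involved (numbers approximate):
near the edge of a full-disc image ``x`` is about 0.15 rad, so a
single-precision relative error of ~1.2e-7 maps to roughly
0.15 * 1.2e-7 * 3.58e7 m, i.e. on the order of half a metre in the
projected coordinates once multiplied by the satellite height.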
""" data = self.nc[item] attrs = data.attrs data = self._adjust_data(data, item) data.attrs = attrs data = self._adjust_coords(data, item) return data def _adjust_data(self, data, item): """Adjust data with typing, scaling and filling.""" factor = data.attrs.get('scale_factor', 1) offset = data.attrs.get('add_offset', 0) fill = data.attrs.get('_FillValue') unsigned = data.attrs.get('_Unsigned', None) def is_int(val): return np.issubdtype(val.dtype, np.integer) if hasattr(val, 'dtype') else isinstance(val, int) # Ref. GOESR PUG-L1B-vol3, section 5.0.2 Unsigned Integer Processing if unsigned is not None and unsigned.lower() == 'true': # cast the data from int to uint data = data.astype('u%s' % data.dtype.itemsize) if fill is not None: fill = fill.astype('u%s' % fill.dtype.itemsize) if fill is not None: # Some backends (h5netcdf) may return attributes as shape (1,) # arrays rather than shape () scalars, which according to the netcdf # documentation at # is correct. if np.ndim(fill) > 0: fill = fill.item() if is_int(data) and is_int(factor) and is_int(offset): new_fill = fill else: new_fill = np.nan data = data.where(data != fill, new_fill) if factor != 1 and item in ('x', 'y'): # be more precise with x/y coordinates # see get_area_def for more information data = data * np.round(float(factor), 6) + np.round(float(offset), 6) elif factor != 1: # make sure the factor is a 64-bit float # can't do this in place since data is most likely uint16 # and we are making it a 64-bit float if not is_int(factor): factor = float(factor) data = data * factor + offset return data def _adjust_coords(self, data, item): """Handle coordinates (and recursive fun).""" new_coords = {} # 'time' dimension causes issues in other processing # 'x_image' and 'y_image' are confusing to some users and unnecessary # 'x' and 'y' will be overwritten by base class AreaDefinition for coord_name in ('x_image', 'y_image', 'time', 'x', 'y'): if coord_name in data.coords: data = data.drop_vars(coord_name) if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): if coord_name not in self.coords: self.coords[coord_name] = self[coord_name] new_coords[coord_name] = self.coords[coord_name] data.coords.update(new_coords) return data def get_dataset(self, key, info): """Load a dataset.""" raise NotImplementedError("Reader {} has not implemented get_dataset".format(self.name)) def get_area_def(self, key): """Get the area definition of the data at hand.""" if 'goes_imager_projection' in self.nc: return self._get_areadef_fixedgrid(key) if 'goes_lat_lon_projection' in self.nc: return self._get_areadef_latlon(key) raise ValueError('Unsupported projection found in the dataset') def _get_areadef_latlon(self, key): """Get the area definition of the data at hand.""" projection = self.nc["goes_lat_lon_projection"] a = projection.attrs['semi_major_axis'] b = projection.attrs['semi_minor_axis'] fi = projection.attrs['inverse_flattening'] pm = projection.attrs['longitude_of_prime_meridian'] proj_ext = self.nc["geospatial_lat_lon_extent"] w_lon = proj_ext.attrs['geospatial_westbound_longitude'] e_lon = proj_ext.attrs['geospatial_eastbound_longitude'] n_lat = proj_ext.attrs['geospatial_northbound_latitude'] s_lat = proj_ext.attrs['geospatial_southbound_latitude'] lat_0 = proj_ext.attrs['geospatial_lat_center'] lon_0 = proj_ext.attrs['geospatial_lon_center'] area_extent = (w_lon, s_lat, e_lon, n_lat) proj_dict = {'proj': 'latlong', 'lon_0': float(lon_0), 'lat_0': float(lat_0), 'a': float(a), 'b': float(b), 'fi': float(fi), 
'pm': float(pm)} ll_area_def = geometry.AreaDefinition( self.nc.attrs.get('orbital_slot', 'abi_geos'), self.nc.attrs.get('spatial_resolution', 'ABI file area'), 'abi_latlon', proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) return ll_area_def def _get_areadef_fixedgrid(self, key): """Get the area definition of the data at hand. Note this method takes special care to round and cast numbers to new data types so that the area definitions for different resolutions (different bands) should be equal. Without the special rounding in `__getitem__` and this method the area extents can be 0 to 1.0 meters off depending on how the calculations are done. """ projection = self.nc["goes_imager_projection"] a = projection.attrs['semi_major_axis'] b = projection.attrs['semi_minor_axis'] h = projection.attrs['perspective_point_height'] lon_0 = projection.attrs['longitude_of_projection_origin'] sweep_axis = projection.attrs['sweep_angle_axis'][0] # compute x and y extents in m h = np.float64(h) x = self['x'] y = self['y'] x_l = x[0].values x_r = x[-1].values y_l = y[-1].values y_u = y[0].values x_half = (x_r - x_l) / (self.ncols - 1) / 2. y_half = (y_u - y_l) / (self.nlines - 1) / 2. area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) area_extent = tuple(np.round(h * val, 6) for val in area_extent) proj_dict = {'proj': 'geos', 'lon_0': float(lon_0), 'a': float(a), 'b': float(b), 'h': h, 'units': 'm', 'sweep': sweep_axis} fg_area_def = geometry.AreaDefinition( self.nc.attrs.get('orbital_slot', 'abi_geos'), self.nc.attrs.get('spatial_resolution', 'ABI file area'), 'abi_fixed_grid', proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) return fg_area_def @property def start_time(self): """Start time of the current file's observations.""" return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """End time of the current file's observations.""" return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%S.%fZ') def spatial_resolution_to_number(self): """Convert the 'spatial_resolution' global attribute to meters.""" res = self.nc.attrs['spatial_resolution'].split(' ')[0] if res.endswith('km'): res = int(float(res[:-2]) * 1000) elif res.endswith('m'): res = int(res[:-1]) else: raise ValueError("Unexpected 'spatial_resolution' attribute '{}'".format(res)) return res satpy-0.34.0/satpy/readers/abi_l1b.py000066400000000000000000000136251420401153000173630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advance Baseline Imager reader for the Level 1b format. 
The files read by this reader are described in the official PUG document: https://www.goes-r.gov/users/docs/PUG-L1b-vol3.pdf """ import logging import numpy as np from satpy.readers.abi_base import NC_ABI_BASE logger = logging.getLogger(__name__) class NC_ABI_L1B(NC_ABI_BASE): """File reader for individual ABI L1B NetCDF4 files.""" def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading in get_dataset %s.', key['name']) # For raw cal, don't apply scale and offset, return raw file counts if key['calibration'] == 'counts': radiances = self.nc['Rad'].copy() else: radiances = self['Rad'] # mapping of calibration types to calibration functions cal_dictionary = { 'reflectance': self._vis_calibrate, 'brightness_temperature': self._ir_calibrate, 'radiance': self._rad_calibrate, 'counts': self._raw_calibrate, } try: func = cal_dictionary[key['calibration']] res = func(radiances) except KeyError: raise ValueError("Unknown calibration '{}'".format(key['calibration'])) # convert to satpy standard units if res.attrs['units'] == '1' and key['calibration'] != 'counts': res *= 100 res.attrs['units'] = '%' self._adjust_attrs(res, key) return res def _adjust_attrs(self, data, key): data.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) # Add orbital parameters projection = self.nc["goes_imager_projection"] data.attrs['orbital_parameters'] = { 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), 'projection_altitude': float(projection.attrs['perspective_point_height']), 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), 'satellite_nominal_altitude': float(self['nominal_satellite_height']) * 1000., 'yaw_flip': bool(self['yaw_flip_flag']), } data.attrs.update(key.to_dict()) # remove attributes that could be confusing later # if calibration type is raw counts, we leave them in if key['calibration'] != 'counts': data.attrs.pop('_FillValue', None) data.attrs.pop('scale_factor', None) data.attrs.pop('add_offset', None) data.attrs.pop('_Unsigned', None) data.attrs.pop('ancillary_variables', None) # Can't currently load DQF # although we could compute these, we'd have to update in calibration data.attrs.pop('valid_range', None) # add in information from the filename that may be useful to the user for attr in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname', 'suffix'): if attr in self.filename_info: data.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): data.attrs[attr] = self.nc.attrs.get(attr) # only include these if they are present for attr in ('fusion_args',): if attr in self.nc.attrs: data.attrs[attr] = self.nc.attrs[attr] def _rad_calibrate(self, data): """Calibrate any channel to radiances. This no-op method is just to keep the flow consistent - each valid cal type results in a calibration method call """ res = data res.attrs = data.attrs return res def _raw_calibrate(self, data): """Calibrate any channel to raw counts. Useful for cases where a copy requires no calibration. 
""" res = data res.attrs = data.attrs res.attrs['units'] = '1' res.attrs['long_name'] = 'Raw Counts' res.attrs['standard_name'] = 'counts' return res def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" solar_irradiance = self['esun'] esd = self["earth_sun_distance_anomaly_in_AU"].astype(float) factor = np.pi * esd * esd / solar_irradiance res = data * factor res.attrs = data.attrs res.attrs['units'] = '1' res.attrs['long_name'] = 'Bidirectional Reflectance' res.attrs['standard_name'] = 'toa_bidirectional_reflectance' return res def _ir_calibrate(self, data): """Calibrate IR channels to BT.""" fk1 = float(self["planck_fk1"]) fk2 = float(self["planck_fk2"]) bc1 = float(self["planck_bc1"]) bc2 = float(self["planck_bc2"]) res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs res.attrs['units'] = 'K' res.attrs['long_name'] = 'Brightness Temperature' res.attrs['standard_name'] = 'toa_brightness_temperature' return res satpy-0.34.0/satpy/readers/abi_l2_nc.py000066400000000000000000000105511420401153000176750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Advance Baseline Imager NOAA Level 2+ products reader. 
The files read by this reader are described in the official PUG document: https://www.goes-r.gov/products/docs/PUG-L2+-vol5.pdf """ import logging import numpy as np from satpy.readers.abi_base import NC_ABI_BASE LOG = logging.getLogger(__name__) class NC_ABI_L2(NC_ABI_BASE): """Reader class for NOAA ABI l2+ products in netCDF format.""" def get_dataset(self, key, info): """Load a dataset.""" var = info['file_key'] if self.filetype_info['file_type'] == 'abi_l2_mcmip': var += "_" + key.name LOG.debug('Reading in get_dataset %s.', var) variable = self[var] variable.attrs.update(key.to_dict()) self._update_data_arr_with_filename_attrs(variable) self._remove_problem_attrs(variable) return variable def _update_data_arr_with_filename_attrs(self, variable): _units = variable.attrs['units'] if 'units' in variable.attrs else None variable.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor, 'units': _units, 'satellite_latitude': float(self.nc['nominal_satellite_subpoint_lat']), 'satellite_longitude': float(self.nc['nominal_satellite_subpoint_lon']), 'satellite_altitude': float(self.nc['nominal_satellite_height']) * 1000.}) if 'flag_meanings' in variable.attrs: variable.attrs['flag_meanings'] = variable.attrs['flag_meanings'].split(' ') # add in information from the filename that may be useful to the user for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): variable.attrs[attr] = self.filename_info[attr] # add in information hardcoded in the filetype YAML for attr in ('observation_type',): if attr in self.filetype_info: variable.attrs[attr] = self.filetype_info[attr] # copy global attributes to metadata for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): variable.attrs[attr] = self.nc.attrs.get(attr) @staticmethod def _remove_problem_attrs(variable): # remove attributes that could be confusing later if not np.issubdtype(variable.dtype, np.integer): # integer fields keep the _FillValue variable.attrs.pop('_FillValue', None) variable.attrs.pop('scale_factor', None) variable.attrs.pop('add_offset', None) variable.attrs.pop('valid_range', None) variable.attrs.pop('_Unsigned', None) variable.attrs.pop('valid_range', None) variable.attrs.pop('ancillary_variables', None) # Can't currently load DQF def available_datasets(self, configured_datasets=None): """Add resolution to configured datasets.""" for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches: # we have this dataset resolution = self.spatial_resolution_to_number() new_info = ds_info.copy() new_info.setdefault('resolution', resolution) yield True, ds_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info satpy-0.34.0/satpy/readers/acspo.py000066400000000000000000000126451420401153000172000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """ACSPO SST Reader. See the following page for more information: https://podaac.jpl.nasa.gov/dataset/VIIRS_NPP-OSPO-L2P-v2.3 """ import logging from datetime import datetime import numpy as np from satpy.readers.netcdf_utils import NetCDF4FileHandler LOG = logging.getLogger(__name__) ROWS_PER_SCAN = { 'MODIS': 10, 'VIIRS': 16, 'AVHRR': None, } class ACSPOFileHandler(NetCDF4FileHandler): """ACSPO L2P SST File Reader.""" @property def platform_name(self): """Get satellite name for this file's data.""" res = self['/attr/platform'] if isinstance(res, np.ndarray): return str(res.astype(str)) return res @property def sensor_name(self): """Get instrument name for this file's data.""" res = self['/attr/sensor'] if isinstance(res, np.ndarray): return str(res.astype(str)) return res def get_shape(self, ds_id, ds_info): """Get numpy array shape for the specified dataset. Args: ds_id (DataID): ID of dataset that will be loaded ds_info (dict): Dictionary of dataset information from config file Returns: tuple: (rows, cols) """ var_path = ds_info.get('file_key', '{}'.format(ds_id['name'])) if var_path + '/shape' not in self: # loading a scalar value shape = 1 else: shape = self[var_path + '/shape'] if len(shape) == 3: if shape[0] != 1: raise ValueError("Not sure how to load 3D Dataset with more than 1 time") shape = shape[1:] return shape @staticmethod def _parse_datetime(datestr): return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") @property def start_time(self): """Get first observation time of data.""" return self._parse_datetime(self['/attr/time_coverage_start']) @property def end_time(self): """Get final observation time of data.""" return self._parse_datetime(self['/attr/time_coverage_end']) def get_metadata(self, dataset_id, ds_info): """Collect various metadata about the specified dataset.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) shape = self.get_shape(dataset_id, ds_info) units = self[var_path + '/attr/units'] info = getattr(self[var_path], 'attrs', {}) standard_name = self[var_path + '/attr/standard_name'] resolution = float(self['/attr/spatial_resolution'].split(' ')[0]) rows_per_scan = ROWS_PER_SCAN.get(self.sensor_name) or 0 info.update(dataset_id.to_dict()) info.update({ 'shape': shape, 'units': units, 'platform_name': self.platform_name, 'sensor': self.sensor_name, 'standard_name': standard_name, 'resolution': resolution, 'rows_per_scan': rows_per_scan, 'long_name': self.get(var_path + '/attr/long_name'), 'comment': self.get(var_path + '/attr/comment'), }) return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata from file on disk.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) metadata = self.get_metadata(dataset_id, ds_info) shape = metadata['shape'] file_shape = self[var_path + '/shape'] metadata['shape'] = shape valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] # no need to check fill value since we are using valid min/max scale_factor = self.get(var_path + '/attr/scale_factor') add_offset = self.get(var_path + '/attr/add_offset') data = self[var_path] data = data.rename({"ni": "x", "nj": "y"}) if isinstance(file_shape, tuple) 
and len(file_shape) == 3: # can only read 3D arrays with size 1 in the first dimension data = data[0] data = data.where((data >= valid_min) & (data <= valid_max)) if scale_factor is not None: data = data * scale_factor + add_offset if ds_info.get('cloud_clear', False): # clear-sky if bit 15-16 are 00 clear_sky_mask = (self['l2p_flags'][0] & 0b1100000000000000) != 0 clear_sky_mask = clear_sky_mask.rename({"ni": "x", "nj": "y"}) data = data.where(~clear_sky_mask) data.attrs.update(metadata) # Remove these attributes since they are no longer valid and can cause invalid value filling. data.attrs.pop('_FillValue', None) data.attrs.pop('valid_max', None) data.attrs.pop('valid_min', None) return data satpy-0.34.0/satpy/readers/agri_l1.py000066400000000000000000000226661420401153000174150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Geostationary Radiation Imager reader for the Level_1 HDF format. The files read by this reader are described in the official Real Time Data Service: http://fy4.nsmc.org.cn/data/en/data/realtime.html """ import logging from datetime import datetime import dask.array as da import numpy as np import xarray as xr from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.hdf5_utils import HDF5FileHandler logger = logging.getLogger(__name__) # info of 500 m, 1 km, 2 km and 4 km data RESOLUTION_LIST = [500, 1000, 2000, 4000] _COFF_list = [10991.5, 5495.5, 2747.5, 1373.5] _CFAC_list = [81865099.0, 40932549.0, 20466274.0, 10233137.0] _LOFF_list = [10991.5, 5495.5, 2747.5, 1373.5] _LFAC_list = [81865099.0, 40932549.0, 20466274.0, 10233137.0] PLATFORM_NAMES = {'FY4A': 'FY-4A', 'FY4B': 'FY-4B', 'FY4C': 'FY-4C'} def scale(dn, slope, offset): """Convert digital number (DN) to calibrated quantity through scaling. Args: dn: Raw detector digital number slope: Slope offset: Offset Returns: Scaled data """ ref = dn * slope + offset ref = ref.clip(min=0) ref.attrs = dn.attrs return ref def apply_lut(data, lut): """Calibrate digital number (DN) by applying a LUT. 
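A minimal illustration of the lookup (values are invented; counts falling
outside the table map to the NaN entry appended at its end)::

    lut = np.array([200.0, 210.0, 220.0])
    counts = xr.DataArray(da.from_array(np.array([[0, 2], [1, 3]])), dims=('y', 'x'))
    bt = apply_lut(counts, lut)  # values become [[200., 220.], [210., nan]]
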
Args: data: Raw detector digital number lut: the look up table Returns: Calibrated quantity """ # append nan to the end of lut for fillvalue lut = np.append(lut, np.nan) data.data = da.where(data.data > lut.shape[0], lut.shape[0] - 1, data.data) res = data.data.map_blocks(_getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) return res def _getitem(block, lut): return lut[block] class HDF_AGRI_L1(HDF5FileHandler): """AGRI l1 file handler.""" def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(HDF_AGRI_L1, self).__init__(filename, filename_info, filetype_info) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" ds_name = dataset_id['name'] logger.debug('Reading in get_dataset %s.', ds_name) file_key = ds_info.get('file_key', ds_name) data = self.get(file_key) if data.ndim >= 2: data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) data = self.calibrate(data, ds_info, ds_name, file_key) self.adjust_attrs(data, ds_info) return data def adjust_attrs(self, data, ds_info): """Adjust the attrs of the data.""" satname = PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name']) data.attrs.update({'platform_name': satname, 'sensor': self['/attr/Sensor Identification Code'].lower(), 'orbital_parameters': { 'satellite_nominal_latitude': self['/attr/NOMCenterLat'].item(), 'satellite_nominal_longitude': self['/attr/NOMCenterLon'].item(), 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}}) data.attrs.update(ds_info) # remove attributes that could be confusing later data.attrs.pop('FillValue', None) data.attrs.pop('Intercept', None) data.attrs.pop('Slope', None) def calibrate(self, data, ds_info, ds_name, file_key): """Calibrate the data.""" # Check if calibration is present, if not assume dataset is an angle calibration = ds_info.get('calibration') # Return raw data in case of counts or no calibration if calibration in ('counts', None): data.attrs['units'] = ds_info['units'] ds_info['valid_range'] = data.attrs['valid_range'] elif calibration == 'reflectance': channel_index = int(file_key[-2:]) - 1 data = self.calibrate_to_reflectance(data, channel_index, ds_info) elif calibration == 'brightness_temperature': data = self.calibrate_to_bt(data, ds_info, ds_name) elif calibration == 'radiance': raise NotImplementedError("Calibration to radiance is not supported.") # Apply range limits, but not for counts or we convert to float! 
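        # (masking with .where() below inserts NaNs and therefore promotes the data to float;
        # counts instead keep their integer dtype and carry an explicit _FillValue)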
if calibration != 'counts': data = data.where((data >= min(data.attrs['valid_range'])) & (data <= max(data.attrs['valid_range']))) else: data.attrs['_FillValue'] = data.attrs['FillValue'].item() return data def calibrate_to_reflectance(self, data, channel_index, ds_info): """Calibrate to reflectance [%].""" logger.debug("Calibrating to reflectances") # using the corresponding SCALE and OFFSET cal_coef = 'CALIBRATION_COEF(SCALE+OFFSET)' num_channel = self.get(cal_coef).shape[0] if num_channel == 1: # only channel_2, resolution = 500 m channel_index = 0 data.attrs['scale_factor'] = self.get(cal_coef)[channel_index, 0].values.item() data.attrs['add_offset'] = self.get(cal_coef)[channel_index, 1].values.item() data = scale(data, data.attrs['scale_factor'], data.attrs['add_offset']) data *= 100 ds_info['valid_range'] = (data.attrs['valid_range'] * data.attrs['scale_factor'] + data.attrs['add_offset']) ds_info['valid_range'] = ds_info['valid_range'] * 100 return data def calibrate_to_bt(self, data, ds_info, ds_name): """Calibrate to Brightness Temperatures [K].""" logger.debug("Calibrating to brightness_temperature") lut_key = ds_info.get('lut_key', ds_name) lut = self.get(lut_key) # the value of dn is the index of brightness_temperature data = apply_lut(data, lut) ds_info['valid_range'] = lut.attrs['valid_range'] return data def get_area_def(self, key): """Get the area definition.""" # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf res = key['resolution'] pdict = {} pdict['coff'] = _COFF_list[RESOLUTION_LIST.index(res)] pdict['loff'] = _LOFF_list[RESOLUTION_LIST.index(res)] pdict['cfac'] = _CFAC_list[RESOLUTION_LIST.index(res)] pdict['lfac'] = _LFAC_list[RESOLUTION_LIST.index(res)] pdict['a'] = self.file_content['/attr/dEA'] * 1E3 # equator radius (m) pdict['b'] = pdict['a'] * (1 - 1 / self.file_content['/attr/dObRecFlat']) # polar radius (m) pdict['h'] = self.file_content['/attr/NOMSatHeight'] # the altitude of satellite (m) pdict['ssp_lon'] = self.file_content['/attr/NOMCenterLon'] pdict['nlines'] = self.file_content['/attr/RegLength'] pdict['ncols'] = self.file_content['/attr/RegWidth'] pdict['scandir'] = 'S2N' b500 = ['C02'] b1000 = ['C01', 'C03'] b2000 = ['C04', 'C05', 'C06', 'C07'] pdict['a_desc'] = "AGRI {} area".format(self.filename_info['observation_type']) if key['name'] in b500: pdict['a_name'] = self.filename_info['observation_type']+'_500m' pdict['p_id'] = 'FY-4A, 500m' elif key['name'] in b1000: pdict['a_name'] = self.filename_info['observation_type']+'_1000m' pdict['p_id'] = 'FY-4A, 1000m' elif key['name'] in b2000: pdict['a_name'] = self.filename_info['observation_type']+'_2000m' pdict['p_id'] = 'FY-4A, 2000m' else: pdict['a_name'] = self.filename_info['observation_type']+'_4000m' pdict['p_id'] = 'FY-4A, 4000m' pdict['coff'] = pdict['coff'] + 0.5 pdict['nlines'] = pdict['nlines'] - 1 pdict['ncols'] = pdict['ncols'] - 1 pdict['loff'] = (pdict['loff'] - self.file_content['/attr/End Line Number'] + 0.5) area_extent = get_area_extent(pdict) area_extent = (area_extent[0], area_extent[1], area_extent[2], area_extent[3]) pdict['nlines'] = pdict['nlines'] + 1 pdict['ncols'] = pdict['ncols'] + 1 area = get_area_definition(pdict, area_extent) return area @property def start_time(self): """Get the start time.""" start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z' return datetime.strptime(start_time, 
'%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') satpy-0.34.0/satpy/readers/ahi_hsd.py000066400000000000000000000704311420401153000174670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Himawari Imager (AHI) standard format data reader. References: - Himawari-8/9 Himawari Standard Data User's Guide - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/spsg_ahi.html Time Information **************** AHI observations use the idea of a "scheduled" time and an "observation time. The "scheduled" time is when the instrument was told to record the data, usually at a specific and consistent interval. The "observation" time is when the data was actually observed. Scheduled time can be accessed from the `scheduled_time` metadata key and observation time from the `start_time` key. """ import logging import os import warnings from datetime import datetime, timedelta import dask.array as da import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import ( apply_rad_correction, get_earth_radius, get_geostationary_mask, get_user_calibration_factors, np2str, unzip_file, ) AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16") logger = logging.getLogger('ahi_hsd') # Basic information block: _BASIC_INFO_TYPE = np.dtype([("hblock_number", "u1"), ("blocklength", " no temperature data = da.where(data == 0, np.float32(np.nan), data) cwl = self._header['block5']["central_wave_length"][0] * 1e-6 c__ = self._header['calibration']["speed_of_light"][0] h__ = self._header['calibration']["planck_constant"][0] k__ = self._header['calibration']["boltzmann_constant"][0] a__ = (h__ * c__) / (k__ * cwl) b__ = ((2 * h__ * c__ ** 2) / (data * 1.0e6 * cwl ** 5)) + 1 Te_ = a__ / da.log(b__) c0_ = self._header['calibration']["c0_rad2tb_conversion"][0] c1_ = self._header['calibration']["c1_rad2tb_conversion"][0] c2_ = self._header['calibration']["c2_rad2tb_conversion"][0] return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0) satpy-0.34.0/satpy/readers/ahi_l1b_gridded_bin.py000066400000000000000000000222671420401153000217050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Himawari Imager (AHI) gridded format data reader. This data comes in a flat binary format on a fixed grid, and needs to have calibration coefficients applied to it in order to retrieve reflectance or BT. LUTs can be downloaded at: ftp://hmwr829gr.cr.chiba-u.ac.jp/gridded/FD/support/ This data is gridded from the original Himawari geometry. To our knowledge, only full disk grids are available, not for the Meso or Japan rapid scans. References: - AHI gridded data website: http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html """ import logging import os import dask.array as da import numpy as np import xarray as xr from appdirs import AppDirs from pyresample import geometry from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import unzip_file # Hardcoded address of the reflectance and BT look-up tables AHI_REMOTE_LUTS = 'http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/count2tbb_v102.tgz' # Full disk image sizes for each spatial resolution AHI_FULLDISK_SIZES = {0.005: {'x_size': 24000, 'y_size': 24000}, 0.01: {'x_size': 12000, 'y_size': 12000}, 0.02: {'x_size': 6000, 'y_size': 6000}} # Geographic extent of the full disk area in degrees AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] # Resolutions of each channel type AHI_CHANNEL_RES = {'vis': 0.01, 'ext': 0.005, 'sir': 0.02, 'tir': 0.02} # List of LUT filenames AHI_LUT_NAMES = ['ext.01', 'vis.01', 'vis.02', 'vis.03', 'sir.01', 'sir.02', 'tir.01', 'tir.02', 'tir.03', 'tir.04', 'tir.05', 'tir.06', 'tir.07', 'tir.08', 'tir.09', 'tir.10'] logger = logging.getLogger('ahi_grid') class AHIGriddedFileHandler(BaseFileHandler): """AHI gridded format reader. This data is flat binary, big endian unsigned short. It covers the region 85E -> 205E, 60N -> 60S at variable resolution: - 0.005 degrees for Band 3 - 0.01 degrees for Bands 1, 2 and 4 - 0.02 degrees for all other bands. These are approximately equivalent to 0.5, 1 and 2km. Files can either be zipped with bz2 compression (like the HSD format data), or can be uncompressed flat binary. 
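    A hedged example of reading such a file through Satpy; the file name is a
    placeholder and the reader is assumed to be registered as
    ``ahi_l1b_gridded_bin``::

        from satpy import Scene

        scn = Scene(filenames=['202201010300.tir.01.fld.geoss.bz2'],
                    reader='ahi_l1b_gridded_bin')
        scn.load(['B13'])  # assuming the provided file corresponds to band 13
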
""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(AHIGriddedFileHandler, self).__init__(filename, filename_info, filetype_info) self._unzipped = unzip_file(self.filename) # Assume file is not zipped if self._unzipped: # But if it is, set the filename to point to unzipped temp file self.filename = self._unzipped # Get the band name, needed for finding area and dimensions self.product_name = filetype_info['file_type'] self.areaname = filename_info['area'] self.sensor = 'ahi' self.res = AHI_CHANNEL_RES[self.product_name[:3]] if self.areaname == 'fld': self.nlines = AHI_FULLDISK_SIZES[self.res]['y_size'] self.ncols = AHI_FULLDISK_SIZES[self.res]['x_size'] else: raise NotImplementedError("Only full disk data is supported.") # Set up directory path for the LUTs app_dirs = AppDirs('ahi_gridded_luts', 'satpy', '1.0.2') self.lut_dir = os.path.expanduser(app_dirs.user_data_dir) + '/' self.area = None def __del__(self): """Delete the object.""" if self._unzipped and os.path.exists(self.filename): os.remove(self.filename) def _load_lut(self): """Determine if LUT is available and, if not, download it.""" # First, check that the LUT is available. If not, download it. lut_file = self.lut_dir + self.product_name if not os.path.exists(lut_file): self._get_luts() try: # Load file, it has 2 columns: DN + Refl/BT. We only need latter. lut = np.loadtxt(lut_file)[:, 1] except FileNotFoundError: raise FileNotFoundError("No LUT file found:", lut_file) return lut def _calibrate(self, data): """Load calibration from LUT and apply.""" lut = self._load_lut() # LUT may truncate NaN values, so manually set those in data lut_len = len(lut) data = np.where(data < lut_len - 1, data, np.nan) return lut[data.astype(np.uint16)] @staticmethod def _download_luts(file_name): """Download LUTs from remote server.""" import shutil import urllib # Set up an connection and download with urllib.request.urlopen(AHI_REMOTE_LUTS) as response: # nosec with open(file_name, 'wb') as out_file: shutil.copyfileobj(response, out_file) @staticmethod def _untar_luts(tarred_file, outdir): """Uncompress downloaded LUTs, which are a tarball.""" import tarfile tar = tarfile.open(tarred_file) tar.extractall(outdir) tar.close() os.remove(tarred_file) def _get_luts(self): """Download the LUTs needed for count->Refl/BT conversion.""" import pathlib import shutil import tempfile # Check that the LUT directory exists pathlib.Path(self.lut_dir).mkdir(parents=True, exist_ok=True) logger.info("Download AHI LUTs files and store in directory %s", self.lut_dir) tempdir = tempfile.gettempdir() fname = os.path.join(tempdir, 'tmp.tgz') # Download the LUTs self._download_luts(fname) # The file is tarred, untar and remove the downloaded file self._untar_luts(fname, tempdir) lut_dl_dir = os.path.join(tempdir, 'count2tbb_v102/') # Loop over the LUTs and copy to the correct location for lutfile in AHI_LUT_NAMES: shutil.move(os.path.join(lut_dl_dir, lutfile), os.path.join(self.lut_dir, lutfile)) shutil.rmtree(lut_dl_dir) def get_dataset(self, key, info): """Get the dataset.""" return self.read_band(key, info) def get_area_def(self, dsid): """Get the area definition. This is fixed, but not defined in the file. So we must generate it ourselves with some assumptions. 
""" if self.areaname == 'fld': area_extent = AHI_FULLDISK_EXTENT else: raise NotImplementedError("Reader only supports full disk data.") proj_param = 'EPSG:4326' area = geometry.AreaDefinition('gridded_himawari', 'A gridded Himawari area', 'longlat', proj_param, self.ncols, self.nlines, area_extent) self.area = area return area def _read_data(self, fp_): """Read raw binary data from file.""" return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype='>u2', shape=(self.nlines, self.ncols), mode='r'), chunks=CHUNK_SIZE) def read_band(self, key, info): """Read the data.""" with open(self.filename, "rb") as fp_: res = self._read_data(fp_) # Calibrate res = self.calibrate(res, key['calibration']) # Update metadata new_info = dict( units=info['units'], standard_name=info['standard_name'], wavelength=info['wavelength'], resolution=info['resolution'], id=key, name=key['name'], sensor=self.sensor, ) res = xr.DataArray(res, attrs=new_info, dims=['y', 'x']) return res def calibrate(self, data, calib): """Calibrate the data.""" if calib == 'counts': return data if calib == 'reflectance' or calib == 'brightness_temperature': return self._calibrate(data) raise NotImplementedError("ERROR: Unsupported calibration.", "Only counts, reflectance and ", "brightness_temperature calibration", "are supported.") satpy-0.34.0/satpy/readers/ami_l1b.py000066400000000000000000000275271420401153000174040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Meteorological Imager reader for the Level 1b NetCDF4 format.""" import logging from datetime import datetime, timedelta import dask.array as da import numpy as np import pyproj import xarray as xr from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp from satpy import CHUNK_SIZE from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import apply_rad_correction, get_user_calibration_factors logger = logging.getLogger(__name__) PLATFORM_NAMES = { 'GK-2A': 'GEO-KOMPSAT-2A', 'GK-2B': 'GEO-KOMPSAT-2B', } class AMIL1bNetCDF(BaseFileHandler): """Base reader for AMI L1B NetCDF4 files. AMI data contains GSICS adjustment factors for the IR bands. By default, these are not applied. 
If you wish to apply them then you must set the calibration mode appropriately:: import satpy import glob filenames = glob.glob('*FLDK*.dat') scene = satpy.Scene(filenames, reader='ahi_hsd', reader_kwargs={'calib_mode': 'gsics'}) scene.load(['B13']) In addition, the GSICS website (and other sources) also supply radiance correction coefficients like so:: radiance_corr = (radiance_orig - corr_offset) / corr_slope If you wish to supply such coefficients, pass 'user_calibration' and a dictionary containing per-channel slopes and offsets as a reader_kwarg:: user_calibration={'chan': {'slope': slope, 'offset': offset}} If you do not have coefficients for a particular band, then by default the slope will be set to 1 .and the offset to 0.:: import satpy import glob # Load bands 7, 14 and 15, but we only have coefs for 7+14 calib_dict = {'WV063': {'slope': 0.99, 'offset': 0.002}, 'IR087': {'slope': 1.02, 'offset': -0.18}} filenames = glob.glob('*.nc') scene = satpy.Scene(filenames, reader='ami_l1b', reader_kwargs={'user_calibration': calib_dict, 'calib_mode': 'file') # IR133 will not have radiance correction applied. scene.load(['WV063', 'IR087', 'IR133']) By default these updated coefficients are not used. In most cases, setting `calib_mode` to `file` is required in order to use external coefficients. """ def __init__(self, filename, filename_info, filetype_info, calib_mode='PYSPECTRAL', allow_conditional_pixels=False, user_calibration=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(AMIL1bNetCDF, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'dim_image_x': CHUNK_SIZE, 'dim_image_y': CHUNK_SIZE}) self.nc = self.nc.rename({'dim_image_x': 'x', 'dim_image_y': 'y'}) platform_shortname = self.nc.attrs['satellite_name'] self.platform_name = PLATFORM_NAMES.get(platform_shortname) self.sensor = 'ami' self.band_name = filetype_info['file_type'].upper() self.allow_conditional_pixels = allow_conditional_pixels calib_mode_choices = ('FILE', 'PYSPECTRAL', 'GSICS') if calib_mode.upper() not in calib_mode_choices: raise ValueError('Invalid calibration mode: {}. Choose one of {}'.format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() self.user_calibration = user_calibration @property def start_time(self): """Get observation start time.""" base = datetime(2000, 1, 1, 12, 0, 0) return base + timedelta(seconds=self.nc.attrs['observation_start_time']) @property def end_time(self): """Get observation end time.""" base = datetime(2000, 1, 1, 12, 0, 0) return base + timedelta(seconds=self.nc.attrs['observation_end_time']) def get_area_def(self, dsid): """Get area definition for this file.""" pdict = {} pdict['a'] = self.nc.attrs['earth_equatorial_radius'] pdict['b'] = self.nc.attrs['earth_polar_radius'] pdict['h'] = self.nc.attrs['nominal_satellite_height'] - pdict['a'] pdict['ssp_lon'] = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? 
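        # the conversion assumes sub_longitude is stored in radians;
        # get_orbital_parameters below treats the same attribute the same way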
pdict['ncols'] = self.nc.attrs['number_of_columns'] pdict['nlines'] = self.nc.attrs['number_of_lines'] obs_mode = self.nc.attrs['observation_mode'] resolution = self.nc.attrs['channel_spatial_resolution'] # Example offset: 11000.5 # the 'get_area_extent' will handle this half pixel for us pdict['cfac'] = self.nc.attrs['cfac'] pdict['coff'] = self.nc.attrs['coff'] pdict['lfac'] = -self.nc.attrs['lfac'] pdict['loff'] = self.nc.attrs['loff'] pdict['scandir'] = 'N2S' pdict['a_name'] = 'ami_geos_{}'.format(obs_mode.lower()) pdict['a_desc'] = 'AMI {} Area at {} resolution'.format(obs_mode, resolution) pdict['p_id'] = 'ami_fixed_grid' area_extent = get_area_extent(pdict) fg_area_def = get_area_definition(pdict, area_extent) return fg_area_def def get_orbital_parameters(self): """Collect orbital parameters for this file.""" a = float(self.nc.attrs['earth_equatorial_radius']) b = float(self.nc.attrs['earth_polar_radius']) # nominal_satellite_height seems to be from the center of the earth h = float(self.nc.attrs['nominal_satellite_height']) - a lon_0 = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? sc_position = self.nc['sc_position'].attrs['sc_position_center_pixel'] # convert ECEF coordinates to lon, lat, alt ecef = pyproj.Proj(proj='geocent', a=a, b=b) lla = pyproj.Proj(proj='latlong', a=a, b=b) sc_position = pyproj.transform( ecef, lla, sc_position[0], sc_position[1], sc_position[2]) orbital_parameters = { 'projection_longitude': float(lon_0), 'projection_latitude': 0.0, 'projection_altitude': h, 'satellite_actual_longitude': sc_position[0], 'satellite_actual_latitude': sc_position[1], 'satellite_actual_altitude': sc_position[2], # meters } return orbital_parameters def get_dataset(self, dataset_id, ds_info): """Load a dataset as a xarray DataArray.""" file_key = ds_info.get('file_key', dataset_id['name']) data = self.nc[file_key] # hold on to attributes for later attrs = data.attrs # highest 2 bits are data quality flags # 00=no error # 01=available under conditions # 10=outside the viewing area # 11=Error exists if self.allow_conditional_pixels: qf = data & 0b1000000000000000 else: qf = data & 0b1100000000000000 # mask DQF bits bits = attrs['number_of_valid_bits_per_pixel'] data &= 2**bits - 1 # only take "no error" pixels as valid data = data.where(qf == 0) # Calibration values from file, fall back to built-in if unavailable gain = self.nc.attrs['DN_to_Radiance_Gain'] offset = self.nc.attrs['DN_to_Radiance_Offset'] if dataset_id['calibration'] in ('radiance', 'reflectance', 'brightness_temperature'): data = gain * data + offset if self.calib_mode == 'GSICS': data = self._apply_gsics_rad_correction(data) elif isinstance(self.user_calibration, dict): data = self._apply_user_rad_correction(data) if dataset_id['calibration'] == 'reflectance': # depends on the radiance calibration above rad_to_alb = self.nc.attrs['Radiance_to_Albedo_c'] if ds_info.get('units') == '%': rad_to_alb *= 100 data = data * rad_to_alb elif dataset_id['calibration'] == 'brightness_temperature': data = self._calibrate_ir(dataset_id, data) elif dataset_id['calibration'] not in ('counts', 'radiance'): raise ValueError("Unknown calibration: '{}'".format(dataset_id['calibration'])) for attr_name in ('standard_name', 'units'): attrs[attr_name] = ds_info[attr_name] attrs.update(dataset_id.to_dict()) attrs['orbital_parameters'] = self.get_orbital_parameters() attrs['platform_name'] = self.platform_name attrs['sensor'] = self.sensor data.attrs = attrs return data def _calibrate_ir(self, dataset_id, data): 
"""Calibrate radiance data to BTs using either pyspectral or in-file coefficients.""" if self.calib_mode == 'PYSPECTRAL': # depends on the radiance calibration above # Convert um to m^-1 (SI units for pyspectral) wn = 1 / (dataset_id['wavelength'][1] / 1e6) # Convert cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. bt_data = rad2temp(wn, data.data * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays data.data = da.from_array(bt_data, chunks=data.data.chunks) else: # new versions of pyspectral can do dask arrays data.data = bt_data else: # IR coefficients from the file # Channel specific c0 = self.nc.attrs['Teff_to_Tbb_c0'] c1 = self.nc.attrs['Teff_to_Tbb_c1'] c2 = self.nc.attrs['Teff_to_Tbb_c2'] # These should be fixed, but load anyway cval = self.nc.attrs['light_speed'] kval = self.nc.attrs['Boltzmann_constant_k'] hval = self.nc.attrs['Plank_constant_h'] # Compute wavenumber as cm-1 wn = (10000 / dataset_id['wavelength'][1]) * 100 # Convert radiance to effective brightness temperature e1 = (2 * hval * cval * cval) * np.power(wn, 3) e2 = (data.data * 1e-5) t_eff = ((hval * cval / kval) * wn) / np.log((e1 / e2) + 1) # Now convert to actual brightness temperature bt_data = c0 + c1 * t_eff + c2 * t_eff * t_eff data.data = bt_data return data def _apply_gsics_rad_correction(self, data): """Retrieve GSICS factors from L1 file and apply to radiance.""" rad_slope = self.nc['gsics_coeff_slope'][0] rad_offset = self.nc['gsics_coeff_intercept'][0] data = apply_rad_correction(data, rad_slope, rad_offset) return data def _apply_user_rad_correction(self, data): """Retrieve user-supplied radiance correction and apply.""" rad_slope, rad_offset = get_user_calibration_factors(self.band_name, self.user_calibration) data = apply_rad_correction(data, rad_slope, rad_offset) return data satpy-0.34.0/satpy/readers/amsr2_l1b.py000066400000000000000000000052771420401153000176600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Reader for AMSR2 L1B files in HDF5 format.""" from satpy.readers.hdf5_utils import HDF5FileHandler class AMSR2L1BFileHandler(HDF5FileHandler): """File handler for AMSR2 l1b.""" def get_metadata(self, ds_id, ds_info): """Get the metadata.""" var_path = ds_info['file_key'] info = getattr(self[var_path], 'attrs', {}) info.update(ds_info) info.update({ "shape": self.get_shape(ds_id, ds_info), "units": self[var_path + "/attr/UNIT"], "platform_name": self["/attr/PlatformShortName"], "sensor": self["/attr/SensorShortName"].lower(), "start_orbit": int(self["/attr/StartOrbitNumber"]), "end_orbit": int(self["/attr/StopOrbitNumber"]), }) info.update(ds_id.to_dict()) return info def get_shape(self, ds_id, ds_info): """Get output shape of specified dataset.""" var_path = ds_info['file_key'] shape = self[var_path + '/shape'] if ((ds_info.get('standard_name') == "longitude" or ds_info.get('standard_name') == "latitude") and ds_id['resolution'] == 10000): return shape[0], int(shape[1] / 2) return shape def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" var_path = ds_info['file_key'] fill_value = ds_info.get('fill_value', 65535) metadata = self.get_metadata(ds_id, ds_info) data = self[var_path] if ((ds_info.get('standard_name') == "longitude" or ds_info.get('standard_name') == "latitude") and ds_id['resolution'] == 10000): # FIXME: Lower frequency channels need CoRegistration parameters applied data = data[:, ::2] * self[var_path + "/attr/SCALE FACTOR"] else: data = data * self[var_path + "/attr/SCALE FACTOR"] data = data.where(data != fill_value) data.attrs.update(metadata) return data satpy-0.34.0/satpy/readers/amsr2_l2.py000066400000000000000000000033551420401153000175120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Reader for AMSR2 L2 files in HDF5 format.""" from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler class AMSR2L2FileHandler(AMSR2L1BFileHandler): """AMSR2 level 2 file handler.""" def mask_dataset(self, ds_info, data): """Mask data with the fill value.""" fill_value = ds_info.get('fill_value', 65535) return data.where(data != fill_value) def scale_dataset(self, var_path, data): """Scale data with the scale factor attribute.""" return data * self[var_path + "/attr/SCALE FACTOR"] def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" var_path = ds_info['file_key'] data = self[var_path].squeeze() data = self.mask_dataset(ds_info, data) data = self.scale_dataset(var_path, data) if ds_info.get('name') == "ssw": data = data.rename({'dim_0': 'y', 'dim_1': 'x'}) metadata = self.get_metadata(ds_id, ds_info) data.attrs.update(metadata) return data satpy-0.34.0/satpy/readers/amsr2_l2_gaasp.py000066400000000000000000000247141420401153000206670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GCOM-W1 AMSR2 Level 2 files from the GAASP software. GAASP output files are in the NetCDF4 format. Software is provided by NOAA and is also distributed by the CSPP group. More information on the products supported by this reader can be found here: https://www.star.nesdis.noaa.gov/jpss/gcom.php for more information. GAASP includes both swath/granule products and gridded products. Swath products are provided in files with "MBT", "OCEAN", "SNOW", or "SOIL" in the filename. Gridded products are in files with "SEAICE-SH" or "SEAICE-NH" in the filename where SH stands for South Hemisphere and NH stands for North Hemisphere. These gridded products are on the EASE2 North pole and South pole grids. See https://nsidc.org/ease/ease-grid-projection-gt for more details. Note that since SEAICE products can be on both the northern or southern hemisphere or both depending on what files are provided to Satpy, this reader appends a `_NH` and `_SH` suffix to all variable names that are dynamically discovered from the provided files. """ import logging from datetime import datetime from typing import Tuple import numpy as np import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class GAASPFileHandler(BaseFileHandler): """Generic file handler for GAASP output files.""" y_dims: Tuple[str, ...] = ( 'Number_of_Scans', ) x_dims: Tuple[str, ...] 
= ( 'Number_of_hi_rez_FOVs', 'Number_of_low_rez_FOVs', ) time_dims = ( 'Time_Dimension', ) is_gridded = False dim_resolutions = { 'Number_of_hi_rez_FOVs': 5000, 'Number_of_low_rez_FOVs': 10000, } @cached_property def nc(self): """Get the xarray dataset for this file.""" chunks = {dim_name: CHUNK_SIZE for dim_name in self.y_dims + self.x_dims + self.time_dims} nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks=chunks) if len(self.time_dims) == 1: nc = nc.rename({self.time_dims[0]: 'time'}) return nc @property def start_time(self): """Get start time of observation.""" try: return self.filename_info['start_time'] except KeyError: time_str = self.nc.attrs['time_coverage_start'] return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get end time of observation.""" try: return self.filename_info['end_time'] except KeyError: time_str = self.nc.attrs['time_coverage_end'] return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def sensor_names(self): """Sensors who have data in this file.""" return {self.nc.attrs['instrument_name'].lower()} @property def platform_name(self): """Name of the platform whose data is stored in this file.""" return self.nc.attrs['platform_name'] def _get_var_name_without_suffix(self, var_name): var_suffix = self.filetype_info.get('var_suffix', "") if var_suffix: var_name = var_name[:-len(var_suffix)] return var_name def _scale_data(self, data_arr, attrs): # handle scaling # take special care for integer/category fields scale_factor = attrs.pop('scale_factor', 1.) add_offset = attrs.pop('add_offset', 0.) scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: data_arr = data_arr * scale_factor + add_offset return data_arr, attrs @staticmethod def _nan_for_dtype(data_arr_dtype): # don't force the conversion from 32-bit float to 64-bit float # if we don't have to if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): return np.timedelta64('NaT') if np.issubdtype(data_arr_dtype, np.datetime64): return np.datetime64('NaT') return np.nan def _fill_data(self, data_arr, attrs): fill_value = attrs.pop('_FillValue', None) is_int = np.issubdtype(data_arr.dtype, np.integer) has_flag_comment = 'comment' in attrs if is_int and has_flag_comment: # category product fill_out = fill_value attrs['_FillValue'] = fill_out else: fill_out = self._nan_for_dtype(data_arr.dtype) if fill_value is not None: data_arr = data_arr.where(data_arr != fill_value, fill_out) return data_arr, attrs def get_dataset(self, dataid, ds_info): """Load, scale, and collect metadata for the specified DataID.""" orig_var_name = self._get_var_name_without_suffix(dataid['name']) data_arr = self.nc[orig_var_name].copy() attrs = data_arr.attrs.copy() data_arr, attrs = self._scale_data(data_arr, attrs) data_arr, attrs = self._fill_data(data_arr, attrs) attrs.update({ 'platform_name': self.platform_name, 'sensor': sorted(self.sensor_names)[0], 'start_time': self.start_time, 'end_time': self.end_time, }) dim_map = dict(zip(data_arr.dims, ('y', 'x'))) # rename dims data_arr = data_arr.rename(**dim_map) # drop coords, the base reader will recreate these data_arr = data_arr.reset_coords(drop=True) data_arr.attrs = attrs return data_arr def _available_if_this_file_type(self, configured_datasets): for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than 
the previous # file handler so let's yield early yield is_avail, ds_info continue yield self.file_type_matches(ds_info['file_type']), ds_info def _add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: if 'longitude' in coord_name.lower(): lon_coord = coord_name if 'latitude' in coord_name.lower(): lat_coord = coord_name ds_info['coordinates'] = [lon_coord, lat_coord] def _get_ds_info_for_data_arr(self, var_name, data_arr): var_suffix = self.filetype_info.get('var_suffix', "") ds_info = { 'file_type': self.filetype_info['file_type'], 'name': var_name + var_suffix, } x_dim_name = data_arr.dims[1] if x_dim_name in self.dim_resolutions: ds_info['resolution'] = self.dim_resolutions[x_dim_name] if not self.is_gridded and data_arr.coords: self._add_lonlat_coords(data_arr, ds_info) return ds_info def _is_2d_yx_data_array(self, data_arr): has_y_dim = data_arr.dims[0] in self.y_dims has_x_dim = data_arr.dims[1] in self.x_dims return has_y_dim and has_x_dim def _available_new_datasets(self): possible_vars = list(self.nc.data_vars.items()) + list(self.nc.coords.items()) for var_name, data_arr in possible_vars: if data_arr.ndim != 2: # we don't currently handle non-2D variables continue if not self._is_2d_yx_data_array(data_arr): # we need 'traditional' y/x dimensions currently continue ds_info = self._get_ds_info_for_data_arr(var_name, data_arr) yield True, ds_info def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. See :meth:`satpy.readers.file_handlers.BaseHandler.available_datasets` for more information. """ yield from self._available_if_this_file_type(configured_datasets) yield from self._available_new_datasets() class GAASPGriddedFileHandler(GAASPFileHandler): """GAASP file handler for gridded products like SEAICE.""" y_dims = ( 'Number_of_Y_Dimension', ) x_dims = ( 'Number_of_X_Dimension', ) dim_resolutions = { 'Number_of_X_Dimension': 10000, } is_gridded = True @staticmethod def _get_extents(data_shape, res): # assume data is centered at projection center x_min = -(data_shape[1] / 2.0) * res x_max = (data_shape[1] / 2.0) * res y_min = -(data_shape[0] / 2.0) * res y_max = (data_shape[0] / 2.0) * res return x_min, y_min, x_max, y_max def get_area_def(self, dataid): """Create area definition for equirectangular projected data.""" var_suffix = self.filetype_info.get('var_suffix', '') area_name = 'gaasp{}'.format(var_suffix) orig_var_name = self._get_var_name_without_suffix(dataid['name']) data_shape = self.nc[orig_var_name].shape crs = CRS(self.filetype_info['grid_epsg']) res = dataid['resolution'] extent = self._get_extents(data_shape, res) area_def = AreaDefinition( area_name, area_name, area_name, crs, data_shape[1], data_shape[0], extent ) return area_def class GAASPLowResFileHandler(GAASPFileHandler): """GAASP file handler for files that only have low resolution products.""" x_dims = ( 'Number_of_low_rez_FOVs', ) dim_resolutions = { 'Number_of_low_rez_FOVs': 10000, } satpy-0.34.0/satpy/readers/ascat_l2_soilmoisture_bufr.py000066400000000000000000000115441420401153000234140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """ASCAT Soil moisture product reader for BUFR messages. Based on the IASI L2 SO2 BUFR reader. """ import logging from datetime import datetime import dask.array as da import numpy as np import xarray as xr try: import eccodes as ec except ImportError as e: raise ImportError( """Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes. Error: """, e) from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger('AscatSoilMoistureBufr') class AscatSoilMoistureBufr(BaseFileHandler): """File handler for the ASCAT Soil Moisture BUFR product.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialise the file handler for the ASCAT Soil Moisture BUFR data.""" super(AscatSoilMoistureBufr, self).__init__(filename, filename_info, filetype_info) start_time, end_time = self.get_start_end_date() self.metadata = {} self.metadata['start_time'] = start_time self.metadata['end_time'] = end_time @property def start_time(self): """Return the start time of data acqusition.""" return self.metadata['start_time'] @property def end_time(self): """Return the end time of data acquisition.""" return self.metadata['end_time'] @property def platform_name(self): """Return spacecraft name.""" return self.filename_info['platform'] def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): """Extract the minimum and maximum dates from a single bufr message.""" ec.codes_set(bufr, 'unpack', 1) size = ec.codes_get(bufr, 'numberOfSubsets') years = np.resize(ec.codes_get_array(bufr, 'year'), size) months = np.resize(ec.codes_get_array(bufr, 'month'), size) days = np.resize(ec.codes_get_array(bufr, 'day'), size) hours = np.resize(ec.codes_get_array(bufr, 'hour'), size) minutes = np.resize(ec.codes_get_array(bufr, 'minute'), size) seconds = np.resize(ec.codes_get_array(bufr, 'second'), size) for year, month, day, hour, minute, second in zip(years, months, days, hours, minutes, seconds): time_stamp = datetime(year, month, day, hour, minute, second) date_min = time_stamp if not date_min else min(date_min, time_stamp) date_max = time_stamp if not date_max else max(date_max, time_stamp) return date_min, date_max def get_start_end_date(self): """Get the first and last date from the bufr file.""" with open(self.filename, 'rb') as fh: date_min = None date_max = None while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break date_min, date_max = self.extract_msg_date_extremes(bufr, date_min, date_max) return date_min, date_max def get_bufr_data(self, key): """Get BUFR data by key.""" attr = np.array([]) with open(self.filename, 'rb') as fh: while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break ec.codes_set(bufr, 'unpack', 1) tmp = ec.codes_get_array(bufr, key, float) if len(tmp) == 1: size = ec.codes_get(bufr, 'numberOfSubsets') tmp = np.resize(tmp, size) attr = np.append(attr, tmp) ec.codes_release(bufr) return attr def get_dataset(self, dataset_id, dataset_info): """Get dataset using the BUFR key in dataset_info.""" arr = self.get_bufr_data(dataset_info['key']) if 'fill_value' in dataset_info: 
arr[arr == dataset_info['fill_value']] = np.nan arr = da.from_array(arr, chunks=CHUNK_SIZE) xarr = xr.DataArray(arr, dims=["y"], name=dataset_info['name']) xarr.attrs['platform_name'] = self.platform_name xarr.attrs.update(dataset_info) return xarr satpy-0.34.0/satpy/readers/avhrr_l1b_gaclac.py000066400000000000000000000275311420401153000212450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reading and calibrating GAC and LAC AVHRR data. .. todo:: Fine grained calibration Radiance output """ import logging from datetime import datetime, timedelta import dask.array as da import numpy as np import pygac.utils import xarray as xr from pygac.gac_klm import GACKLMReader from pygac.gac_pod import GACPODReader from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) spacecrafts = {7: "NOAA 15", 3: "NOAA 16", 13: "NOAA 18", 15: "NOAA 19"} AVHRR3_CHANNEL_NAMES = {"1": 0, "2": 1, "3A": 2, "3B": 3, "4": 4, "5": 5} AVHRR2_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4} AVHRR_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3} ANGLES = ('sensor_zenith_angle', 'sensor_azimuth_angle', 'solar_zenith_angle', 'solar_azimuth_angle', 'sun_sensor_azimuth_difference_angle') class GACLACFile(BaseFileHandler): """Reader for GAC and LAC data.""" def __init__(self, filename, filename_info, filetype_info, start_line=None, end_line=None, strip_invalid_coords=True, interpolate_coords=True, **reader_kwargs): """Init the file handler. Args: start_line: User defined start scanline end_line: User defined end scanline strip_invalid_coords: Strip scanlines with invalid coordinates in the beginning/end of the orbit interpolate_coords: Interpolate coordinates from every eighth pixel to all pixels. reader_kwargs: More keyword arguments to be passed to pygac.Reader. See the pygac documentation for available options. 
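        A sketch of how these options are typically passed through Satpy; the
        reader name and file name are assumptions::

            from satpy import Scene

            scn = Scene(filenames=['NSS.GHRR.NN.D09229.S1221.E1407.B2201011.GC'],
                        reader='avhrr_l1b_gaclac',
                        reader_kwargs={'start_line': 100, 'end_line': 2000,
                                       'strip_invalid_coords': True})
            scn.load(['4'])
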
""" super(GACLACFile, self).__init__( filename, filename_info, filetype_info) self.start_line = start_line self.end_line = end_line self.strip_invalid_coords = strip_invalid_coords self.interpolate_coords = interpolate_coords self.reader_kwargs = reader_kwargs self.creation_site = filename_info.get('creation_site') self.reader = None self.calib_channels = None self.counts = None self.angles = None self.qual_flags = None self.first_valid_lat = None self.last_valid_lat = None self._start_time = filename_info['start_time'] self._end_time = datetime.combine(filename_info['start_time'].date(), filename_info['end_time'].time()) if self._end_time < self._start_time: self._end_time += timedelta(days=1) self.platform_id = filename_info['platform_id'] if self.platform_id in ['NK', 'NL', 'NM', 'NN', 'NP', 'M1', 'M2', 'M3']: if filename_info.get('transfer_mode') == 'GHRR': self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES self.sensor = 'avhrr-3' elif self.platform_id in ['NC', 'ND', 'NF', 'NH', 'NJ']: if filename_info.get('transfer_mode') == 'GHRR': self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR2_CHANNEL_NAMES self.sensor = 'avhrr-2' else: if filename_info.get('transfer_mode') == 'GHRR': self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR_CHANNEL_NAMES self.sensor = 'avhrr' self.filename_info = filename_info def read_raw_data(self): """Create a pygac reader and read raw data from the file.""" if self.reader is None: self.reader = self.reader_class( interpolate_coords=self.interpolate_coords, creation_site=self.creation_site, **self.reader_kwargs) self.reader.read(self.filename) if np.all(self.reader.mask): raise ValueError('All data is masked out') def get_dataset(self, key, info): """Get the dataset.""" self.read_raw_data() if key['name'] in ['latitude', 'longitude']: # Lats/lons are buffered by the reader if key['name'] == 'latitude': _, data = self.reader.get_lonlat() else: data, _ = self.reader.get_lonlat() # If coordinate interpolation is disabled, only every eighth # pixel has a lat/lon coordinate xdim = 'x' if self.interpolate_coords else 'x_every_eighth' xcoords = None elif key['name'] in ANGLES: data = self._get_angle(key) xdim = 'x' if self.interpolate_coords else 'x_every_eighth' xcoords = None elif key['name'] == 'qual_flags': data = self.reader.get_qual_flags() xdim = 'num_flags' xcoords = ['Scan line number', 'Fatal error flag', 'Insufficient data for calibration', 'Insufficient data for calibration', 'Solar contamination of blackbody in channels 3', 'Solar contamination of blackbody in channels 4', 'Solar contamination of blackbody in channels 5'] elif key['name'].upper() in self.chn_dict: # Read and calibrate channel data data = self._get_channel(key) xdim = 'x' xcoords = None else: raise ValueError('Unknown dataset: {}'.format(key['name'])) # Update start/end time using the actual scanline timestamps times = self.reader.get_times() self._start_time = times[0].astype(datetime) self._end_time = times[-1].astype(datetime) # Select user-defined scanlines and/or strip invalid coordinates if (self.start_line is not None or self.end_line is not None or self.strip_invalid_coords): data, times = self.slice(data=data, times=times) # Create data array chunk_cols = data.shape[1] chunk_lines = int((CHUNK_SIZE ** 2) / chunk_cols) res = xr.DataArray(da.from_array(data, chunks=(chunk_lines, chunk_cols)), dims=['y', xdim], attrs=info) if xcoords: res[xdim] 
= xcoords # Update dataset attributes self._update_attrs(res) # Add scanline acquisition times res['acq_time'] = ('y', times) res['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' return res def slice(self, data, times): """Select user-defined scanlines and/or strip invalid coordinates. Furthermore, update scanline timestamps. Args: data: Data to be sliced times: Scanline timestamps Returns: Sliced data and timestamps """ sliced = self._slice(data) times = self._slice(times) self._start_time = times[0].astype(datetime) self._end_time = times[-1].astype(datetime) return sliced, times def _slice(self, data): """Select user-defined scanlines and/or strip invalid coordinates. Returns: Sliced data """ start_line = self.start_line if self.start_line is not None else 0 end_line = self.end_line if self.end_line is not None else 0 # Strip scanlines with invalid coordinates if self.strip_invalid_coords: first_valid_lat, last_valid_lat = self._strip_invalid_lat() else: first_valid_lat = last_valid_lat = None # Check and correct user-defined scanlines, if possible start_line, end_line = pygac.utils.check_user_scanlines( start_line=start_line, end_line=end_line, first_valid_lat=first_valid_lat, last_valid_lat=last_valid_lat, along_track=data.shape[0] ) # Slice data sliced = pygac.utils.slice_channel(data, start_line=start_line, end_line=end_line, first_valid_lat=first_valid_lat, last_valid_lat=last_valid_lat) if isinstance(sliced, tuple): # pygac < 1.4.0 sliced = sliced[0] return sliced def _get_channel(self, key): """Get channel and buffer results.""" name = key['name'] calibration = key['calibration'] if calibration == 'counts': if self.counts is None: counts = self.reader.get_counts() self.counts = counts channels = self.counts elif calibration in ['reflectance', 'brightness_temperature']: if self.calib_channels is None: self.calib_channels = self.reader.get_calibrated_channels() channels = self.calib_channels else: raise ValueError('Unknown calibration: {}'.format(calibration)) return channels[:, :, self.chn_dict[name.upper()]] def _get_qual_flags(self): """Get quality flags and buffer results.""" if self.qual_flags is None: self.qual_flags = self.reader.get_qual_flags() return self.qual_flags def _get_angle(self, key): """Get angles and buffer results.""" if self.angles is None: sat_azi, sat_zenith, sun_azi, sun_zenith, rel_azi = self.reader.get_angles() self.angles = {'sensor_zenith_angle': sat_zenith, 'sensor_azimuth_angle': sat_azi, 'solar_zenith_angle': sun_zenith, 'solar_azimuth_angle': sun_azi, 'sun_sensor_azimuth_difference_angle': rel_azi} return self.angles[key['name']] def _strip_invalid_lat(self): """Strip scanlines with invalid coordinates in the beginning/end of the orbit. Returns: First and last scanline with valid latitudes. 
""" if self.first_valid_lat is None: _, lats = self.reader.get_lonlat() start, end = pygac.utils.strip_invalid_lat(lats) self.first_valid_lat, self.last_valid_lat = start, end return self.first_valid_lat, self.last_valid_lat def _update_attrs(self, res): """Update dataset attributes.""" for attr in self.reader.meta_data: res.attrs[attr] = self.reader.meta_data[attr] res.attrs['platform_name'] = self.reader.spacecraft_name res.attrs['orbit_number'] = self.filename_info.get('orbit_number', None) res.attrs['sensor'] = self.sensor try: res.attrs['orbital_parameters'] = {'tle': self.reader.get_tle_lines()} except (IndexError, RuntimeError): pass @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time satpy-0.34.0/satpy/readers/caliop_l2_cloud.py000066400000000000000000000074561420401153000211310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . # type: ignore """Interface to CALIOP L2 HDF4 cloud products.""" import logging import os.path import re from datetime import datetime from pyhdf.SD import SD, SDC from satpy.dataset import Dataset from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class HDF4BandReader(BaseFileHandler): """CALIOP v3 HDF4 reader.""" def __init__(self, filename, filename_info, filetype_info): """Initialze file handler.""" super(HDF4BandReader, self).__init__(filename, filename_info, filetype_info) self.lons = None self.lats = None self._start_time = None self._end_time = None self.get_filehandle() self._start_time = filename_info['start_time'] logger.debug('Retrieving end time from metadata array') self.get_end_time() def get_end_time(self): """Get observation end time from file metadata.""" mda_dict = self.filehandle.attributes() core_mda = mda_dict['coremetadata'] end_time_str = self.parse_metadata_string(core_mda) self._end_time = datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") @staticmethod def parse_metadata_string(metadata_string): """Grab end time with regular expression.""" regex = r"STOP_DATE.+?VALUE\s*=\s*\"(.+?)\"" match = re.search(regex, metadata_string, re.DOTALL) end_time_str = match.group(1) return end_time_str def get_filehandle(self): """Get HDF4 filehandle.""" if os.path.exists(self.filename): self.filehandle = SD(self.filename, SDC.READ) logger.debug("Loading dataset {}".format(self.filename)) else: raise IOError("Path {} does not exist.".format(self.filename)) def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" if key['name'] in ['longitude', 'latitude']: logger.debug('Reading coordinate arrays.') if self.lons is None or self.lats is None: self.lons, self.lats = self.get_lonlats() if key['name'] == 'latitude': proj = Dataset(self.lats, id=key, **info) else: proj = Dataset(self.lons, id=key, 
**info) else: data = self.get_sds_variable(key['name']) proj = Dataset(data, id=key, **info) return proj def get_sds_variable(self, name): """Read variable from the HDF4 file.""" sds_obj = self.filehandle.select(name) data = sds_obj.get() return data def get_lonlats(self): """Get longitude and latitude arrays from the file.""" longitudes = self.get_sds_variable('Longitude') latitudes = self.get_sds_variable('Latitude') return longitudes, latitudes @property def start_time(self): """Get start time.""" return self._start_time @property def end_time(self): """Get end time.""" return self._end_time satpy-0.34.0/satpy/readers/clavrx.py000066400000000000000000000436011420401153000173660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to CLAVR-X HDF4 products.""" import logging import os from glob import glob from pathlib import Path from typing import Optional import netCDF4 import numpy as np import xarray as xr from pyresample import geometry from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.hdf4_utils import SDS, HDF4FileHandler LOG = logging.getLogger(__name__) CF_UNITS = { 'none': '1', } SENSORS = { 'MODIS': 'modis', 'VIIRS': 'viirs', 'AVHRR': 'avhrr', 'AHI': 'ahi', 'ABI': 'abi', } PLATFORMS = { 'SNPP': 'npp', 'HIM8': 'himawari8', 'HIM9': 'himawari9', 'H08': 'himawari8', 'H09': 'himawari9', 'G16': 'GOES-16', 'G17': 'GOES-17' } ROWS_PER_SCAN = { 'viirs': 16, 'modis': 10, } NADIR_RESOLUTION = { 'viirs': 742, 'modis': 1000, 'avhrr': 1050, 'ahi': 2000, 'abi': 2004, } def _get_sensor(sensor: str) -> str: """Get the sensor.""" for k, v in SENSORS.items(): if k in sensor: return v raise ValueError("Unknown sensor '{}'".format(sensor)) def _get_platform(platform: str) -> str: """Get the platform.""" for k, v in PLATFORMS.items(): if k in platform: return v return platform def _get_rows_per_scan(sensor: str) -> Optional[int]: """Get number of rows per scan.""" for k, v in ROWS_PER_SCAN.items(): if sensor.startswith(k): return v return None def _remove_attributes(attrs: dict) -> dict: """Remove attributes that described data before scaling.""" old_attrs = ['unscaled_missing', 'SCALED_MIN', 'SCALED_MAX', 'SCALED_MISSING'] for attr_key in old_attrs: attrs.pop(attr_key, None) return attrs class _CLAVRxHelper: """A base class for the CLAVRx File Handlers.""" @staticmethod def _scale_data(data_arr: xr.DataArray, scale_factor: float, add_offset: float) -> xr.DataArray: """Scale data, if needed.""" scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0) if scaling_needed: data_arr = data_arr * scale_factor + add_offset return data_arr @staticmethod def _get_data(data: xr.DataArray, dataset_id: dict) -> xr.DataArray: """Get a dataset.""" if dataset_id.get('resolution'): data.attrs['resolution'] = dataset_id['resolution'] attrs = data.attrs.copy() fill = 
attrs.get('_FillValue') factor = attrs.pop('scale_factor', (np.ones(1, dtype=data.dtype))[0]) offset = attrs.pop('add_offset', (np.zeros(1, dtype=data.dtype))[0]) valid_range = attrs.get('valid_range', [None]) if isinstance(valid_range, np.ndarray): attrs["valid_range"] = valid_range.tolist() flags = not data.attrs.get("SCALED", 1) and any(data.attrs.get("flag_values", [None])) if not flags: data = data.where(data != fill) data = _CLAVRxHelper._scale_data(data, factor, offset) # don't need _FillValue if it has been applied. attrs.pop('_FillValue', None) if all(valid_range): valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset) valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset) if flags: data = data.where((data >= valid_min) & (data <= valid_max), fill) else: data = data.where((data >= valid_min) & (data <= valid_max)) attrs['valid_range'] = [valid_min, valid_max] data.attrs = _remove_attributes(attrs) return data @staticmethod def _area_extent(x, y, h): x_l = h * x[0] x_r = h * x[-1] y_l = h * y[-1] y_u = h * y[0] ncols = x.shape[0] nlines = y.shape[0] x_half = (x_r - x_l) / (ncols - 1) / 2. y_half = (y_u - y_l) / (nlines - 1) / 2. area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) return area_extent, ncols, nlines @staticmethod def _read_pug_fixed_grid(projection, distance_multiplier=1.0) -> dict: """Read from recent PUG format, where axes are in meters.""" a = projection.semi_major_axis h = projection.perspective_point_height b = projection.semi_minor_axis lon_0 = projection.longitude_of_projection_origin sweep_axis = projection.sweep_angle_axis[0] proj_dict = {'a': float(a) * distance_multiplier, 'b': float(b) * distance_multiplier, 'lon_0': float(lon_0), 'h': float(h) * distance_multiplier, 'proj': 'geos', 'units': 'm', 'sweep': sweep_axis} return proj_dict @staticmethod def _find_input_nc(filename: str, l1b_base: str) -> str: file_path = Path(filename) dirname = file_path.parent l1b_filename = dirname.joinpath(l1b_base + '.nc') if l1b_filename.exists(): return str(l1b_filename) glob_pat = os.path.join(dirname, l1b_base + '*R20*.nc') LOG.debug("searching for {0}".format(glob_pat)) l1b_filenames = list(glob(glob_pat)) if not l1b_filenames: raise IOError("Could not find navigation donor for {0}" " in same directory as CLAVR-x data".format(l1b_base)) LOG.debug('Candidate nav donors: {0}'.format(repr(l1b_filenames))) return l1b_filenames[0] @staticmethod def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: """Read a fixed grid. 
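Only used for geostationary (AHI/ABI-style) scenes; polar-orbiter files expose longitude/latitude datasets instead.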
CLAVR-x does not transcribe fixed grid parameters to its output We have to recover that information from the original input file, which is partially named as L1B attribute example attributes found in L2 CLAVR-x files: sensor = "AHI" ; platform = "HIM8" ; FILENAME = "clavrx_H08_20180719_1300.level2.hdf" ; L1B = "clavrx_H08_20180719_1300" ; """ LOG.debug("looking for corresponding input file for {0}" " to act as fixed grid navigation donor".format(l1b_attr)) l1b_path = _CLAVRxHelper._find_input_nc(filename, l1b_attr) LOG.info("Since CLAVR-x does not include fixed-grid parameters," " using input file {0} as donor".format(l1b_path)) l1b = netCDF4.Dataset(l1b_path) proj = None proj_var = l1b.variables.get("Projection", None) if proj_var is not None: # hsd2nc input typically used by CLAVR-x uses old-form km for axes/height LOG.debug("found hsd2nc-style draft PUG fixed grid specification") proj = _CLAVRxHelper._read_pug_fixed_grid(proj_var, 1000.0) if proj is None: # most likely to come into play for ABI cases proj_var = l1b.variables.get("goes_imager_projection", None) if proj_var is not None: LOG.debug("found cmip-style final PUG fixed grid specification") proj = _CLAVRxHelper._read_pug_fixed_grid(proj_var) if not proj: raise ValueError("Unable to recover projection information" " for {0}".format(filename)) h = float(proj['h']) x, y = l1b['x'], l1b['y'] area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h) # LOG.debug(repr(proj)) # LOG.debug(repr(area_extent)) area = geometry.AreaDefinition( 'ahi_geos', "AHI L2 file area", 'ahi_geos', proj, ncols, nlines, np.asarray(area_extent)) return area @staticmethod def get_metadata(sensor, platform, attrs: dict, ds_info: dict) -> dict: """Get metadata.""" i = {} i.update(attrs) i.update(ds_info) flag_meanings = i.get('flag_meanings', None) if not i.get('SCALED', 1) and not flag_meanings: i['flag_meanings'] = '' i.setdefault('flag_values', [None]) u = i.get('units') if u in CF_UNITS: # CF compliance i['units'] = CF_UNITS[u] if u.lower() == "none": i['units'] = "1" i['sensor'] = sensor i['platform_name'] = platform rps = _get_rows_per_scan(sensor) if rps: i['rows_per_scan'] = rps i['reader'] = 'clavrx' return i class CLAVRXHDF4FileHandler(HDF4FileHandler, _CLAVRxHelper): """A file handler for CLAVRx files.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(CLAVRXHDF4FileHandler, self).__init__(filename, filename_info, filetype_info) @property def start_time(self): """Get the start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get the end time.""" return self.filename_info.get('end_time', self.start_time) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" var_name = ds_info.get('file_key', dataset_id['name']) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, data.attrs, ds_info) return data def get_nadir_resolution(self, sensor): """Get nadir resolution.""" for k, v in NADIR_RESOLUTION.items(): if sensor.startswith(k): return v res = self.filename_info.get('resolution') if res.endswith('m'): return int(res[:-1]) elif res is not None: return int(res) def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) self.platform = _get_platform(self.file_content.get('/attr/platform')) nadir_resolution = self.get_nadir_resolution(self.sensor) coordinates = 
('longitude', 'latitude') handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): this_res = ds_info.get('resolution') this_coords = ds_info.get('coordinates') # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get('file_key', ds_info['name']) matches = self.file_type_matches(ds_info['file_type']) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != nadir_resolution: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded new_info['resolution'] = nadir_resolution if self._is_polar() and this_coords is None: new_info['coordinates'] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # add new datasets for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { 'file_type': self.filetype_info['file_type'], 'resolution': nadir_resolution, 'name': var_name, } if self._is_polar(): ds_info['coordinates'] = ['longitude', 'latitude'] yield True, ds_info def get_shape(self, dataset_id, ds_info): """Get the shape.""" var_name = ds_info.get('file_key', dataset_id['name']) return self[var_name + '/shape'] def _is_polar(self): l1b_att, inst_att = (str(self.file_content.get('/attr/L1B', None)), str(self.file_content.get('/attr/sensor', None))) return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXHDF4FileHandler, self).get_area_def(key) l1b_att = str(self.file_content.get('/attr/L1B', None)) area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) return area_def class CLAVRXNetCDFFileHandler(_CLAVRxHelper, BaseFileHandler): """File Handler for CLAVRX netcdf files.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(CLAVRXNetCDFFileHandler, self).__init__(filename, filename_info, filetype_info, ) self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=False, decode_coords=True, chunks=CHUNK_SIZE) # y,x is used in satpy, bands rather than channel using in xrimage self.nc = self.nc.rename_dims({'scan_lines_along_track_direction': "y", 'pixel_elements_along_scan_direction': "x"}) self.platform = _get_platform( self.filename_info.get('platform_shortname', None)) self.sensor = self.nc.attrs.get('sensor', None) # coordinates need scaling and valid_range (mask_and_scale won't work on valid_range) self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"], {"name": "latitude"}) self.nc.coords["longitude"] = _CLAVRxHelper._get_data(self.nc.coords["longitude"], {"name": "longitude"}) def _get_ds_info_for_data_arr(self, var_name): ds_info = { 'file_type': self.filetype_info['file_type'], 'name': var_name, } return ds_info def _is_2d_yx_data_array(self, data_arr): has_y_dim = data_arr.dims[0] == "y" has_x_dim = data_arr.dims[1] == "x" return has_y_dim and has_x_dim def _available_new_datasets(self, handled_vars): """Metadata for available variables other than BT.""" possible_vars = list(self.nc.items()) + list(self.nc.coords.items()) for var_name, data_arr in possible_vars: if var_name in handled_vars: continue if data_arr.ndim != 2: # we don't currently 
handle non-2D variables continue if not self._is_2d_yx_data_array(data_arr): # we need 'traditional' y/x dimensions currently continue ds_info = self._get_ds_info_for_data_arr(var_name) yield True, ds_info def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. See :meth:`satpy.readers.file_handlers.BaseHandler.available_datasets` for more information. """ handled_vars = set() for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue if self.file_type_matches(ds_info['file_type']): handled_vars.add(ds_info['name']) yield self.file_type_matches(ds_info['file_type']), ds_info yield from self._available_new_datasets(handled_vars) def _is_polar(self): l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)), str(self.nc.attrs.get('sensor', None))) return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXNetCDFFileHandler, self).get_area_def(key) l1b_att = str(self.nc.attrs.get('L1B', None)) return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" var_name = ds_info.get('name', dataset_id['name']) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, data.attrs, ds_info) return data def __getitem__(self, item): """Wrap around `self.nc[item]`.""" data = self.nc[item] return data satpy-0.34.0/satpy/readers/cmsaf_claas2.py000066400000000000000000000056251420401153000204110ustar00rootroot00000000000000"""Module containing CMSAF CLAAS v2 FileHandler.""" import datetime import pyresample.geometry from .netcdf_utils import NetCDF4FileHandler class CLAAS2(NetCDF4FileHandler): """Handle CMSAF CLAAS-2 files.""" def __init__(self, *args, **kwargs): """Initialise class.""" super().__init__(*args, **kwargs, cache_handle=False, auto_maskandscale=True) @property def start_time(self): """Get start time from file.""" # datetime module can't handle timezone identifier return datetime.datetime.fromisoformat( self["/attr/time_coverage_start"].rstrip("Z")) @property def end_time(self): """Get end time from file.""" return datetime.datetime.fromisoformat( self["/attr/time_coverage_end"].rstrip("Z")) def available_datasets(self, configured_datasets=None): """Yield a collection of available datasets. Return a generator that will yield the datasets available in the loaded files. See docstring in parent class for specification details. """ # this method should work for any (CF-conform) NetCDF file, should it # be somewhere more generically available? Perhaps in the # `NetCDF4FileHandler`? yield from super().available_datasets(configured_datasets) data_vars = [k for k in self.file_content if k + "/dimensions" in self.file_content] for k in data_vars: # if it doesn't have a y-dimension we're not interested if "y" not in self.file_content[k + "/dimensions"]: continue ds_info = self._get_dsinfo(k) yield (True, ds_info) def _get_dsinfo(self, var): """Get metadata for variable. Return metadata dictionary for variable ``var``. 
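For a variable named ``cph``, for example, this yields ``{"name": "cph", "file_type": <file type from the YAML configuration>}`` (illustrative example).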
""" ds_info = {"name": var, "file_type": self.filetype_info["file_type"]} # attributes for this data variable attrs = {k[len(f"{k:s}/attr")+1]: v for (k, v) in self.file_content.items() if k.startswith(f"{k:s}/attr")} # we don't need "special" attributes in our metadata here for unkey in {"_FillValue", "add_offset", "scale_factor"}: attrs.pop(unkey, None) return ds_info def get_dataset(self, dataset_id, info): """Get the dataset.""" ds = self[dataset_id['name']] if "time" in ds.dims: return ds.squeeze(["time"]) return ds def get_area_def(self, dataset_id): """Get the area definition.""" return pyresample.geometry.AreaDefinition( "some_area_name", "on-the-fly area", "geos", self["/attr/CMSAF_proj4_params"], self["/dimension/x"], self["/dimension/y"], self["/attr/CMSAF_area_extent"]) satpy-0.34.0/satpy/readers/electrol_hrit.py000066400000000000000000000352031420401153000207250ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HRIT format reader. References: ELECTRO-L GROUND SEGMENT MSU-GS INSTRUMENT, LRIT/HRIT Mission Specific Implementation, February 2012 """ import logging from datetime import datetime import numpy as np import xarray as xr from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.hrit_base import ( HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function, time_cds_short, ) logger = logging.getLogger('hrit_electrol') # goms implementation: key_header = np.dtype([('key_number', 'u1'), ('seed', '>f8')]) segment_identification = np.dtype([('GP_SC_ID', '>i2'), ('spectral_channel_id', '>i1'), ('segment_sequence_number', '>u2'), ('planned_start_segment_number', '>u2'), ('planned_end_segment_number', '>u2'), ('data_field_representation', '>i1')]) image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), ('line_mean_acquisition', [('days', '>u2'), ('milliseconds', '>u4')]), ('line_validity', 'u1'), ('line_radiometric_quality', 'u1'), ('line_geometric_quality', 'u1')]) goms_variable_length_headers = { image_segment_line_quality: 'image_segment_line_quality'} goms_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text'} goms_hdr_map = base_hdr_map.copy() goms_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('X', '>f8', (8, )), ('Y', '>f8', (8, )), ('Z', '>f8', (8, )), ('VX', '>f8', (8, )), ('VY', '>f8', (8, )), ('VZ', '>f8', (8, ))]) attitude_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('XofSpinAxis', '>f8', (8, )), ('YofSpinAxis', '>f8', (8, )), ('ZofSpinAxis', '>f8', (8, ))]) cuc_time = np.dtype([('coarse', 'u1', (4, )), ('fine', 'u1', (3, ))]) time_cds_expanded = np.dtype([('days', '>u2'), 
('milliseconds', '>u4'), ('microseconds', '>u2'), ('nanoseconds', '>u2')]) satellite_status = np.dtype([("TagType", " 16777216: lut = lut.astype(np.float64) else: lut = lut.astype(np.float32) lut /= 1000 lut[0] = np.nan # Dask/XArray don't support indexing in 2D (yet). res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.where(data > 0) return res def get_area_def(self, dsid): """Get the area definition of the band.""" pdict = {} pdict['cfac'] = np.int32(self.mda['cfac']) pdict['lfac'] = np.int32(self.mda['lfac']) pdict['coff'] = np.float32(self.mda['coff']) pdict['loff'] = np.float32(self.mda['loff']) pdict['a'] = 6378169.00 pdict['b'] = 6356583.80 pdict['h'] = 35785831.00 pdict['scandir'] = 'N2S' pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] pdict['nlines'] = int(self.mda['number_of_lines']) pdict['ncols'] = int(self.mda['number_of_columns']) pdict['loff'] = pdict['nlines'] - pdict['loff'] pdict['a_name'] = 'geosgoms' pdict['a_desc'] = 'Electro-L/GOMS channel area' pdict['p_id'] = 'goms' area_extent = get_area_extent(pdict) area = get_area_definition(pdict, area_extent) self.area = area return area satpy-0.34.0/satpy/readers/eps_l1b.py000066400000000000000000000364221420401153000174170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for eps level 1b data. 
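A minimal usage sketch through :class:`satpy.Scene` (``my_eps_files`` is a hypothetical
list of EPS level 1b filenames)::

    from satpy import Scene
    scn = Scene(filenames=my_eps_files, reader="avhrr_l1b_eps")
    scn.load(["4"], calibration="brightness_temperature")
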
Uses xml files as a format description.""" import logging from functools import lru_cache import dask.array as da import numpy as np import xarray as xr from dask.delayed import delayed from pyresample.geometry import SwathDefinition from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy._config import get_config_path from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.xmlformat import XMLFormat logger = logging.getLogger(__name__) C1 = 1.191062e-05 # mW/(m2*sr*cm-4) C2 = 1.4387863 # K/cm-1 def radiance_to_bt(arr, wc_, a__, b__): """Convert to BT in K.""" return a__ + b__ * (C2 * wc_ / (da.log(1 + (C1 * (wc_ ** 3) / arr)))) def radiance_to_refl(arr, solar_flux): """Convert to reflectances in %.""" return arr * np.pi * 100.0 / solar_flux record_class = ["Reserved", "mphr", "sphr", "ipr", "geadr", "giadr", "veadr", "viadr", "mdr"] def read_records(filename): """Read *filename* without scaling it afterwards.""" format_fn = get_config_path("eps_avhrrl1b_6.5.xml") form = XMLFormat(format_fn) grh_dtype = np.dtype([("record_class", "|i1"), ("INSTRUMENT_GROUP", "|i1"), ("RECORD_SUBCLASS", "|i1"), ("RECORD_SUBCLASS_VERSION", "|i1"), ("RECORD_SIZE", ">u4"), ("RECORD_START_TIME", "S6"), ("RECORD_STOP_TIME", "S6")]) max_lines = np.floor((CHUNK_SIZE ** 2) / 2048) dtypes = [] cnt = 0 counts = [] classes = [] prev = None with open(filename, "rb") as fdes: while True: grh = np.fromfile(fdes, grh_dtype, 1) if grh.size == 0: break rec_class = record_class[int(grh["record_class"])] sub_class = grh["RECORD_SUBCLASS"][0] expected_size = int(grh["RECORD_SIZE"]) bare_size = expected_size - grh_dtype.itemsize try: the_type = form.dtype((rec_class, sub_class)) # the_descr = grh_dtype.descr + the_type.descr except KeyError: the_type = np.dtype([('unknown', 'V%d' % bare_size)]) the_descr = grh_dtype.descr + the_type.descr the_type = np.dtype(the_descr) if the_type.itemsize < expected_size: padding = [('unknown%d' % cnt, 'V%d' % (expected_size - the_type.itemsize))] cnt += 1 the_descr += padding new_dtype = np.dtype(the_descr) key = (rec_class, sub_class) if key == prev: counts[-1] += 1 else: dtypes.append(new_dtype) counts.append(1) classes.append(key) prev = key fdes.seek(expected_size - grh_dtype.itemsize, 1) sections = {} offset = 0 for dtype, count, rec_class in zip(dtypes, counts, classes): fdes.seek(offset) if rec_class == ('mdr', 2): record = da.from_array(np.memmap(fdes, mode='r', dtype=dtype, shape=count, offset=offset), chunks=(max_lines,)) else: record = np.fromfile(fdes, dtype=dtype, count=count) offset += dtype.itemsize * count if rec_class in sections: logger.debug('Multiple records for ', str(rec_class)) sections[rec_class] = np.hstack((sections[rec_class], record)) else: sections[rec_class] = record return sections, form def create_xarray(arr): """Create xarray with correct dimensions.""" res = arr res = xr.DataArray(res, dims=['y', 'x']) return res class EPSAVHRRFile(BaseFileHandler): """Eps level 1b reader for AVHRR data.""" spacecrafts = {"M01": "Metop-B", "M02": "Metop-A", "M03": "Metop-C", } sensors = {"AVHR": "avhrr-3"} units = {"reflectance": "%", "brightness_temperature": "K"} def __init__(self, filename, filename_info, filetype_info): """Initialize FileHandler.""" super(EPSAVHRRFile, self).__init__( filename, filename_info, filetype_info) self.area = None self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] self.form = None self.scanlines = None self.pixels = None self.sections = None def _read_all(self): 
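"""Read all records from the file and set the scanline and pixel counts."""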
logger.debug("Reading %s", self.filename) self.sections, self.form = read_records(self.filename) self.scanlines = self['TOTAL_MDR'] if self.scanlines != len(self.sections[('mdr', 2)]): logger.warning("Number of declared records doesn't match number of scanlines in the file.") self.scanlines = len(self.sections[('mdr', 2)]) self.pixels = self["EARTH_VIEWS_PER_SCANLINE"] def __getitem__(self, key): """Get value for given key.""" for altkey in self.form.scales: try: try: return self.sections[altkey][key] * self.form.scales[altkey][key] except TypeError: val = self.sections[altkey][key].item().decode().split("=")[1] try: return float(val) * self.form.scales[altkey][key].item() except ValueError: return val.strip() except (KeyError, ValueError): continue raise KeyError("No matching value for " + str(key)) def keys(self): """List of reader's keys.""" keys = [] for val in self.form.scales.values(): keys += val.dtype.fields.keys() return keys @lru_cache(maxsize=1) def get_full_lonlats(self): """Get the interpolated longitudes and latitudes.""" raw_lats = np.hstack((self["EARTH_LOCATION_FIRST"][:, [0]], self["EARTH_LOCATIONS"][:, :, 0], self["EARTH_LOCATION_LAST"][:, [0]])) raw_lons = np.hstack((self["EARTH_LOCATION_FIRST"][:, [1]], self["EARTH_LOCATIONS"][:, :, 1], self["EARTH_LOCATION_LAST"][:, [1]])) return self._interpolate(raw_lons, raw_lats) def _interpolate(self, lons_like, lats_like): nav_sample_rate = self["NAV_SAMPLE_RATE"] if nav_sample_rate == 20 and self.pixels == 2048: lons_like_1km, lats_like_1km = self._interpolate_20km_to_1km(lons_like, lats_like) lons_like_1km = da.from_delayed(lons_like_1km, dtype=lons_like.dtype, shape=(self.scanlines, self.pixels)) lats_like_1km = da.from_delayed(lats_like_1km, dtype=lats_like.dtype, shape=(self.scanlines, self.pixels)) return lons_like_1km, lats_like_1km raise NotImplementedError("Lon/lat and angle expansion not implemented for " + "sample rate = " + str(nav_sample_rate) + " and earth views = " + str(self.pixels)) @delayed(nout=2, pure=True) def _interpolate_20km_to_1km(self, lons, lats): # Note: delayed will cast input dask-arrays to numpy arrays (needed by metop20kmto1km). from geotiepoints import metop20kmto1km return metop20kmto1km(lons, lats) def _get_full_angles(self, solar_zenith, sat_zenith, solar_azimuth, sat_azimuth): nav_sample_rate = self["NAV_SAMPLE_RATE"] if nav_sample_rate == 20 and self.pixels == 2048: # Note: interpolation assumes second array values between -90 and 90 # Solar and satellite zenith is between 0 and 180. 
sun_azi, sun_zen = self._interpolate(solar_azimuth, solar_zenith - 90) sun_zen += 90 sat_azi, sat_zen = self._interpolate(sat_azimuth, sat_zenith - 90) sat_zen += 90 return sun_azi, sun_zen, sat_azi, sat_zen else: raise NotImplementedError("Angles expansion not implemented for " + "sample rate = " + str(nav_sample_rate) + " and earth views = " + str(self.pixels)) @lru_cache(maxsize=1) def get_full_angles(self): """Get the interpolated angles.""" solar_zenith = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [0]], self["ANGULAR_RELATIONS"][:, :, 0], self["ANGULAR_RELATIONS_LAST"][:, [0]])) sat_zenith = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [1]], self["ANGULAR_RELATIONS"][:, :, 1], self["ANGULAR_RELATIONS_LAST"][:, [1]])) solar_azimuth = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [2]], self["ANGULAR_RELATIONS"][:, :, 2], self["ANGULAR_RELATIONS_LAST"][:, [2]])) sat_azimuth = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [3]], self["ANGULAR_RELATIONS"][:, :, 3], self["ANGULAR_RELATIONS_LAST"][:, [3]])) return self._get_full_angles(solar_zenith, sat_zenith, solar_azimuth, sat_azimuth) def get_bounding_box(self): """Get bounding box.""" if self.sections is None: self._read_all() lats = np.hstack([self["EARTH_LOCATION_FIRST"][0, [0]], self["EARTH_LOCATION_LAST"][0, [0]], self["EARTH_LOCATION_LAST"][-1, [0]], self["EARTH_LOCATION_FIRST"][-1, [0]]]) lons = np.hstack([self["EARTH_LOCATION_FIRST"][0, [1]], self["EARTH_LOCATION_LAST"][0, [1]], self["EARTH_LOCATION_LAST"][-1, [1]], self["EARTH_LOCATION_FIRST"][-1, [1]]]) return lons.ravel(), lats.ravel() def get_dataset(self, key, info): """Get calibrated channel data.""" if self.sections is None: self._read_all() if key['name'] in ['longitude', 'latitude']: lons, lats = self.get_full_lonlats() if key['name'] == 'longitude': dataset = create_xarray(lons) else: dataset = create_xarray(lats) elif key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle', 'satellite_zenith_angle', 'satellite_azimuth_angle']: dataset = self._get_angle_dataarray(key) elif key['name'] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: dataset = self._get_calibrated_dataarray(key) else: logger.info("Can't load channel in eps_l1b: " + str(key['name'])) return dataset.attrs['platform_name'] = self.platform_name dataset.attrs['sensor'] = self.sensor_name if "calibration" in key: dataset.attrs["units"] = self.units[key["calibration"]] dataset.attrs.update(info) dataset.attrs.update(key.to_dict()) return dataset def _get_angle_dataarray(self, key): """Get an angle dataarray.""" sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles() if key['name'] == 'solar_zenith_angle': dataset = create_xarray(sun_zen) elif key['name'] == 'solar_azimuth_angle': dataset = create_xarray(sun_azi) if key['name'] == 'satellite_zenith_angle': dataset = create_xarray(sat_zen) elif key['name'] == 'satellite_azimuth_angle': dataset = create_xarray(sat_azi) return dataset @cached_property def three_a_mask(self): """Mask for 3A.""" return (self["FRAME_INDICATOR"] & 2 ** 16) != 2 ** 16 @cached_property def three_b_mask(self): """Mask for 3B.""" return (self["FRAME_INDICATOR"] & 2 ** 16) != 0 def _get_calibrated_dataarray(self, key): """Get a calibrated dataarray.""" if key['calibration'] not in ['reflectance', 'brightness_temperature', 'radiance']: raise ValueError('calibration type ' + str(key['calibration']) + ' is not supported!') mask = None channel_name = key['name'].upper() radiance_indices = {"1": 0, "2": 1, "3A": 2, "3B": 2, "4": 3, "5": 4} array = self["SCENE_RADIANCES"][:, 
radiance_indices[channel_name], :] if channel_name in ["1", "2", "3A"]: if key['calibration'] == 'reflectance': array = radiance_to_refl(array, self[f"CH{channel_name}_SOLAR_FILTERED_IRRADIANCE"]) if channel_name == "3A": mask = self.three_a_mask[:, np.newaxis] if channel_name in ["3B", "4", "5"]: if key['calibration'] == 'brightness_temperature': array = radiance_to_bt(array, self[f"CH{channel_name}_CENTRAL_WAVENUMBER"], self[f"CH{channel_name}_CONSTANT1"], self[f"CH{channel_name}_CONSTANT2_SLOPE"]) if channel_name == "3B": mask = self.three_b_mask[:, np.newaxis] dataset = create_xarray(array) if mask is not None: dataset = dataset.where(~mask) return dataset def get_lonlats(self): """Get lonlats.""" if self.area is None: lons, lats = self.get_full_lonlats() self.area = SwathDefinition(lons, lats) self.area.name = '_'.join([self.platform_name, str(self.start_time), str(self.end_time)]) return self.area @property def platform_name(self): """Get platform name.""" return self.spacecrafts[self["SPACECRAFT_ID"]] @property def sensor_name(self): """Get sensor name.""" return self.sensors[self["INSTRUMENT_ID"]] @property def start_time(self): """Get start time.""" # return datetime.strptime(self["SENSING_START"], "%Y%m%d%H%M%SZ") return self._start_time @property def end_time(self): """Get end time.""" # return datetime.strptime(self["SENSING_END"], "%Y%m%d%H%M%SZ") return self._end_time satpy-0.34.0/satpy/readers/eum_base.py000066400000000000000000000074241420401153000176520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for EUMETSAT satellite data.""" from datetime import datetime, timedelta import numpy as np # 6 bytes, 8 bytes, 10 bytes time_cds_short = [('Days', '>u2'), ('Milliseconds', '>u4')] time_cds = time_cds_short + [('Microseconds', '>u2')] time_cds_expanded = time_cds + [('Nanoseconds', '>u2')] issue_revision = [('Issue', np.uint16), ('Revision', np.uint16)] def timecds2datetime(tcds): """Convert time_cds-variables to datetime-object. Works both with a dictionary and a numpy record_array. """ days = int(tcds['Days']) milliseconds = int(tcds['Milliseconds']) try: microseconds = int(tcds['Microseconds']) except (KeyError, ValueError): microseconds = 0 try: microseconds += int(tcds['Nanoseconds']) / 1000. 
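# Nanoseconds only exist in the expanded CDS time format, hence the fallback below.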
except (KeyError, ValueError): pass reference = datetime(1958, 1, 1) delta = timedelta(days=days, milliseconds=milliseconds, microseconds=microseconds) return reference + delta def recarray2dict(arr): """Convert numpy record array to a dictionary.""" res = {} tcds_types = [time_cds_short, time_cds, time_cds_expanded] for dtuple in arr.dtype.descr: key = dtuple[0] ntype = dtuple[1] data = arr[key] if ntype in tcds_types: if data.size > 1: res[key] = np.array([timecds2datetime(item) for item in data.ravel()]).reshape(data.shape) else: res[key] = timecds2datetime(data) elif isinstance(ntype, list): res[key] = recarray2dict(data) else: if data.size == 1: data = data[0] if ntype[:2] == '|S': # Python2 and Python3 handle strings differently try: data = data.decode() except ValueError: data = None else: data = data.split(':')[0].strip() res[key] = data else: res[key] = data.squeeze() return res def get_service_mode(instrument_name, ssp_lon): """Get information about service mode for a given instrument and subsatellite longitude.""" service_modes = {'seviri': {'0.0': {'service_name': 'fes', 'service_desc': 'Full Earth Scanning service'}, '9.5': {'service_name': 'rss', 'service_desc': 'Rapid Scanning Service'}, '41.5': {'service_name': 'iodc', 'service_desc': 'Indian Ocean Data Coverage service'} }, 'fci': {'0.0': {'service_name': 'fdss', 'service_desc': 'Full Disk Scanning Service'}, '9.5': {'service_name': 'rss', 'service_desc': 'Rapid Scanning Service'}, }, } unknown_modes = {'service_name': 'unknown', 'service_desc': 'unknown'} return service_modes.get(instrument_name, unknown_modes).get('{:.1f}'.format(ssp_lon), unknown_modes) satpy-0.34.0/satpy/readers/fci_l1c_nc.py000066400000000000000000000560371420401153000200560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to MTG-FCI L1c NetCDF files. This module defines the :class:`FCIL1cNCFileHandler` file handler, to be used for reading Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) Level-1c data. FCI will fly on the MTG Imager (MTG-I) series of satellites, scheduled to be launched in 2022 by the earliest. For more information about FCI, see `EUMETSAT`_. For simulated test data to be used with this reader, see `test data release`_. For the Product User Guide (PUG) of the FCI L1c data, see `PUG`_. .. note:: This reader currently supports Full Disk High Spectral Resolution Imagery (FDHSI) files. Support for High Spatial Resolution Fast Imagery (HRFI) files will be implemented when corresponding test datasets will be available. Geolocation is based on information from the data files. It uses: * From the shape of the data variable ``data//measured/effective_radiance``, start and end line columns of current swath. * From the data variable ``data//measured/x``, the x-coordinates for the grid, in radians (azimuth angle positive towards West). 
* From the data variable ``data//measured/y``, the y-coordinates for the grid, in radians (elevation angle positive towards North). * From the attribute ``semi_major_axis`` on the data variable ``data/mtg_geos_projection``, the Earth equatorial radius * From the attribute ``inverse_flattening`` on the same data variable, the (inverse) flattening of the ellipsoid * From the attribute ``perspective_point_height`` on the same data variable, the geostationary altitude in the normalised geostationary projection * From the attribute ``longitude_of_projection_origin`` on the same data variable, the longitude of the projection origin * From the attribute ``sweep_angle_axis`` on the same, the sweep angle axis, see https://proj.org/operations/projections/geos.html From the pixel centre angles in radians and the geostationary altitude, the extremities of the lower left and upper right corners are calculated in units of arc length in m. This extent along with the number of columns and rows, the sweep angle axis, and a dictionary with equatorial radius, polar radius, geostationary altitude, and longitude of projection origin, are passed on to ``pyresample.geometry.AreaDefinition``, which then uses proj4 for the actual geolocation calculations. The reading routine supports channel data in counts, radiances, and (depending on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on the formulas indicated in `PUG`_. Radiance datasets are returned in units of radiance per unit wavenumber (mW m-2 sr-1 (cm-1)-1). Radiances can be converted to units of radiance per unit wavelength (W m-2 um-1 sr-1) by multiplying with the `radiance_unit_conversion_coefficient` dataset attribute. For each channel, it also supports a number of auxiliary datasets, such as the pixel quality, the index map and the related geometric and acquisition parameters: time, subsatellite latitude, subsatellite longitude, platform altitude, subsolar latitude, subsolar longitude, earth-sun distance, sun-satellite distance, swath number, and swath direction. All auxiliary data can be obtained by prepending the channel name such as ``"vis_04_pixel_quality"``. .. warning:: The API for the direct reading of pixel quality is temporary and likely to change. Currently, for each channel, the pixel quality is available by ``_pixel_quality``. In the future, they will likely all be called ``pixel_quality`` and disambiguated by a to-be-decided property in the `DataID`. .. _PUG: https://www-cdn.eumetsat.int/files/2020-07/pdf_mtg_fci_l1_pug.pdf .. _EUMETSAT: https://www.eumetsat.int/mtg-flexible-combined-imager # noqa: E501 .. _test data release: https://www.eumetsat.int/simulated-mtg-fci-l1c-enhanced-non-nominal-datasets """ from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy as np import xarray as xr from netCDF4 import default_fillvals from pyresample import geometry from satpy.readers._geos_area import get_geos_area_naming from satpy.readers.eum_base import get_service_mode from .netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) # dict containing all available auxiliary data parameters to be read using the index map. 
Keys are the # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { 'subsatellite_latitude': 'state/platform/subsatellite_latitude', 'subsatellite_longitude': 'state/platform/subsatellite_longitude', 'platform_altitude': 'state/platform/platform_altitude', 'subsolar_latitude': 'state/celestial/subsolar_latitude', 'subsolar_longitude': 'state/celestial/subsolar_longitude', 'earth_sun_distance': 'state/celestial/earth_sun_distance', 'sun_satellite_distance': 'state/celestial/sun_satellite_distance', 'time': 'time', 'swath_number': 'data/swath_number', 'swath_direction': 'data/swath_direction', } def _get_aux_data_name_from_dsname(dsname): aux_data_name = [key for key in AUX_DATA.keys() if key in dsname] if len(aux_data_name) > 0: return aux_data_name[0] return None def _get_channel_name_from_dsname(dsname): # FIXME: replace by .removesuffix after we drop support for Python < 3.9 if dsname.endswith("_pixel_quality"): channel_name = dsname[:-len("_pixel_quality")] elif dsname.endswith("_index_map"): channel_name = dsname[:-len("_index_map")] elif _get_aux_data_name_from_dsname(dsname) is not None: channel_name = dsname[:-len(_get_aux_data_name_from_dsname(dsname)) - 1] else: channel_name = dsname return channel_name class FCIL1cNCFileHandler(NetCDF4FileHandler): """Class implementing the MTG FCI L1c Filehandler. This class implements the Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) Level-1c NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` class using the :mod:`~satpy.Scene.load` method with the reader ``"fci_l1c_nc"``. """ # Platform names according to the MTG FCI L1 Product User Guide, # EUM/MTG/USR/13/719113 from 2019-06-27, pages 32 and 124, are MTI1, MTI2, # MTI3, and MTI4, but we want to use names such as described in WMO OSCAR # MTG-I1, MTG-I2, MTG-I3, and MTG-I4. # # After launch: translate to METEOSAT-xx instead? Not sure how the # numbering will be considering MTG-S1 and MTG-S2 will be launched # in-between. _platform_name_translate = { "MTI1": "MTG-I1", "MTI2": "MTG-I2", "MTI3": "MTG-I3", "MTI4": "MTG-I4"} def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info, cache_var_size=10000, cache_handle=True) logger.debug('Reading: {}'.format(self.filename)) logger.debug('Start: {}'.format(self.start_time)) logger.debug('End: {}'.format(self.end_time)) self._cache = {} @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info['end_time'] def get_dataset(self, key, info=None): """Load a dataset.""" logger.debug('Reading {} from {}'.format(key['name'], self.filename)) if "pixel_quality" in key['name']: return self._get_dataset_quality(key['name']) elif "index_map" in key['name']: return self._get_dataset_index_map(key['name']) elif _get_aux_data_name_from_dsname(key['name']) is not None: return self._get_dataset_aux_data(key['name']) elif any(lb in key['name'] for lb in {"vis_", "ir_", "nir_", "wv_"}): return self._get_dataset_measurand(key, info=info) else: raise ValueError("Unknown dataset key, not a channel, quality or auxiliary data: " f"{key['name']:s}") def _get_dataset_measurand(self, key, info=None): """Load dataset corresponding to channel measurement. Load a dataset when the key refers to a measurand, whether uncalibrated (counts) or calibrated in terms of brightness temperature, radiance, or reflectance. 
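In practice this is reached through ``Scene.load``, e.g. ``scn.load(["ir_105"], calibration="brightness_temperature")`` (illustrative; any FCI channel name and supported calibration can be used).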
""" # Get the dataset # Get metadata for given dataset measured = self.get_channel_measured_group_path(key['name']) data = self[measured + "/effective_radiance"] attrs = data.attrs.copy() info = info.copy() fv = attrs.pop( "FillValue", default_fillvals.get(data.dtype.str[1:], np.nan)) vr = attrs.get("valid_range", [-np.inf, np.inf]) if key['calibration'] == "counts": attrs["_FillValue"] = fv nfv = fv else: nfv = np.nan data = data.where(data >= vr[0], nfv) data = data.where(data <= vr[1], nfv) res = self.calibrate(data, key) # pre-calibration units no longer apply attrs.pop("units") # For each channel, the effective_radiance contains in the # "ancillary_variables" attribute the value "pixel_quality". In # FileYAMLReader._load_ancillary_variables, satpy will try to load # "pixel_quality" but is lacking the context from what group to load # it: in the FCI format, each channel group (data//measured) has # its own data variable 'pixel_quality'. # Until we can have multiple pixel_quality variables defined (for # example, with https://github.com/pytroll/satpy/pull/1088), rewrite # the ancillary variable to include the channel. See also # https://github.com/pytroll/satpy/issues/1171. if "pixel_quality" in attrs["ancillary_variables"]: attrs["ancillary_variables"] = attrs["ancillary_variables"].replace( "pixel_quality", key['name'] + "_pixel_quality") else: raise ValueError( "Unexpected value for attribute ancillary_variables, " "which the FCI file handler intends to rewrite (see " "https://github.com/pytroll/satpy/issues/1171 for why). " f"Expected 'pixel_quality', got {attrs['ancillary_variables']:s}") res.attrs.update(key.to_dict()) res.attrs.update(info) res.attrs.update(attrs) res.attrs["platform_name"] = self._platform_name_translate.get( self["/attr/platform"], self["/attr/platform"]) # remove unpacking parameters for calibrated data if key['calibration'] in ['brightness_temperature', 'reflectance']: res.attrs.pop("add_offset") res.attrs.pop("warm_add_offset") res.attrs.pop("scale_factor") res.attrs.pop("warm_scale_factor") # remove attributes from original file which don't apply anymore res.attrs.pop('long_name') return res def _get_dataset_quality(self, dsname): """Load a quality field for an FCI channel.""" grp_path = self.get_channel_measured_group_path(_get_channel_name_from_dsname(dsname)) dv_path = grp_path + "/pixel_quality" data = self[dv_path] return data def _get_dataset_index_map(self, dsname): """Load the index map for an FCI channel.""" grp_path = self.get_channel_measured_group_path(_get_channel_name_from_dsname(dsname)) dv_path = grp_path + "/index_map" data = self[dv_path] data = data.where(data != data.attrs.get('_FillValue', 65535)) return data def _get_aux_data_lut_vector(self, aux_data_name): """Load the lut vector of an auxiliary variable.""" lut = self[AUX_DATA[aux_data_name]] fv = default_fillvals.get(lut.dtype.str[1:], np.nan) lut = lut.where(lut != fv) return lut @staticmethod def _getitem(block, lut): return lut[block.astype('uint16')] def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # get index map index_map = self._get_dataset_index_map(_get_channel_name_from_dsname(dsname)) # index map indexing starts from 1 index_map -= 1 # get lut values from 1-d vector lut = self._get_aux_data_lut_vector(_get_aux_data_name_from_dsname(dsname)) # assign lut values based on index map indices aux = index_map.data.map_blocks(self._getitem, lut.data, dtype=lut.data.dtype) aux = xr.DataArray(aux, dims=index_map.dims, 
attrs=index_map.attrs, coords=index_map.coords) # filter out out-of-disk values aux = aux.where(index_map >= 0) return aux @staticmethod def get_channel_measured_group_path(channel): """Get the channel's measured group path.""" measured_group_path = 'data/{}/measured'.format(channel) return measured_group_path def calc_area_extent(self, key): """Calculate area extent for a dataset.""" # if a user requests a pixel quality or index map before the channel data, the # yaml-reader will ask the area extent of the pixel quality/index map field, # which will ultimately end up here channel_name = _get_channel_name_from_dsname(key['name']) # Get metadata for given dataset measured = self.get_channel_measured_group_path(channel_name) # Get start/end line and column of loaded swath. nlines, ncols = self[measured + "/effective_radiance/shape"] logger.debug('Channel {} resolution: {}'.format(channel_name, ncols)) logger.debug('Row/Cols: {} / {}'.format(nlines, ncols)) # Calculate full globe line extent h = float(self["data/mtg_geos_projection/attr/perspective_point_height"]) extents = {} for coord in "xy": coord_radian = self["data/{:s}/measured/{:s}".format(channel_name, coord)] coord_radian_num = coord_radian[:] * coord_radian.scale_factor + coord_radian.add_offset # FCI defines pixels by centroids (see PUG), while pyresample # defines corners as lower left corner of lower left pixel, upper right corner of upper right pixel # (see https://pyresample.readthedocs.io/en/latest/geo_def.html). # Therefore, half a pixel (i.e. half scale factor) needs to be added in each direction. # The grid origin is in the South-West corner. # Note that the azimuth angle (x) is defined as positive towards West (see PUG - Level 1c Reference Grid) # The elevation angle (y) is defined as positive towards North as per usual convention. Therefore: # The values of x go from positive (West) to negative (East) and the scale factor of x is negative. # The values of y go from negative (South) to positive (North) and the scale factor of y is positive. # South-West corner (x positive, y negative) first_coord_radian = coord_radian_num[0] - coord_radian.scale_factor / 2 # North-East corner (x negative, y positive) last_coord_radian = coord_radian_num[-1] + coord_radian.scale_factor / 2 # convert to arc length in m first_coord = first_coord_radian * h # arc length in m last_coord = last_coord_radian * h # the .item() call is needed with the h5netcdf backend, see # https://github.com/pytroll/satpy/issues/972#issuecomment-558191583 # but we need to compute it first if this is dask try: first_coord = first_coord.compute() last_coord = last_coord.compute() except AttributeError: # not a dask.array pass extents[coord] = (first_coord.item(), last_coord.item()) # For the final extents, take into account that the image is upside down (lower line is North), and that # East is defined as positive azimuth in Proj, so we need to multiply by -1 the azimuth extents. 
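# Illustrative example (hypothetical numbers): with a first x coordinate of +0.15 rad and
# h = 35786400 m, the lower-left (western) extent becomes -0.15 * 35786400 ≈ -5.37e6 m
# in the eastward-positive output.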
# lower left x: west-ward extent: first coord of x, multiplied by -1 to account for azimuth orientation # lower left y: north-ward extent: last coord of y # upper right x: east-ward extent: last coord of x, multiplied by -1 to account for azimuth orientation # upper right y: south-ward extent: first coord of y area_extent = (-extents["x"][0], extents["y"][1], -extents["x"][1], extents["y"][0]) return area_extent, nlines, ncols def get_area_def(self, key): """Calculate on-fly area definition for a dataset in geos-projection.""" # assumption: channels with same resolution should have same area # cache results to improve performance if key['resolution'] in self._cache: return self._cache[key['resolution']] a = float(self["data/mtg_geos_projection/attr/semi_major_axis"]) h = float(self["data/mtg_geos_projection/attr/perspective_point_height"]) rf = float(self["data/mtg_geos_projection/attr/inverse_flattening"]) lon_0 = float(self["data/mtg_geos_projection/attr/longitude_of_projection_origin"]) sweep = str(self["data/mtg_geos_projection"].sweep_angle_axis) area_extent, nlines, ncols = self.calc_area_extent(key) logger.debug('Calculated area extent: {}' .format(''.join(str(area_extent)))) # use a (semi-major axis) and rf (reverse flattening) to define ellipsoid as recommended by EUM (see PUG) proj_dict = {'a': a, 'lon_0': lon_0, 'h': h, "rf": rf, 'proj': 'geos', 'units': 'm', "sweep": sweep} area_naming_input_dict = {'platform_name': 'mtg', 'instrument_name': 'fci', 'resolution': int(key['resolution']) } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode('fci', lon_0)}) area = geometry.AreaDefinition( area_naming['area_id'], area_naming['description'], "", proj_dict, ncols, nlines, area_extent) self._cache[key['resolution']] = area return area def calibrate(self, data, key): """Calibrate data.""" if key['calibration'] in ['brightness_temperature', 'reflectance', 'radiance']: data = self.calibrate_counts_to_physical_quantity(data, key) elif key['calibration'] != "counts": logger.error( "Received unknown calibration key. 
Expected " "'brightness_temperature', 'reflectance', 'radiance' or 'counts', got " + key['calibration'] + ".") return data def calibrate_counts_to_physical_quantity(self, data, key): """Calibrate counts to radiances, brightness temperatures, or reflectances.""" # counts to radiance scaling data = self.calibrate_counts_to_rad(data, key) if key['calibration'] == 'brightness_temperature': data = self.calibrate_rad_to_bt(data, key) elif key['calibration'] == 'reflectance': data = self.calibrate_rad_to_refl(data, key) return data def calibrate_counts_to_rad(self, data, key): """Calibrate counts to radiances.""" if key['name'] == 'ir_38': data = xr.where(((2 ** 12 - 1 < data) & (data <= 2 ** 13 - 1)), (data * data.attrs.get("warm_scale_factor", 1) + data.attrs.get("warm_add_offset", 0)), (data * data.attrs.get("scale_factor", 1) + data.attrs.get("add_offset", 0)) ) else: data = (data * data.attrs.get("scale_factor", 1) + data.attrs.get("add_offset", 0)) measured = self.get_channel_measured_group_path(key['name']) data.attrs.update({'radiance_unit_conversion_coefficient': self[measured + '/radiance_unit_conversion_coefficient']}) return data def calibrate_rad_to_bt(self, radiance, key): """IR channel calibration.""" # using the method from PUG section Converting from Effective Radiance to Brightness Temperature for IR Channels measured = self.get_channel_measured_group_path(key['name']) vc = self[measured + "/radiance_to_bt_conversion_coefficient_wavenumber"] a = self[measured + "/radiance_to_bt_conversion_coefficient_a"] b = self[measured + "/radiance_to_bt_conversion_coefficient_b"] c1 = self[measured + "/radiance_to_bt_conversion_constant_c1"] c2 = self[measured + "/radiance_to_bt_conversion_constant_c2"] for v in (vc, a, b, c1, c2): if v == v.attrs.get("FillValue", default_fillvals.get(v.dtype.str[1:])): logger.error( "{:s} set to fill value, cannot produce " "brightness temperatures for {:s}.".format( v.attrs.get("long_name", "at least one necessary coefficient"), measured)) return radiance * np.nan nom = c2 * vc denom = a * np.log(1 + (c1 * vc ** 3) / radiance) res = nom / denom - b / a return res def calibrate_rad_to_refl(self, radiance, key): """VIS channel calibration.""" measured = self.get_channel_measured_group_path(key['name']) cesi = self[measured + "/channel_effective_solar_irradiance"] if cesi == cesi.attrs.get( "FillValue", default_fillvals.get(cesi.dtype.str[1:])): logger.error( "channel effective solar irradiance set to fill value, " "cannot produce reflectance for {:s}.".format(measured)) return radiance * np.nan sun_earth_distance = np.mean(self["state/celestial/earth_sun_distance"]) / 149597870.7 # [AU] res = 100 * radiance * np.pi * sun_earth_distance ** 2 / cesi return res satpy-0.34.0/satpy/readers/fci_l2_nc.py000066400000000000000000000300211420401153000176750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""Reader for the FCI L2 products in NetCDF4 format.""" import logging from contextlib import suppress from datetime import datetime, timedelta import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers._geos_area import get_area_definition, make_ext from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) PRODUCT_DATA_DURATION_MINUTES = 20 SSP_DEFAULT = 0.0 class FciL2CommonFunctions(object): """Shared operations for file handlers.""" @property def _start_time(self): try: start_time = datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y%m%d%H%M%S') except (ValueError, KeyError): # TODO if the sensing_start_time_utc attribute is not valid, uses a hardcoded value logger.warning("Start time cannot be obtained from file content, using default value instead") start_time = datetime.strptime('20200101120000', '%Y%m%d%H%M%S') return start_time @property def _end_time(self): """Get observation end time.""" try: end_time = datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y%m%d%H%M%S') except (ValueError, KeyError): # TODO if the sensing_end_time_utc attribute is not valid, adds 20 minutes to the start time end_time = self._start_time + timedelta(minutes=PRODUCT_DATA_DURATION_MINUTES) return end_time @property def _spacecraft_name(self): """Return spacecraft name.""" try: return self.nc.attrs['platform'] except KeyError: # TODO if the platform attribute is not valid, return a default value logger.warning("Spacecraft name cannot be obtained from file content, using default value instead") return 'DEFAULT_MTG' @property def _sensor_name(self): """Return instrument.""" try: return self.nc.attrs['data_source'] except KeyError: # TODO if the data_source attribute is not valid, return a default value logger.warning("Sensor cannot be obtained from file content, using default value instead") return 'fci' def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets. Returns: dict: A dictionary of global attributes. 
filename: name of the product file start_time: sensing start time from best available source end_time: sensing end time from best available source spacecraft_name: name of the spacecraft ssp_lon: longitude of subsatellite point sensor: name of sensor creation_time: creation time of the product platform_name: name of the platform """ attributes = { 'filename': self.filename, 'start_time': self._start_time, 'end_time': self._end_time, 'spacecraft_name': self._spacecraft_name, 'ssp_lon': self.ssp_lon, 'sensor': self._sensor_name, 'creation_time': self.filename_info['creation_time'], 'platform_name': self._spacecraft_name, } return attributes def __del__(self): """Close the NetCDF file that may still be open.""" with suppress(OSError): self.nc.close() class FciL2NCFileHandler(BaseFileHandler, FciL2CommonFunctions): """Reader class for FCI L2 products in NetCDF4 format.""" def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) # Use xarray's default netcdf4 engine to open the file self.nc = xr.open_dataset( self.filename, decode_cf=True, mask_and_scale=True, chunks={ 'number_of_columns': CHUNK_SIZE, 'number_of_rows': CHUNK_SIZE } ) # Read metadata which are common to all datasets self.nlines = self.nc['y'].size self.ncols = self.nc['x'].size self._projection = self.nc['mtg_geos_projection'] # Compute the area definition self._area_def = self._compute_area_def() @property def ssp_lon(self): """Return subsatellite point longitude.""" try: return float(self._projection.attrs['longitude_of_projection_origin']) except KeyError: logger.warning("ssp_lon cannot be obtained from file content, using default value instead") return SSP_DEFAULT def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" var_key = dataset_info['file_key'] logger.debug('Reading in file to get dataset with key %s.', var_key) try: variable = self.nc[var_key] except KeyError: logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) return None # TODO in some of the test files, invalid pixels contain the value defined as "fill_value" in the YAML file # instead of being masked directly in the netCDF variable. 
# therefore NaN is applied where such value is found or (0 if the array contains integer values) # the next 11 lines have to be removed once the product files are correctly configured try: mask_value = dataset_info['mask_value'] except KeyError: mask_value = np.NaN try: fill_value = dataset_info['fill_value'] except KeyError: fill_value = np.NaN if dataset_info['file_type'] == 'nc_fci_test_clm': data_values = variable.where(variable != fill_value, mask_value).astype('uint32', copy=False) else: data_values = variable.where(variable != fill_value, mask_value).astype('float32', copy=False) data_values.attrs = variable.attrs variable = data_values # If the variable has 3 dimensions, select the required layer if variable.ndim == 3: layer = dataset_info.get('layer', 0) logger.debug('Selecting the layer %d.', layer) variable = variable.sel(maximum_number_of_layers=layer) if dataset_info['file_type'] == 'nc_fci_test_clm' and var_key != 'cloud_mask_cmrt6_test_result': variable.values = (variable.values >> dataset_info['extract_byte'] << 31 >> 31) # Rename the dimensions as required by Satpy variable = variable.rename({"number_of_rows": 'y', "number_of_columns": 'x'}) # Manage the attributes of the dataset variable.attrs.setdefault('units', None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable def get_area_def(self, key): """Return the area definition (common to all data in product).""" return self._area_def def _compute_area_def(self): """Compute the area definition. Returns: AreaDefinition: A pyresample AreaDefinition object containing the area definition. """ # Read the projection data from the mtg_geos_projection variable a = float(self._projection.attrs['semi_major_axis']) b = float(self._projection.attrs['semi_minor_axis']) h = float(self._projection.attrs['perspective_point_height']) # TODO sweep_angle_axis value not handled at the moment, therefore commented out # sweep_axis = self._projection.attrs['sweep_angle_axis'] # Coordinates of the pixel in radians x = self.nc['x'] y = self.nc['y'] # TODO conversion to radians: offset and scale factor are missing from some test NetCDF file # TODO the next two lines should be removed when the offset and scale factor are correctly configured if not hasattr(x, 'standard_name'): x = np.radians(x * 0.003202134 - 8.914740401) y = np.radians(y * 0.003202134 - 8.914740401) # Convert to degrees as required by the make_ext function x_deg = np.degrees(x) y_deg = np.degrees(y) # Select the extreme points of the extension area x_l, x_r = x_deg.values[0], x_deg.values[-1] y_l, y_u = y_deg.values[0], y_deg.values[-1] # Compute the extension area in meters area_extent = make_ext(x_l, x_r, y_l, y_u, h) # Assemble the projection definition dictionary p_dict = { 'nlines': self.nlines, 'ncols': self.ncols, 'ssp_lon': self.ssp_lon, 'a': a, 'b': b, 'h': h, 'a_name': 'FCI Area', # TODO to be confirmed 'a_desc': 'Area for FCI instrument', # TODO to be confirmed 'p_id': 'geos' } # Compute the area definition area_def = get_area_definition(p_dict, area_extent) return area_def class FciL2NCSegmentFileHandler(BaseFileHandler, FciL2CommonFunctions): """Reader class for FCI L2 Segmented products in NetCDF4 format.""" def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) # Use xarray's default netcdf4 engine to open the file self.nc = xr.open_dataset( self.filename, decode_cf=True, 
mask_and_scale=True, chunks={ 'number_of_FoR_cols': CHUNK_SIZE, 'number_of_FoR_rows': CHUNK_SIZE } ) # Read metadata which are common to all datasets self.nlines = self.nc['number_of_FoR_rows'].size self.ncols = self.nc['number_of_FoR_cols'].size self.ssp_lon = SSP_DEFAULT def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" var_key = dataset_info['file_key'] logger.debug('Reading in file to get dataset with key %s.', var_key) try: variable = self.nc[var_key] except KeyError: logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) return None # TODO in some of the test files, invalid pixels contain the value defined as "fill_value" in the YAML file # instead of being masked directly in the netCDF variable. # therefore NaN is applied where such value is found or (0 if the array contains integer values) # the next 11 lines have to be removed once the product files are correctly configured mask_value = dataset_info.get('mask_value', np.NaN) fill_value = dataset_info.get('fill_value', np.NaN) float_variable = variable.where(variable != fill_value, mask_value).astype('float32', copy=False) float_variable.attrs = variable.attrs variable = float_variable # Rename the dimensions as required by Satpy variable = variable.rename({"number_of_FoR_rows": 'y', "number_of_FoR_cols": 'x'}) # # Manage the attributes of the dataset variable.attrs.setdefault('units', None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable satpy-0.34.0/satpy/readers/file_handlers.py000066400000000000000000000251351420401153000206700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface for BaseFileHandlers.""" from abc import ABCMeta import numpy as np from pyresample.geometry import SwathDefinition from satpy.dataset import combine_metadata class BaseFileHandler(metaclass=ABCMeta): """Base file handler.""" def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" self.filename = filename self.navigation_reader = None self.filename_info = filename_info self.filetype_info = filetype_info self.metadata = filename_info.copy() def __str__(self): """Customize __str__.""" return "<{}: '{}'>".format(self.__class__.__name__, self.filename) def __repr__(self): """Customize __repr__.""" return str(self) def get_dataset(self, dataset_id, ds_info): """Get dataset.""" raise NotImplementedError def get_area_def(self, dsid): """Get area definition.""" raise NotImplementedError def get_bounding_box(self): """Get the bounding box of the files, as a (lons, lats) tuple. The tuple return should a lons and lats list of coordinates traveling clockwise around the points available in the file. 
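For example (values purely illustrative), a rectangular coverage could be
returned clockwise from the north-west corner as::

    lons = [10., 20., 20., 10.]
    lats = [45., 45., 40., 40.]
    return lons, lats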
""" raise NotImplementedError @staticmethod def _combine(infos, func, *keys): res = {} for key in keys: if key in infos[0]: res[key] = func([i[key] for i in infos]) return res def combine_info(self, all_infos): """Combine metadata for multiple datasets. When loading data from multiple files it can be non-trivial to combine things like start_time, end_time, start_orbit, end_orbit, etc. By default this method will produce a dictionary containing all values that were equal across **all** provided info dictionaries. Additionally it performs the logical comparisons to produce the following if they exist: - start_time - end_time - start_orbit - end_orbit - satellite_altitude - satellite_latitude - satellite_longitude - orbital_parameters Also, concatenate the areas. """ combined_info = combine_metadata(*all_infos) new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit') new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit')) new_dict.update(self._combine(all_infos, np.mean, 'satellite_longitude', 'satellite_latitude', 'satellite_altitude')) # Average orbital parameters orb_params = [info.get('orbital_parameters', {}) for info in all_infos] if all(orb_params): # Collect all available keys orb_params_comb = {} for d in orb_params: orb_params_comb.update(d) # Average known keys keys = ['projection_longitude', 'projection_latitude', 'projection_altitude', 'satellite_nominal_longitude', 'satellite_nominal_latitude', 'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude', 'nadir_longitude', 'nadir_latitude'] orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) new_dict['orbital_parameters'] = orb_params_comb try: area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]), lats=np.ma.vstack([info['area'].lats for info in all_infos])) area.name = '_'.join([info['area'].name for info in all_infos]) combined_info['area'] = area except KeyError: pass new_dict.update(combined_info) return new_dict @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def sensor_names(self): """List of sensors represented in this file.""" raise NotImplementedError def file_type_matches(self, ds_ftype): """Match file handler's type to this dataset's file type. Args: ds_ftype (str or list): File type or list of file types that a dataset is configured to be loaded from. Returns: ``True`` if this file handler object's type matches the dataset's file type(s), ``None`` otherwise. ``None`` is returned instead of ``False`` to follow the convention of the :meth:`available_datasets` method. """ if not isinstance(ds_ftype, (list, tuple)): ds_ftype = [ds_ftype] if self.filetype_info['file_type'] in ds_ftype: return True return None def available_datasets(self, configured_datasets=None): """Get information of available datasets in this file. This is used for dynamically specifying what datasets are available from a file in addition to what's configured in a YAML configuration file. Note that this method is called for each file handler for each file type; care should be taken when possible to reduce the amount of redundant datasets produced. 
This method should **not** update values of the dataset information dictionary **unless** this file handler has a matching file type (the data could be loaded from this object in the future) and at least **one** :class:`satpy.dataset.DataID` key is also modified. Otherwise, this file type may override the information provided by a more preferred file type (as specified in the YAML file). It is recommended that any non-ID metadata be updated during the :meth:`BaseFileHandler.get_dataset` part of loading. This method is not guaranteed that it will be called before any other file type's handler. The availability "boolean" not being ``None`` does not mean that a file handler called later can't provide an additional dataset, but it must provide more identifying (DataID) information to do so and should yield its new dataset in addition to the previous one. Args: configured_datasets (list): Series of (bool or None, dict) in the same way as is returned by this method (see below). The bool is whether or not the dataset is available from at least one of the current file handlers. It can also be ``None`` if no file handler knows before us knows how to handle it. The dictionary is existing dataset metadata. The dictionaries are typically provided from a YAML configuration file and may be modified, updated, or used as a "template" for additional available datasets. This argument could be the result of a previous file handler's implementation of this method. Returns: Iterator of (bool or None, dict) pairs where dict is the dataset's metadata. If the dataset is available in the current file type then the boolean value should be ``True``, ``False`` if we **know** about the dataset but it is unavailable, or ``None`` if this file object is not responsible for it. Example 1 - Supplement existing configured information:: def available_datasets(self, configured_datasets=None): "Add information to configured datasets." # we know the actual resolution res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: # we are meant to handle this dataset (file type matches) # and the information we can provide isn't available yet new_info = ds_info.copy() new_info['resolution'] = res yield True, new_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info Example 2 - Add dynamic datasets from the file:: def available_datasets(self, configured_datasets=None): "Add information to configured datasets." 
# pass along existing datasets for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info # get dynamic variables known to this file (that we created) for var_name, val in self.dynamic_variables.items(): ds_info = { 'file_type': self.filetype_info['file_type'], 'resolution': 1000, 'name': var_name, } yield True, ds_info """ for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue yield self.file_type_matches(ds_info['file_type']), ds_info satpy-0.34.0/satpy/readers/generic_image.py000066400000000000000000000133401420401153000206420ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """ Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...). Returns a dataset without calibration. Includes coordinates if available in the file (eg. geotiff). If nodata values are present (and rasterio is able to read them), it will be preserved as attribute ``_FillValue`` in the returned dataset. In case that nodata values should be used to mask pixels (that have equal values) with np.nan, it has to be enabled in the reader yaml file (key ``nodata_handling`` per dataset with value ``"nan_mask"``). 
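Example (illustrative only; the file path is hypothetical)::

    from satpy import Scene
    scn = Scene(reader='generic_image', filenames=['/path/to/image.tif'])
    scn.load(['image'])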
""" import logging import dask.array as da import numpy as np import rasterio import xarray as xr from pyresample import utils from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler BANDS = {1: ['L'], 2: ['L', 'A'], 3: ['R', 'G', 'B'], 4: ['R', 'G', 'B', 'A']} NODATA_HANDLING_FILLVALUE = 'fill_value' NODATA_HANDLING_NANMASK = 'nan_mask' logger = logging.getLogger(__name__) class GenericImageFileHandler(BaseFileHandler): """Handle reading of generic image files.""" def __init__(self, filename, filename_info, filetype_info): """Initialize filehandler.""" super(GenericImageFileHandler, self).__init__( filename, filename_info, filetype_info) self.finfo = filename_info try: self.finfo['end_time'] = self.finfo['start_time'] except KeyError: pass self.finfo['filename'] = self.filename self.file_content = {} self.area = None self.dataset_name = None self.read() def read(self): """Read the image.""" dataset = rasterio.open(self.finfo['filename']) # Create area definition if hasattr(dataset, 'crs') and dataset.crs is not None: self.area = utils.get_area_def_from_raster(dataset) data = xr.open_rasterio(dataset, chunks=(1, CHUNK_SIZE, CHUNK_SIZE)) attrs = data.attrs.copy() # Rename to Satpy convention data = data.rename({'band': 'bands'}) # Rename bands to [R, G, B, A], or a subset of those data['bands'] = BANDS[data.bands.size] data.attrs = attrs self.dataset_name = 'image' self.file_content[self.dataset_name] = data def get_area_def(self, dsid): """Get area definition of the image.""" if self.area is None: raise NotImplementedError("No CRS information available from image") return self.area @property def start_time(self): """Return start time.""" return self.finfo['start_time'] @property def end_time(self): """Return end time.""" return self.finfo['end_time'] def get_dataset(self, key, info): """Get a dataset from the file.""" ds_name = self.dataset_name if self.dataset_name else key['name'] logger.debug("Reading '%s.'", ds_name) data = self.file_content[ds_name] # Mask data if necessary try: data = _mask_image_data(data, info) except ValueError as err: logger.warning(err) data.attrs.update(key.to_dict()) data.attrs.update(info) return data def _mask_image_data(data, info): """Mask image data if necessary. Masking is done if alpha channel is present or dataset 'nodata_handling' is set to 'nan_mask'. In the latter case even integer data is converted to float32 and masked with np.nan. 
""" if data.bands.size in (2, 4): if not np.issubdtype(data.dtype, np.integer): raise ValueError("Only integer datatypes can be used as a mask.") mask = data.data[-1, :, :] == np.iinfo(data.dtype).min data = data.astype(np.float64) masked_data = da.stack([da.where(mask, np.nan, data.data[i, :, :]) for i in range(data.shape[0])]) data.data = masked_data data = data.sel(bands=BANDS[data.bands.size - 1]) elif hasattr(data, 'nodatavals') and data.nodatavals: data = _handle_nodatavals(data, info.get('nodata_handling', NODATA_HANDLING_FILLVALUE)) return data def _handle_nodatavals(data, nodata_handling): """Mask data with np.nan or only set 'attr_FillValue'.""" if nodata_handling == NODATA_HANDLING_NANMASK: # data converted to float and masked with np.nan data = data.astype(np.float32) masked_data = da.stack([da.where(data.data[i, :, :] == nodataval, np.nan, data.data[i, :, :]) for i, nodataval in enumerate(data.nodatavals)]) data.data = masked_data data.attrs['_FillValue'] = np.nan elif nodata_handling == NODATA_HANDLING_FILLVALUE: # keep data as it is but set _FillValue attribute to provided # nodatavalue (first one as it has to be the same for all bands at least # in GeoTiff, see GDAL gtiff driver documentation) fill_value = data.nodatavals[0] if np.issubdtype(data.dtype, np.integer): fill_value = int(fill_value) data.attrs['_FillValue'] = fill_value return data satpy-0.34.0/satpy/readers/geocat.py000066400000000000000000000256511420401153000173360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to GEOCAT HDF4 or NetCDF4 products. Note: GEOCAT files do not currently have projection information or precise pixel resolution information. Additionally the longitude and latitude arrays are stored as 16-bit integers which causes loss of precision. For this reason the lon/lats can't be used as a reliable coordinate system to calculate the projection X/Y coordinates. Until GEOCAT adds projection information and X/Y coordinate arrays, this reader will estimate the geostationary area the best it can. It currently takes a single lon/lat point as reference and uses hardcoded resolution and projection information to calculate the area extents. 
""" from __future__ import annotations import logging import numpy as np from pyproj import Proj from pyresample import geometry from pyresample.utils import proj4_str_to_dict from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 LOG = logging.getLogger(__name__) CF_UNITS = { 'none': '1', } # GEOCAT currently doesn't include projection information in it's files GEO_PROJS = { 'GOES-16': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs', 'GOES-17': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs', 'HIMAWARI-8': '+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs', } class GEOCATFileHandler(NetCDF4FileHandler): """GEOCAT netCDF4 file handler.""" sensors = { 'goes': 'goes_imager', 'himawari8': 'ahi', 'goes16': 'abi', # untested 'goesr': 'abi', # untested } platforms: dict[str, str] = { } resolutions = { 'abi': { 1: 1002.0086577437705, 2: 2004.0173154875411, }, 'ahi': { 1: 999.9999820317674, # assumption 2: 1999.999964063535, 4: 3999.99992812707, } } def get_sensor(self, sensor): """Get sensor.""" last_resort = None for k, v in self.sensors.items(): if k == sensor: return v if k in sensor: last_resort = v if last_resort: return last_resort raise ValueError("Unknown sensor '{}'".format(sensor)) def get_platform(self, platform): """Get platform.""" for k, v in self.platforms.items(): if k in platform: return v return platform def _get_proj(self, platform, ref_lon): if platform == 'GOES-16' and -76. < ref_lon < -74.: # geocat file holds the *actual* subsatellite point, not the # projection (-75.2 actual versus -75 projection) ref_lon = -75. return GEO_PROJS[platform].format(lon_0=ref_lon) @property def sensor_names(self): """Get sensor names.""" return [self.get_sensor(self['/attr/Sensor_Name'])] @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def is_geo(self): """Check platform.""" platform = self.get_platform(self['/attr/Platform_Name']) return platform in GEO_PROJS @property def resolution(self): """Get resolution.""" elem_res = self['/attr/Element_Resolution'] return int(elem_res * 1000) def _calc_area_resolution(self, ds_res): elem_res = round(ds_res / 1000.) # mimic 'Element_Resolution' attribute from above sensor = self.get_sensor(self['/attr/Sensor_Name']) return self.resolutions.get(sensor, {}).get(int(elem_res), elem_res * 1000.) def available_datasets(self, configured_datasets=None): """Update information for or add datasets provided by this file. If this file handler can load a dataset then it will supplement the dataset info with the resolution and possibly coordinate datasets needed to load it. Otherwise it will continue passing the dataset information down the chain. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. 
""" res = self.resolution coordinates = ('pixel_longitude', 'pixel_latitude') handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): this_res = ds_info.get('resolution') this_coords = ds_info.get('coordinates') # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get('file_key', ds_info['name']) matches = self.file_type_matches(ds_info['file_type']) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != res: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded new_info['resolution'] = res if not self.is_geo and this_coords is None: new_info['coordinates'] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # Provide new datasets for var_name, val in self.file_content.items(): if var_name in handled_variables: continue if isinstance(val, netCDF4.Variable): ds_info = { 'file_type': self.filetype_info['file_type'], 'resolution': res, 'name': var_name, } if not self.is_geo: ds_info['coordinates'] = coordinates yield True, ds_info def get_shape(self, dataset_id, ds_info): """Get shape.""" var_name = ds_info.get('file_key', dataset_id['name']) return self[var_name + '/shape'] def _first_good_nav(self, lon_arr, lat_arr): if hasattr(lon_arr, 'mask'): good_indexes = np.nonzero(~lon_arr.mask) else: # no masked values found in auto maskandscale good_indexes = ([0], [0]) # nonzero returns (, ) return tuple(x[0] for x in good_indexes) def _get_extents(self, proj, res, lon_arr, lat_arr): p = Proj(proj) res = float(res) first_good = self._first_good_nav(lon_arr, lat_arr) one_x, one_y = p(lon_arr[first_good], lat_arr[first_good]) left_x = one_x - res * first_good[1] right_x = left_x + res * lon_arr.shape[1] top_y = one_y + res * first_good[0] bot_y = top_y - res * lon_arr.shape[0] half_x = res / 2. half_y = res / 2. 
return (left_x - half_x, bot_y - half_y, right_x + half_x, top_y + half_y) def _load_nav(self, name): nav = self[name] factor = self[name + '/attr/scale_factor'] offset = self[name + '/attr/add_offset'] fill = self[name + '/attr/_FillValue'] nav = nav[:] mask = nav == fill nav = np.ma.masked_array(nav * factor + offset, mask=mask) return nav[:] def get_area_def(self, dsid): """Get area definition.""" if not self.is_geo: raise NotImplementedError("Don't know how to get the Area Definition for this file") platform = self.get_platform(self['/attr/Platform_Name']) res = self._calc_area_resolution(dsid['resolution']) proj = self._get_proj(platform, float(self['/attr/Subsatellite_Longitude'])) area_name = '{} {} Area at {}m'.format( platform, self.metadata.get('sector_id', ''), int(res)) lon = self._load_nav('pixel_longitude') lat = self._load_nav('pixel_latitude') extents = self._get_extents(proj, res, lon, lat) area_def = geometry.AreaDefinition( area_name, area_name, area_name, proj4_str_to_dict(proj), lon.shape[1], lon.shape[0], area_extent=extents, ) return area_def def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_name = ds_info.get('file_key', dataset_id['name']) shape = self.get_shape(dataset_id, ds_info) info = getattr(self[var_name], 'attrs', {}) info['shape'] = shape info.update(ds_info) u = info.get('units') if u in CF_UNITS: # CF compliance info['units'] = CF_UNITS[u] info['sensor'] = self.get_sensor(self['/attr/Sensor_Name']) info['platform_name'] = self.get_platform(self['/attr/Platform_Name']) info['resolution'] = dataset_id['resolution'] if var_name == 'pixel_longitude': info['standard_name'] = 'longitude' elif var_name == 'pixel_latitude': info['standard_name'] = 'latitude' return info def get_dataset(self, dataset_id, ds_info): """Get dataset.""" var_name = ds_info.get('file_key', dataset_id['name']) # FUTURE: Metadata retrieval may be separate info = self.get_metadata(dataset_id, ds_info) data = self[var_name] fill = self[var_name + '/attr/_FillValue'] factor = self.get(var_name + '/attr/scale_factor') offset = self.get(var_name + '/attr/add_offset') valid_range = self.get(var_name + '/attr/valid_range') data = data.where(data != fill) if valid_range is not None: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) if factor is not None and offset is not None: data = data * factor + offset data.attrs.update(info) data = data.rename({'lines': 'y', 'elements': 'x'}) return data satpy-0.34.0/satpy/readers/ghrsst_l3c_sst.py000066400000000000000000000110011420401153000210200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
# type: ignore """An OSISAF SST reader for the netCDF GHRSST format.""" import logging from datetime import datetime import numpy as np from satpy.dataset import Dataset from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) PLATFORM_NAME = {'NPP': 'Suomi-NPP', } SENSOR_NAME = {'VIIRS': 'viirs', 'AVHRR': 'avhrr/3'} class GHRSST_OSISAFL2(NetCDF4FileHandler): """Reader for the OSISAF SST GHRSST format.""" def _parse_datetime(self, datestr): return datetime.strptime(datestr, '%Y%m%dT%H%M%SZ') def get_area_def(self, area_id, area_info): """Override abstract baseclass method.""" raise NotImplementedError def get_dataset(self, dataset_id, ds_info, out=None): """Load a dataset.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) dtype = ds_info.get('dtype', np.float32) if var_path + '/shape' not in self: # loading a scalar value shape = 1 else: shape = self[var_path + '/shape'] if shape[0] == 1: # Remove the time dimenstion from dataset shape = shape[1], shape[2] file_units = ds_info.get('file_units') if file_units is None: try: file_units = self[var_path + '/attr/units'] # they were almost completely CF compliant... if file_units == "none": file_units = "1" except KeyError: # no file units specified file_units = None if out is None: out = np.ma.empty(shape, dtype=dtype) out.mask = np.zeros(shape, dtype=bool) out.data[:] = np.require(self[var_path][0][::-1], dtype=dtype) self._scale_and_mask_data(out, var_path) ds_info.update({ "units": ds_info.get("units", file_units), "platform_name": PLATFORM_NAME.get(self['/attr/platform'], self['/attr/platform']), "sensor": SENSOR_NAME.get(self['/attr/sensor'], self['/attr/sensor']), }) ds_info.update(dataset_id.to_dict()) cls = ds_info.pop("container", Dataset) return cls(out, **ds_info) def _scale_and_mask_data(self, out, var_path): valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] try: scale_factor = self[var_path + '/attr/scale_factor'] scale_offset = self[var_path + '/attr/add_offset'] except KeyError: scale_factor = scale_offset = None if valid_min is not None and valid_max is not None: out.mask[:] |= (out.data < valid_min) | (out.data > valid_max) factors = (scale_factor, scale_offset) if factors[0] != 1 or factors[1] != 0: out.data[:] *= factors[0] out.data[:] += factors[1] def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None): """Load an area.""" lon_key = 'lon' valid_min = self[lon_key + '/attr/valid_min'] valid_max = self[lon_key + '/attr/valid_max'] lon_out.data[:] = self[lon_key][::-1] lon_out.mask[:] = (lon_out < valid_min) | (lon_out > valid_max) lat_key = 'lat' valid_min = self[lat_key + '/attr/valid_min'] valid_max = self[lat_key + '/attr/valid_max'] lat_out.data[:] = self[lat_key][::-1] lat_out.mask[:] = (lat_out < valid_min) | (lat_out > valid_max) return {} @property def start_time(self): """Get start time.""" # return self.filename_info['start_time'] return self._parse_datetime(self['/attr/start_time']) @property def end_time(self): """Get end time.""" return self._parse_datetime(self['/attr/stop_time']) satpy-0.34.0/satpy/readers/glm_l2.py000066400000000000000000000142531420401153000172440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Geostationary Lightning Mapper reader for the Level 2 format from glmtools. More information about `glmtools` and the files it produces can be found on the project's GitHub repository: https://github.com/deeplycloudy/glmtools """ import logging from datetime import datetime import numpy as np from satpy.readers.abi_base import NC_ABI_BASE logger = logging.getLogger(__name__) PLATFORM_NAMES = { 'G16': 'GOES-16', 'G17': 'GOES-17', } # class NC_GLM_L2_LCFA(BaseFileHandler): — add this with glmtools class NCGriddedGLML2(NC_ABI_BASE): """File reader for individual GLM L2 NetCDF4 files.""" @property def sensor(self): """Get sensor name for current file handler.""" return 'glm' @property def start_time(self): """Start time of the current file's observations.""" return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%SZ') @property def end_time(self): """End time of the current file's observations.""" return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%SZ') def _is_category_product(self, data_arr): # if after autoscaling we still have an integer is_int = np.issubdtype(data_arr.dtype, np.integer) # and it has a fill value has_fill = '_FillValue' in data_arr.attrs # or it has flag_meanings has_meanings = 'flag_meanings' in data_arr.attrs # then it is likely a category product and we should keep the # _FillValue for satpy to use later return is_int and (has_fill or has_meanings) def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading in get_dataset %s.', key['name']) res = self[key['name']] res.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) res.attrs.update(self.filename_info) # Add orbital parameters projection = self.nc["goes_imager_projection"] res.attrs['orbital_parameters'] = { 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), 'projection_altitude': float(projection.attrs['perspective_point_height']), 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), # 'satellite_nominal_altitude': float(self['nominal_satellite_height']), } res.attrs.update(key.to_dict()) # remove attributes that could be confusing later if not self._is_category_product(res): res.attrs.pop('_FillValue', None) res.attrs.pop('scale_factor', None) res.attrs.pop('add_offset', None) res.attrs.pop('_Unsigned', None) res.attrs.pop('ancillary_variables', None) # Can't currently load DQF # add in information from the filename that may be useful to the user # for key in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname'): for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): res.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 
'production_site', 'timeline_ID', 'spatial_resolution'): res.attrs[attr] = self.nc.attrs.get(attr) return res def _is_2d_xy_var(self, data_arr): is_2d = data_arr.ndim == 2 has_x_dim = 'x' in data_arr.dims has_y_dim = 'y' in data_arr.dims return is_2d and has_x_dim and has_y_dim def available_datasets(self, configured_datasets=None): """Discover new datasets and add information from file.""" # we know the actual resolution res = self.spatial_resolution_to_number() # update previously configured datasets handled_vars = set() for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: # we are meant to handle this dataset (file type matches) # and the information we can provide isn't available yet new_info = ds_info.copy() new_info['resolution'] = res exists = ds_info['name'] in self.nc handled_vars.add(ds_info['name']) yield exists, new_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info for var_name, data_arr in self.nc.data_vars.items(): if var_name in handled_vars: # it was manually configured and handled above continue if not self._is_2d_xy_var(data_arr): # only handle 2d (y, x) vars for now continue new_info = { 'name': var_name, 'resolution': res, 'file_type': self.filetype_info['file_type'] } handled_vars.add(var_name) yield True, new_info satpy-0.34.0/satpy/readers/goes_imager_hrit.py000066400000000000000000000431711420401153000214000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GOES HRIT format reader. 
References: LRIT/HRIT Mission Specific Implementation, February 2012 GVARRDL98.pdf 05057_SPE_MSG_LRIT_HRI """ import logging from datetime import datetime, timedelta import dask.array as da import numpy as np import xarray as xr from pyresample import geometry from satpy.readers.eum_base import recarray2dict, time_cds_short from satpy.readers.hrit_base import ( HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function, ) class CalibrationError(Exception): """Dummy error-class.""" logger = logging.getLogger('hrit_goes') # Geometric constants [meters] EQUATOR_RADIUS = 6378169.00 POLE_RADIUS = 6356583.80 ALTITUDE = 35785831.00 # goes implementation: key_header = np.dtype([('key_number', 'u1'), ('seed', '>f8')]) segment_identification = np.dtype([('GP_SC_ID', '>i2'), ('spectral_channel_id', '>i1'), ('segment_sequence_number', '>u2'), ('planned_start_segment_number', '>u2'), ('planned_end_segment_number', '>u2'), ('data_field_representation', '>i1')]) image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), ('line_mean_acquisition', [('days', '>u2'), ('milliseconds', '>u4')]), ('line_validity', 'u1'), ('line_radiometric_quality', 'u1'), ('line_geometric_quality', 'u1')]) goms_variable_length_headers = { image_segment_line_quality: 'image_segment_line_quality'} goms_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text'} goes_hdr_map = base_hdr_map.copy() goes_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('X', '>f8', (8, )), ('Y', '>f8', (8, )), ('Z', '>f8', (8, )), ('VX', '>f8', (8, )), ('VY', '>f8', (8, )), ('VZ', '>f8', (8, ))]) attitude_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('XofSpinAxis', '>f8', (8, )), ('YofSpinAxis', '>f8', (8, )), ('ZofSpinAxis', '>f8', (8, ))]) cuc_time = np.dtype([('coarse', 'u1', (4, )), ('fine', 'u1', (3, ))]) sgs_time = np.dtype([('century', 'u1'), ('year', 'u1'), ('doy1', 'u1'), ('doy_hours', 'u1'), ('hours_mins', 'u1'), ('mins_secs', 'u1'), ('secs_msecs', 'u1'), ('msecs', 'u1')]) def make_sgs_time(sgs_time_array): """Make sgs time.""" year = ((sgs_time_array['century'] >> 4) * 1000 + (sgs_time_array['century'] & 15) * 100 + (sgs_time_array['year'] >> 4) * 10 + (sgs_time_array['year'] & 15)) doy = ((sgs_time_array['doy1'] >> 4) * 100 + (sgs_time_array['doy1'] & 15) * 10 + (sgs_time_array['doy_hours'] >> 4)) hours = ((sgs_time_array['doy_hours'] & 15) * 10 + (sgs_time_array['hours_mins'] >> 4)) mins = ((sgs_time_array['hours_mins'] & 15) * 10 + (sgs_time_array['mins_secs'] >> 4)) secs = ((sgs_time_array['mins_secs'] & 15) * 10 + (sgs_time_array['secs_msecs'] >> 4)) msecs = ((sgs_time_array['secs_msecs'] & 15) * 100 + (sgs_time_array['msecs'] >> 4) * 10 + (sgs_time_array['msecs'] & 15)) return (datetime(int(year), 1, 1) + timedelta(days=int(doy - 1), hours=int(hours), minutes=int(mins), seconds=int(secs), milliseconds=int(msecs))) satellite_status = np.dtype([("TagType", "> 24) - 64 mant = float_val & ((1 << 24) - 1) if mant == 0: return 0. 
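# Reassemble the value: the 24-bit mantissa is scaled by 2**-24 and the
# base-16 (excess-64) exponent extracted above is applied in 4-bit steps,
# i.e. 16**exp == 2**(exp * 4).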
res = sign * mant * 2.0**(-24 + exp * 4) return res prologue = np.dtype([ # common generic header ("CommonHeaderVersion", "u1"), ("Junk1", "u1", 3), ("NominalSGSProductTime", time_cds_short), ("SGSProductQuality", "u1"), ("SGSProductCompleteness", "u1"), ("SGSProductTimeliness", "u1"), ("SGSProcessingInstanceId", "u1"), ("BaseAlgorithmVersion", "S1", 16), ("ProductAlgorithmVersion", "S1", 16), # product header ("ImageProductHeaderVersion", "u1"), ("Junk2", "u1", 3), ("ImageProductHeaderLength", ">u4"), ("ImageProductVersion", "u1"), # first block-0 ("SatelliteID", "u1"), ("SPSID", "u1"), ("IScan", "u1", 4), ("IDSub", "u1", 16), ("TCurr", sgs_time), ("TCHED", sgs_time), ("TCTRL", sgs_time), ("TLHED", sgs_time), ("TLTRL", sgs_time), ("TIPFS", sgs_time), ("TINFS", sgs_time), ("TISPC", sgs_time), ("TIECL", sgs_time), ("TIBBC", sgs_time), ("TISTR", sgs_time), ("TLRAN", sgs_time), ("TIIRT", sgs_time), ("TIVIT", sgs_time), ("TCLMT", sgs_time), ("TIONA", sgs_time), ("RelativeScanCount", '>u2'), ("AbsoluteScanCount", '>u2'), ("NorthernmostScanLine", '>u2'), ("WesternmostPixel", '>u2'), ("EasternmostPixel", '>u2'), ("NorthernmostFrameLine", '>u2'), ("SouthernmostFrameLine", '>u2'), ("0Pixel", '>u2'), ("0ScanLine", '>u2'), ("0Scan", '>u2'), ("SubSatScan", '>u2'), ("SubSatPixel", '>u2'), ("SubSatLatitude", gvar_float), ("SubSatLongitude", gvar_float), ("Junk4", "u1", 96), # move to "word" 295 ("IMCIdentifier", "S4"), ("Zeros", "u1", 12), ("ReferenceLongitude", gvar_float), ("ReferenceDistance", gvar_float), ("ReferenceLatitude", gvar_float) ]) class HRITGOESPrologueFileHandler(HRITFileHandler): """GOES HRIT format reader.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(HRITGOESPrologueFileHandler, self).__init__(filename, filename_info, filetype_info, (goes_hdr_map, goms_variable_length_headers, goms_text_headers)) self.prologue = {} self.read_prologue() def read_prologue(self): """Read the prologue metadata.""" with open(self.filename, "rb") as fp_: fp_.seek(self.mda['total_header_length']) data = np.fromfile(fp_, dtype=prologue, count=1) self.prologue.update(recarray2dict(data)) self.process_prologue() def process_prologue(self): """Reprocess prologue to correct types.""" for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: try: self.prologue[key] = make_sgs_time(self.prologue[key]) except ValueError: self.prologue.pop(key, None) logger.debug("Invalid data for %s", key) for key in ['SubSatLatitude', "SubSatLongitude", "ReferenceLongitude", "ReferenceDistance", "ReferenceLatitude"]: self.prologue[key] = make_gvar_float(self.prologue[key]) radiometric_processing = np.dtype([("TagType", ". """Reader for GOES 8-15 imager data in netCDF format from NOAA CLASS. Also handles GOES 15 data in netCDF format reformated by Eumetsat GOES Imager netCDF files contain geolocated detector counts. If ordering via NOAA CLASS, select 16 bits/pixel. The instrument oversamples the viewed scene in E-W direction by a factor of 1.75: IR/VIS pixels are 112/28 urad on a side, but the instrument samples every 64/16 urad in E-W direction (see [BOOK-I] and [BOOK-N]). Important note: Some essential information are missing in the netCDF files, which might render them inappropriate for certain applications. The unknowns are: 1. Subsatellite point 2. Calibration coefficients 3. Detector-scanline assignment, i.e. 
information about which scanline was recorded by which detector Items 1. and 2. are not critical because the images are geo-located and NOAA provides static calibration coefficients ([VIS], [IR]). The detector-scanline assignment however cannot be reconstructed properly. This is where an approximation has to be applied (see below). Calibration =========== Calibration is performed according to [VIS] and [IR], but with an average calibration coefficient applied to all detectors in a certain channel. The reason for and impact of this approximation is described below. The GOES imager simultaneously records multiple scanlines per sweep using multiple detectors per channel. The VIS channel has 8 detectors, the IR channels have 1-2 detectors (see e.g. Figures 3-5a/b, 3-6a/b and 3-7/a-b in [BOOK-N]). Each detector has its own calibration coefficients, so in order to perform an accurate calibration, the detector-scanline assignment is needed. In theory it is known which scanline was recorded by which detector (VIS: 5,6,7,8,1,2,3,4; IR: 1,2). However, the plate on which the detectors are mounted flexes due to thermal gradients in the instrument which leads to a N-S shift of +/- 8 visible or +/- 2 IR pixels. This shift is compensated in the GVAR scan formation process, but in a way which is hard to reconstruct properly afterwards. See [GVAR], section 3.2.1. for details. Since the calibration coefficients of the detectors in a certain channel only differ slightly, a workaround is to calibrate each scanline with the average calibration coefficients. A worst case estimate of the introduced error can be obtained by calibrating all possible counts with both the minimum and the maximum calibration coefficients and computing the difference. The maximum differences are: ======= ===== ==== GOES-8 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.0 % # Counts are normalized 03_9 0.187 K 06_8 0.0 K # only one detector 10_7 0.106 K 12_0 0.036 K ======= ===== ==== ======= ===== ==== GOES-9 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.0 % # Counts are normalized 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.021 K 12_0 0.006 K ======= ===== ==== ======= ===== ==== GOES-10 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.05 % 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.013 K 12_0 0.004 K ======= ===== ==== ======= ===== ==== GOES-11 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.25 % 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.0 K # coefs identical 12_0 0.065 K ======= ===== ==== ======= ===== ==== GOES-12 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.8 % 03_9 0.0 K # coefs identical 06_5 0.044 K 10_7 0.0 K # coefs identical 13_3 0.0 K # only one detector ======= ===== ==== ======= ===== ==== GOES-13 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.31 % 03_9 0.0 K # coefs identical 06_5 0.085 K 10_7 0.008 K 13_3 0.0 K # only one detector ======= ===== ==== ======= ===== ==== GOES-14 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.66 % 03_9 0.0 K # coefs identical 06_5 0.043 K 10_7 0.006 K 13_3 0.003 K ======= ===== ==== ======= ===== ==== GOES-15 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.86 % 03_9 0.0 K # coefs identical 06_5 0.02 K 10_7 0.009 K 13_3 0.008 K ======= ===== ==== References: - [GVAR] https://goes.gsfc.nasa.gov/text/GVARRDL98.pdf - [BOOK-N] https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf - [BOOK-I] 
https://goes.gsfc.nasa.gov/text/databook/databook.pdf - [IR] https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html - [VIS] https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html - [FAQ] https://www.ncdc.noaa.gov/sites/default/files/attachments/Satellite-Frequently-Asked-Questions_2.pdf - [SCHED-W] http://www.ospo.noaa.gov/Operations/GOES/west/imager-routine.html - [SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html Eumetsat formatted netCDF data: The main differences are: 1. The geolocation is in a separate file, used for all bands 2. VIS data is calibrated to Albedo (or reflectance) 3. IR data is calibrated to radiance. 4. VIS data is downsampled to IR resolution (4km) 5. File name differs also slightly 6. Data is received via EumetCast """ import logging import re from abc import abstractmethod from collections import namedtuple from datetime import datetime, timedelta import numpy as np import pyresample.geometry import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.goes_imager_hrit import ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, SPACECRAFTS from satpy.readers.utils import bbox, get_geostationary_angle_extent logger = logging.getLogger(__name__) # Radiation constants. Source: [VIS] C1 = 1.191066E-5 # [mW/(m2-sr-cm-4)] C2 = 1.438833 # [K/cm-1] # Calibration Coefficients # # VIS Channel # ============ # slope, offset: Pre-Launch slope & offset for converting counts to radiance # (one per detector) [W m-2 um-1 sr-1]. # x0: Space count # k: pi / (solar spectral irradiance averaged over the spectral response # function of the detector) [m2 sr um W-1] # # # IR Channels # ============ # scale, offset: Scale & offset for converting counts to radiance. Units: # [mW m-2 cm-1 sr-1], [1]. They are identical for all platforms. # n: The channel's central wavenumber (one for each detector) [cm-1] # a, b: Offset and slope for converting effective BT to actual BT (one per # detector). Units: [K], [1] # btmin, btmax: Valid BT range [K]. Values outside this range will be masked. # Extracted from lookup tables provided in [IR]. 
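# Sketch of how these IR coefficients are meant to be applied (averaged over
# the channel's detectors), following the conversion formulas referenced in
# [IR]; illustration only, not the code path used below:
#
#     radiance = (counts - offset) / scale                   # [mW m-2 cm-1 sr-1]
#     bt_eff = C2 * n / np.log(1 + C1 * n ** 3 / radiance)   # effective BT [K]
#     bt = a + b * bt_eff                                     # actual BT [K]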
SCALE_03_9 = 227.3889 OFFSET_03_9 = 68.2167 SCALE_06_8 = 38.8383 OFFSET_06_8 = 29.1287 SCALE_06_5 = 38.8383 OFFSET_06_5 = 29.1287 SCALE_10_7 = 5.2285 OFFSET_10_7 = 15.6854 SCALE_12_0 = 5.0273 OFFSET_12_0 = 15.3332 SCALE_13_3 = 5.5297 OFFSET_13_3 = 16.5892 CALIB_COEFS = { 'GOES-15': {'00_7': {'slope': [5.851966E-1, 5.879772E-1, 5.856793E-1, 5.854250E-1, 5.866992E-1, 5.836241E-1, 5.846555E-1, 5.843753E-1], 'offset': [-16.9707, -17.0513, -16.9847, -16.9773, -17.0143, -16.9251, -16.9550, -16.9469], 'x0': 29, 'k': 1.88852E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2562.7905, 2562.7905], 'a': [-1.5693377, -1.5693377], 'b': [1.0025034, 1.0025034], 'btmin': 205.0, 'btmax': 340.0}, '06_5': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1521.1988, 1521.5277], 'a': [-3.4706545, -3.4755568], 'b': [1.0093296, 1.0092838], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [935.89417, 935.78158], 'a': [-0.36151367, -0.35316361], 'b': [1.0012715, 1.0012570], 'btmin': 180.0, 'btmax': 340.0}, '13_3': {'scale': SCALE_13_3, 'offset': OFFSET_13_3, 'n': [753.72229, 753.93403], 'a': [-0.21475817, -0.24630068], 'b': [1.0006485, 1.0007178], 'btmin': 180.0, 'btmax': 340.0} }, # ITT RevH + STAR Correction 'GOES-14': {'00_7': {'slope': [5.874693E-1, 5.865367E-1, 5.862807E-1, 5.864086E-1, 5.857146E-1, 5.852004E-1, 5.860814E-1, 5.841697E-1], 'offset': [-17.037, -17.010, -17.002, -17.006, -16.986, -16.971, -16.996, -16.941], 'x0': 29, 'k': 1.88772E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2577.3518, 2577.3518], 'a': [-1.5297091, -1.5297091], 'b': [1.0025608, 1.0025608], 'btmin': 205.0, 'btmax': 340.0}, '06_5': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1519.3488, 1518.5610], 'a': [-3.4647892, -3.4390527], 'b': [1.0093656, 1.0094427], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [933.98541, 934.19579], 'a': [-0.29201763, -0.31824779], 'b': [1.0012018, 1.0012303], 'btmin': 180.0, 'btmax': 340.0}, '13_3': {'scale': SCALE_13_3, 'offset': OFFSET_13_3, 'n': [752.88143, 752.82392], 'a': [-0.22508805, -0.21700982], 'b': [1.0006686, 1.0006503], 'btmin': 180.0, 'btmax': 340.0} }, # ITT RevH + STAR Correction 'GOES-13': {'00_7': {'slope': [6.120196E-1, 6.118504E-1, 6.096360E-1, 6.087055E-1, 6.132860E-1, 6.118208E-1, 6.122307E-1, 6.066968E-1], 'offset': [-17.749, -17.744, -17.769, -17.653, -17.785, -17.743, -17.755, -17.594], 'x0': 29, 'k': 1.89544E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2561.74, 2561.74], 'a': [-1.437204, -1.437204], 'b': [1.002562, 1.002562], 'btmin': 205.0, 'btmax': 340.0}, '06_5': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1522.52, 1521.66], 'a': [-3.625663, -3.607841], 'b': [1.010018, 1.010010], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [937.23, 937.27], 'a': [-0.386043, -0.380113], 'b': [1.001298, 1.001285], 'btmin': 180.0, 'btmax': 340.0}, '13_3': {'scale': SCALE_13_3, 'offset': OFFSET_13_3, 'n': [749.83], 'a': [-0.134801], 'b': [1.000482], 'btmin': 180.0, 'btmax': 340.0} # Has only one detector on GOES-13 }, 'GOES-12': {'00_7': {'slope': [5.771030E-1, 5.761764E-1, 5.775825E-1, 5.790699E-1, 5.787051E-1, 5.755969E-1, 5.753973E-1, 5.752099E-1], 'offset': [-16.736, -16.709, -16.750, -16.793, -16.782, -16.692, -16.687, -16.681], 'x0': 29, 'k': 1.97658E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2562.45, 2562.45], 'a': [-0.650731, -0.650731], 'b': [1.001520, 1.001520], 
'btmin': 205.0, 'btmax': 340.0}, '06_5': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1536.43, 1536.94], 'a': [-4.764728, -4.775517], 'b': [1.012420, 1.012403], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [933.21, 933.21], 'a': [-0.360331, -0.360331], 'b': [1.001306, 1.001306], 'btmin': 180.0, 'btmax': 340.0}, '13_3': {'scale': SCALE_13_3, 'offset': OFFSET_13_3, 'n': [751.91], 'a': [-0.253449], 'b': [1.000743], 'btmin': 180.0, 'btmax': 340.0} # Has only one detector on GOES-12 }, 'GOES-11': {'00_7': {'slope': [5.561568E-1, 5.552979E-1, 5.558981E-1, 5.577627E-1, 5.557238E-1, 5.587978E-1, 5.586530E-1, 5.528971E-1], 'offset': [-16.129, -16.104, -16.121, -16.175, -16.116, -16.205, -16.201, -16.034], 'x0': 29, 'k': 2.01524E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2562.07, 2562.07], 'a': [-0.644790, -0.644790], 'b': [1.000775, 1.000775], 'btmin': 205.0, 'btmax': 340.0}, '06_8': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1481.53], 'a': [-0.543401], 'b': [1.001495], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [931.76, 931.76], 'a': [-0.306809, -0.306809], 'b': [1.001274, 1.001274], 'btmin': 180.0, 'btmax': 340.0}, '12_0': {'scale': SCALE_12_0, 'offset': OFFSET_12_0, 'n': [833.67, 833.04], 'a': [-0.333216, -0.315110], 'b': [1.001000, 1.000967], 'btmin': 180.0, 'btmax': 340.0} }, 'GOES-10': {'00_7': {'slope': [5.605602E-1, 5.563529E-1, 5.566574E-1, 5.582154E-1, 5.583361E-1, 5.571736E-1, 5.563135E-1, 5.613536E-1], 'offset': [-16.256, -16.134, -16.143, -16.188, -16.192, -16.158, -16.133, -16.279], 'x0': 29, 'k': 1.98808E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2552.9845, 2552.9845], 'a': [-0.60584483, -0.60584483], 'b': [1.0011017, 1.0011017], 'btmin': 205.0, 'btmax': 340.0}, '06_8': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1486.2212], 'a': [-0.61653805], 'b': [1.0014011], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [936.10260, 935.98981], 'a': [-0.27128884, -0.27064036], 'b': [1.0009674, 1.0009687], 'btmin': 180.0, 'btmax': 340.0}, '12_0': {'scale': SCALE_12_0, 'offset': OFFSET_12_0, 'n': [830.88473, 830.89691], 'a': [-0.26505411, -0.26056452], 'b': [1.0009087, 1.0008962], 'btmin': 180.0, 'btmax': 340.0} }, 'GOES-9': {'00_7': {'slope': [0.5492361], 'offset': [-15.928], 'x0': 29, 'k': 1.94180E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2555.18, 2555.18], 'a': [-0.579908, -0.579908], 'b': [1.000942, 1.000942], 'btmin': 205.0, 'btmax': 340.0}, '06_8': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1481.82], 'a': [-0.493016], 'b': [1.001076], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [934.59, 934.28], 'a': [-0.384798, -0.363703], 'b': [1.001293, 1.001272], 'btmin': 180.0, 'btmax': 340.0}, '12_0': {'scale': SCALE_12_0, 'offset': OFFSET_12_0, 'n': [834.02, 834.09], 'a': [-0.302995, -0.306838], 'b': [1.000941, 1.000948], 'btmin': 180.0, 'btmax': 340.0} }, 'GOES-8': {'00_7': {'slope': [0.5501873], 'offset': [-15.955], 'x0': 29, 'k': 1.92979E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2556.71, 2558.62], 'a': [-0.578526, -0.581853], 'b': [1.001512, 1.001532], 'btmin': 205.0, 'btmax': 340.0}, '06_8': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1481.91], 'a': [-0.593903], 'b': [1.001418], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [934.30, 935.38], 'a': 
[-0.322585, -0.351889], 'b': [1.001271, 1.001293], 'btmin': 180.0, 'btmax': 340.0}, '12_0': {'scale': SCALE_12_0, 'offset': OFFSET_12_0, 'n': [837.06, 837.00], 'a': [-0.422571, -0.466954], 'b': [1.001170, 1.001257], 'btmin': 180.0, 'btmax': 340.0} } } # Angular sampling rates in radians. Source: [BOOK-I], [BOOK-N] SAMPLING_EW_VIS = 16E-6 SAMPLING_NS_VIS = 28E-6 SAMPLING_EW_IR = 64E-6 SAMPLING_NS_IR = 112E-6 # Sector definitions. TODO: Add remaining sectors (PACUS, CONUS, ...) FULL_DISC = 'Full Disc' NORTH_HEMIS_EAST = 'Northern Hemisphere (GOES-East)' SOUTH_HEMIS_EAST = 'Southern Hemisphere (GOES-East)' NORTH_HEMIS_WEST = 'Northern Hemisphere (GOES-West)' SOUTH_HEMIS_WEST = 'Southern Hemisphere (GOES-West)' UNKNOWN_SECTOR = 'Unknown' IR_SECTORS = { (2704, 5208): FULL_DISC, (1826, 3464): NORTH_HEMIS_EAST, (566, 3464): SOUTH_HEMIS_EAST, (1354, 3312): NORTH_HEMIS_WEST, (1062, 2760): SOUTH_HEMIS_WEST } # (nlines, ncols) VIS_SECTORS = { (10819, 20800): FULL_DISC, (7307, 13852): NORTH_HEMIS_EAST, (2267, 13852): SOUTH_HEMIS_EAST, (5419, 13244): NORTH_HEMIS_WEST, (4251, 11044): SOUTH_HEMIS_WEST } # (nlines, ncols) SCAN_DURATION = { FULL_DISC: timedelta(minutes=26), NORTH_HEMIS_WEST: timedelta(minutes=10, seconds=5), SOUTH_HEMIS_WEST: timedelta(minutes=6, seconds=54), NORTH_HEMIS_EAST: timedelta(minutes=14, seconds=15), SOUTH_HEMIS_EAST: timedelta(minutes=4, seconds=49) } # Source: [SCHED-W], [SCHED-E] class GOESNCBaseFileHandler(BaseFileHandler): """File handler for GOES Imager data in netCDF format.""" def __init__(self, filename, filename_info, filetype_info, geo_data=None): """Initialize the reader.""" super(GOESNCBaseFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) self.sensor = 'goes_imager' self.nlines = self.nc.dims['yc'] self.ncols = self.nc.dims['xc'] self.platform_name = self._get_platform_name( self.nc.attrs['Satellite Sensor']) self.platform_shortname = self.platform_name.replace('-', '').lower() self.gvar_channel = int(self.nc['bands'].values) self.sector = self._get_sector(channel=self.gvar_channel, nlines=self.nlines, ncols=self.ncols) self._meta = None self.geo_data = geo_data if geo_data is not None else self.nc @abstractmethod def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" raise NotImplementedError @abstractmethod def calibrate(self, data, calibration, channel): """Perform calibration.""" raise NotImplementedError @property @abstractmethod def vis_sectors(self): """Get the vis sectors.""" raise NotImplementedError @property @abstractmethod def ir_sectors(self): """Get the ir sectors.""" raise NotImplementedError @staticmethod def _get_platform_name(ncattr): """Determine name of the platform.""" match = re.match(r'G-(\d+)', ncattr) if match: return SPACECRAFTS.get(int(match.groups()[0])) return None def _get_sector(self, channel, nlines, ncols): """Determine which sector was scanned.""" if self._is_vis(channel): margin = 100 sectors_ref = self.vis_sectors else: margin = 50 sectors_ref = self.ir_sectors for (nlines_ref, ncols_ref), sector in sectors_ref.items(): if np.fabs(ncols - ncols_ref) < margin and \ np.fabs(nlines - nlines_ref) < margin: return sector return UNKNOWN_SECTOR @staticmethod def _is_vis(channel): """Determine whether the given channel is a visible channel.""" if isinstance(channel, str): return channel == '00_7' if isinstance(channel, int): return channel == 1 raise 
ValueError('Invalid channel') @staticmethod def _get_earth_mask(lat): """Identify earth/space pixels. Returns: Mask (1=earth, 0=space) """ logger.debug('Computing earth mask') return np.fabs(lat) <= 90 @staticmethod def _get_nadir_pixel(earth_mask, sector): """Find the nadir pixel. Args: earth_mask: Mask identifying earth and space pixels sector: Specifies the scanned sector Returns: nadir row, nadir column """ if sector == FULL_DISC: logger.debug('Computing nadir pixel') # The earth is not centered in the image, compute bounding box # of the earth disc first rmin, rmax, cmin, cmax = bbox(earth_mask) # The nadir pixel is approximately at the centre of the earth disk nadir_row = rmin + (rmax - rmin) // 2 nadir_col = cmin + (cmax - cmin) // 2 return nadir_row, nadir_col return None, None @staticmethod def _is_yaw_flip(lat, delta=10): """Determine whether the satellite is yaw-flipped ('upside down').""" logger.debug('Computing yaw flip flag') # In case of yaw-flip the data and coordinates in the netCDF files are # also flipped. Just check whether the latitude increases or decrases # with the line number. crow, ccol = np.array(lat.shape) // 2 return (lat[crow+delta, ccol] - lat[crow, ccol]).values > 0 def _get_area_def_uniform_sampling(self, lon0, channel): """Get area definition with uniform sampling.""" logger.debug('Computing area definition') if lon0 is not None: # Define proj4 projection parameters proj_dict = {'a': EQUATOR_RADIUS, 'b': POLE_RADIUS, 'lon_0': lon0, 'h': ALTITUDE, 'proj': 'geos', 'units': 'm'} # Calculate maximum scanning angles xmax, ymax = get_geostationary_angle_extent( namedtuple('area', ['proj_dict'])(proj_dict)) # Derive area extent using small angle approximation (maximum # scanning angle is ~8.6 degrees) llx, lly, urx, ury = ALTITUDE * np.array([-xmax, -ymax, xmax, ymax]) area_extent = [llx, lly, urx, ury] # Original image is oversampled. Create pyresample area definition # with uniform sampling in N-S and E-W direction if self._is_vis(channel): sampling = SAMPLING_NS_VIS else: sampling = SAMPLING_NS_IR pix_size = ALTITUDE * sampling area_def = pyresample.geometry.AreaDefinition( 'goes_geos_uniform', '{} geostationary projection (uniform sampling)'.format(self.platform_name), 'goes_geos_uniform', proj_dict, np.rint((urx - llx) / pix_size).astype(int), np.rint((ury - lly) / pix_size).astype(int), area_extent) return area_def return None @property def start_time(self): """Start timestamp of the dataset.""" dt = self.nc['time'].dt return datetime(year=dt.year, month=dt.month, day=dt.day, hour=dt.hour, minute=dt.minute, second=dt.second, microsecond=dt.microsecond) @property def end_time(self): """End timestamp of the dataset.""" try: return self.start_time + SCAN_DURATION[self.sector] except KeyError: return self.start_time @property def resolution(self): """Specify the spatial resolution of the dataset. Channel 13_3's spatial resolution changes from one platform to another while the wavelength and file format remain the same. In order to avoid multiple YAML reader definitions for the same file format, read the channel's resolution from the file instead of defining it in the YAML dataset. This information will then be used by the YAML reader to complement the YAML definition of the dataset. Returns: Spatial resolution in kilometers """ return 1000. * self.nc['lineRes'].values def get_shape(self, key, info): """Get the shape of the data. 
Returns: Number of lines, number of columns """ return self.nlines, self.ncols @property def meta(self): """Derive metadata from the coordinates.""" # Use buffered data if available if self._meta is None: lat = self.geo_data['lat'] earth_mask = self._get_earth_mask(lat) crow, ccol = self._get_nadir_pixel(earth_mask=earth_mask, sector=self.sector) lat0 = lat.values[crow, ccol] if crow is not None else None yaw_flip = self._is_yaw_flip(lat) del lat lon = self.geo_data['lon'] lon0 = lon.values[crow, ccol] if crow is not None else None area_def_uni = self._get_area_def_uniform_sampling( lon0=lon0, channel=self.gvar_channel) del lon self._meta = {'earth_mask': earth_mask, 'yaw_flip': yaw_flip, 'lat0': lat0, 'lon0': lon0, 'nadir_row': crow, 'nadir_col': ccol, 'area_def_uni': area_def_uni} return self._meta def _counts2radiance(self, counts, coefs, channel): """Convert raw detector counts to radiance.""" logger.debug('Converting counts to radiance') if self._is_vis(channel): # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. slope = np.array(coefs['slope']).mean() offset = np.array(coefs['offset']).mean() return self._viscounts2radiance(counts=counts, slope=slope, offset=offset) return self._ircounts2radiance(counts=counts, scale=coefs['scale'], offset=coefs['offset']) def _calibrate(self, radiance, coefs, channel, calibration): """Convert radiance to reflectance or brightness temperature.""" if self._is_vis(channel): if not calibration == 'reflectance': raise ValueError('Cannot calibrate VIS channel to ' '{}'.format(calibration)) return self._calibrate_vis(radiance=radiance, k=coefs['k']) else: if not calibration == 'brightness_temperature': raise ValueError('Cannot calibrate IR channel to ' '{}'.format(calibration)) # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. mean_coefs = {'a': np.array(coefs['a']).mean(), 'b': np.array(coefs['b']).mean(), 'n': np.array(coefs['n']).mean(), 'btmin': coefs['btmin'], 'btmax': coefs['btmax']} return self._calibrate_ir(radiance=radiance, coefs=mean_coefs) @staticmethod def _ircounts2radiance(counts, scale, offset): """Convert IR counts to radiance. Reference: [IR]. Args: counts: Raw detector counts scale: Scale [mW-1 m2 cm sr] offset: Offset [1] Returns: Radiance [mW m-2 cm-1 sr-1] """ rad = (counts - offset) / scale return rad.clip(min=0) @staticmethod def _calibrate_ir(radiance, coefs): """Convert IR radiance to brightness temperature. Reference: [IR] Args: radiance: Radiance [mW m-2 cm-1 sr-1] coefs: Dictionary of calibration coefficients. Keys: n: The channel's central wavenumber [cm-1] a: Offset [K] b: Slope [1] btmin: Minimum brightness temperature threshold [K] btmax: Maximum brightness temperature threshold [K] Returns: Brightness temperature [K] """ logger.debug('Calibrating to brightness temperature') # Compute brightness temperature using inverse Planck formula n = coefs['n'] bteff = C2 * n / np.log(1 + C1 * n ** 3 / radiance.where(radiance > 0)) bt = xr.DataArray(bteff * coefs['b'] + coefs['a']) # Apply BT threshold return bt.where(np.logical_and(bt >= coefs['btmin'], bt <= coefs['btmax'])) @staticmethod def _viscounts2radiance(counts, slope, offset): """Convert VIS counts to radiance. 
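The conversion is ``radiance = counts * slope + offset``, clipped at zero so
that negative radiances are discarded.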
References: [VIS] Args: counts: Raw detector counts slope: Slope [W m-2 um-1 sr-1] offset: Offset [W m-2 um-1 sr-1] Returns: Radiance [W m-2 um-1 sr-1] """ rad = counts * slope + offset return rad.clip(min=0) @staticmethod def _calibrate_vis(radiance, k): """Convert VIS radiance to reflectance. Note: Angle of incident radiation and annual variation of the earth-sun distance is not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. TODO: Take angle of incident radiation (cos sza) and annual variation of the earth-sun distance into account. Reference: [VIS] Args: radiance: Radiance [mW m-2 cm-1 sr-1] k: pi / H, where H is the solar spectral irradiance at annual-average sun-earth distance, averaged over the spectral response function of the detector). Units of k: [m2 um sr W-1] Returns: Reflectance [%] """ logger.debug('Calibrating to reflectance') refl = 100 * k * radiance return refl.clip(min=0) def _update_metadata(self, data, ds_info): """Update metadata of the given DataArray.""" # Metadata from the dataset definition data.attrs.update(ds_info) # If the file_type attribute is a list and the data is xarray # the concat of the dataset will not work. As the file_type is # not needed this will be popped here. if 'file_type' in data.attrs: data.attrs.pop('file_type') # Metadata discovered from the file. data.attrs.update( {'platform_name': self.platform_name, 'sensor': self.sensor, 'sector': self.sector, 'orbital_parameters': {'yaw_flip': self.meta['yaw_flip']}} ) if self.meta['lon0'] is not None: # Attributes only available for full disc images. YAML reader # doesn't like it if satellite_* is present but None data.attrs.update( {'satellite_longitude': self.meta['lon0'], 'satellite_latitude': self.meta['lat0'], 'satellite_altitude': ALTITUDE, 'nadir_row': self.meta['nadir_row'], 'nadir_col': self.meta['nadir_col'], 'area_def_uniform_sampling': self.meta['area_def_uni']} ) data.attrs['orbital_parameters'].update( {'projection_longitude': self.meta['lon0'], 'projection_latitude': self.meta['lat0'], 'projection_altitude': ALTITUDE} ) def __del__(self): """Delete.""" try: self.nc.close() except (AttributeError, OSError): pass def available_datasets(self, configured_datasets=None): """Update information for or add datasets provided by this file. If this file handler can load a dataset then it will supplement the dataset info with the resolution and possibly coordinate datasets needed to load it. Otherwise it will continue passing the dataset information down the chain. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. 
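For example, a dataset configured without a ``resolution`` will be yielded
again with ``resolution`` filled in from the file (see :attr:`resolution`).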
""" res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: new_info = ds_info.copy() new_info['resolution'] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info class GOESNCFileHandler(GOESNCBaseFileHandler): """File handler for GOES Imager data in netCDF format.""" vis_sectors = VIS_SECTORS ir_sectors = IR_SECTORS def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(GOESNCFileHandler, self).__init__(filename, filename_info, filetype_info) def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug('Reading dataset {}'.format(key['name'])) # Read data from file and calibrate if necessary if 'longitude' in key['name']: data = self.geo_data['lon'] elif 'latitude' in key['name']: data = self.geo_data['lat'] else: tic = datetime.now() data = self.calibrate(self.nc['data'].isel(time=0), calibration=key['calibration'], channel=key['name']) logger.debug('Calibration time: {}'.format(datetime.now() - tic)) # Mask space pixels data = data.where(self.meta['earth_mask']) # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) # Update metadata self._update_metadata(data, ds_info=info) return data def calibrate(self, counts, calibration, channel): """Perform calibration.""" # Convert 16bit counts from netCDF4 file to the original 10bit # GVAR counts by dividing by 32. See [FAQ]. counts = counts / 32. coefs = CALIB_COEFS[self.platform_name][channel] if calibration == 'counts': return counts if calibration in ['radiance', 'reflectance', 'brightness_temperature']: radiance = self._counts2radiance(counts=counts, coefs=coefs, channel=channel) if calibration == 'radiance': return radiance return self._calibrate(radiance=radiance, coefs=coefs, channel=channel, calibration=calibration) raise ValueError('Unsupported calibration for channel {}: {}'.format(channel, calibration)) class GOESEUMNCFileHandler(GOESNCBaseFileHandler): """File handler for GOES Imager data in EUM netCDF format. 
TODO: Remove datasets which are not available in the file (counts, VIS radiance) via available_datasets() -> See #434 """ vis_sectors = IR_SECTORS # VIS channel is downsampled to IR resolution ir_sectors = IR_SECTORS def __init__(self, filename, filename_info, filetype_info, geo_data): """Initialize the reader.""" super(GOESEUMNCFileHandler, self).__init__(filename, filename_info, filetype_info, geo_data) def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug('Reading dataset {}'.format(key['name'])) tic = datetime.now() data = self.calibrate(self.nc['data'].isel(time=0), calibration=key['calibration'], channel=key['name']) logger.debug('Calibration time: {}'.format(datetime.now() - tic)) # Mask space pixels data = data.where(self.meta['earth_mask']) # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) data = data.drop('time') # Update metadata self._update_metadata(data, ds_info=info) return data def calibrate(self, data, calibration, channel): """Perform calibration.""" coefs = CALIB_COEFS[self.platform_name][channel] is_vis = self._is_vis(channel) # IR files provide radiances, VIS file provides reflectances if is_vis and calibration == 'reflectance': return data if not is_vis and calibration == 'radiance': return data if not is_vis and calibration == 'brightness_temperature': return self._calibrate(radiance=data, calibration=calibration, coefs=coefs, channel=channel) raise ValueError('Unsupported calibration for channel {}: {}' .format(channel, calibration)) class GOESEUMGEONCFileHandler(BaseFileHandler): """File handler for GOES Geolocation data in EUM netCDF format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(GOESEUMGEONCFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) self.sensor = 'goes_imager' self.nlines = self.nc.dims['yc'] self.ncols = self.nc.dims['xc'] self.platform_name = GOESNCBaseFileHandler._get_platform_name( self.nc.attrs['Satellite Sensor']) self.platform_shortname = self.platform_name.replace('-', '').lower() self._meta = None def __getitem__(self, item): """Get item.""" return getattr(self.nc, item) def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug('Reading dataset {}'.format(key['name'])) # Read data from file and calibrate if necessary if 'longitude' in key['name']: data = self.nc['lon'] elif 'latitude' in key['name']: data = self.nc['lat'] else: raise KeyError("Unknown dataset: {}".format(key['name'])) # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) # Update metadata data.attrs.update(info) return data @property def resolution(self): """Specify the spatial resolution of the dataset. In the EUMETSAT format VIS data is downsampled to IR resolution (4km). 
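Returns: Spatial resolution in meters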
""" return 4000.0 class GOESCoefficientReader(object): """Read GOES Imager calibration coefficients from NOAA reference HTMLs.""" gvar_channels = { 'GOES-8': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, 'GOES-9': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, 'GOES-10': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, 'GOES-11': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, 'GOES-12': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, 'GOES-13': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, 'GOES-14': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, 'GOES-15': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, } ir_tables = { 'GOES-8': '2-1', 'GOES-9': '2-2', 'GOES-10': '2-3', 'GOES-11': '2-4', 'GOES-12': '2-5a', 'GOES-13': '2-6', 'GOES-14': '2-7c', 'GOES-15': '2-8b' } vis_tables = { 'GOES-8': 'Table 1.', 'GOES-9': 'Table 1.', 'GOES-10': 'Table 2.', 'GOES-11': 'Table 3.', 'GOES-12': 'Table 4.', 'GOES-13': 'Table 5.', 'GOES-14': 'Table 6.', 'GOES-15': 'Table 7.' } def __init__(self, ir_url, vis_url): """Init the coef reader.""" from bs4 import BeautifulSoup self.ir_html = BeautifulSoup(self._load_url_or_file(ir_url), features="html5lib") self.vis_html = BeautifulSoup(self._load_url_or_file(vis_url), features="html5lib") def _load_url_or_file(self, url): import requests from requests.exceptions import MissingSchema try: response = requests.get(url) if response.ok: return response.text raise requests.HTTPError except (MissingSchema, requests.HTTPError): # Not a valid URL, is it a file? try: return open(url, mode='r') except IOError: raise ValueError('Invalid URL or file: {}'.format(url)) def get_coefs(self, platform, channel): """Get the coefs.""" if channel == '00_7': return self._get_vis_coefs(platform=platform) return self._get_ir_coefs(platform=platform, channel=channel) def _get_ir_coefs(self, platform, channel): from collections import defaultdict coefs = defaultdict(list) # Extract scale and offset for conversion counts->radiance from # Table 1-1 (same for all platforms, only depends on the channel) gvar_channel = self.gvar_channels[platform][channel] table11 = self._get_table(root=self.ir_html, heading='Table 1-1', heading_type='h3') for row in table11: if int(row[0]) == gvar_channel: coefs['scale'] = self._float(row[1]) coefs['offset'] = self._float(row[2]) # Extract n,a,b (radiance -> BT) from the coefficient table for the # given platform table = self._get_table(root=self.ir_html, heading=self.ir_tables[platform], heading_type='h3') channel_regex = re.compile('^{}(?:/[a,b])?$'.format(gvar_channel)) for row in table: if channel_regex.match(row[0]): # Extract coefficients. Detector (a) always comes before (b) # in the table so that simply appending preserves the order. 
coefs['n'].append(self._float(row[1])) coefs['a'].append(self._float(row[2])) coefs['b'].append(self._float(row[3])) return coefs def _get_vis_coefs(self, platform): from collections import defaultdict # Find calibration table table = self._get_table(root=self.vis_html, heading=self.vis_tables[platform], heading_type='p') # Extract values coefs = defaultdict(list) if platform in ('GOES-8', 'GOES-9'): # GOES 8&9 coefficients are in the same table col = 1 if platform == 'GOES-8' else 2 coefs['slope'].append(self._float(table[1][col])) coefs['x0'] = self._float(table[2][col]) coefs['offset'].append(self._float(table[3][col])) coefs['k'] = self._float(table[4][col]) else: # k and x0 appear in the first row only coefs['slope'].append(self._float(table[0][1])) coefs['x0'] = self._float(table[0][2]) coefs['k'] = self._float(table[0][4]) coefs['offset'].append(self._float(table[0][3])) # Remaining rows for row in table[1:]: coefs['slope'].append(self._float(row[1])) coefs['offset'].append(self._float(row[2])) return coefs def _get_table(self, root, heading, heading_type, ): # Find table by its heading headings = [h for h in root.find_all(heading_type) if heading in h.text] if not headings: raise ValueError('Cannot find a coefficient table matching text ' '"{}"'.format(heading)) if len(headings) > 1: raise ValueError('Found multiple headings matching text "{}"' .format(heading)) table = headings[0].next_sibling.next_sibling # Copy items to a list of lists tab = list() for row in table.find_all('tr'): cols = row.find_all('td') if cols: tab.append([c.text for c in cols]) return tab def _denoise(self, string): return string.replace('\n', '').replace(' ', '') def _float(self, string): """Convert string to float. Take care of numbers in exponential format """ string = self._denoise(string) exp_match = re.match(r'^[-.\d]+x10-(\d)$', string) if exp_match: exp = int(exp_match.groups()[0]) fac = 10 ** -exp string = string.replace('x10-{}'.format(exp), '') else: fac = 1 return fac * float(string) def test_coefs(ir_url, vis_url): """Test calibration coefficients against NOAA reference pages. Currently the reference pages are: ir_url = https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html vis_url = https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html Args: ir_url: Path or URL to HTML page with IR coefficients vis_url: Path or URL to HTML page with VIS coefficients Raises: ValueError if coefficients don't match the reference """ reader = GOESCoefficientReader(ir_url=ir_url, vis_url=vis_url) for platform in CALIB_COEFS: for channel, coefs in CALIB_COEFS[platform].items(): coefs_expected = reader.get_coefs(platform=platform, channel=channel) for cname in coefs_expected.keys(): if not np.allclose(coefs[cname], coefs_expected[cname]): raise ValueError( 'Coefficient {} for {} channel {} does not match the ' 'reference'.format(cname, platform, channel)) logger.info('Coefficients OK') return True satpy-0.34.0/satpy/readers/gpm_imerg.py000066400000000000000000000071231420401153000200340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for GPM imerg data on half-hourly timesteps. References: - The NASA IMERG ATBD: https://pmm.nasa.gov/sites/default/files/document_files/IMERG_ATBD_V06.pdf """ import logging from datetime import datetime import h5py import numpy as np from pyresample.geometry import AreaDefinition from satpy.readers.hdf5_utils import HDF5FileHandler logger = logging.getLogger(__name__) class Hdf5IMERG(HDF5FileHandler): """IMERG hdf5 reader.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(Hdf5IMERG, self).__init__(filename, filename_info, filetype_info) self.finfo = filename_info self.cache = {} @property def start_time(self): """Find the start time from filename info.""" return datetime(self.finfo['date'].year, self.finfo['date'].month, self.finfo['date'].day, self.finfo['start_time'].hour, self.finfo['start_time'].minute, self.finfo['start_time'].second) @property def end_time(self): """Find the end time from filename info.""" return datetime(self.finfo['date'].year, self.finfo['date'].month, self.finfo['date'].day, self.finfo['end_time'].hour, self.finfo['end_time'].minute, self.finfo['end_time'].second) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" file_key = ds_info.get('file_key', dataset_id['name']) dsname = 'Grid/' + file_key data = self[dsname].squeeze().transpose() data.values = np.flipud(data.values) fill = data.attrs['_FillValue'] pts = (data.values == fill).nonzero() data.values[pts] = np.nan for key in list(data.attrs.keys()): val = data.attrs[key] if isinstance(val, h5py.h5r.Reference): del data.attrs[key] return data def get_area_def(self, dsid): """Create area definition from the gridded lat/lon values.""" lats = self.__getitem__('Grid/lat').values lons = self.__getitem__('Grid/lon').values width = lons.shape[0] height = lats.shape[0] lower_left_x = lons[0] lower_left_y = lats[0] upper_right_x = lons[-1] upper_right_y = lats[-1] area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "IMERG GPM Equirectangular Projection" area_id = 'imerg' proj_id = 'equirectangular' proj_dict = {'proj': 'longlat', 'datum': 'WGS84', 'ellps': 'WGS84', } area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def satpy-0.34.0/satpy/readers/grib.py000066400000000000000000000271421420401153000170140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Generic Reader for GRIB2 files. Currently this reader depends on the `pygrib` python package. 
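A minimal usage sketch (the file name below is made up)::

    from satpy import Scene

    scn = Scene(filenames=['example_model_output.grib2'], reader='grib')
    print(scn.available_dataset_names())  # GRIB shortNames found in the file
    scn.load(['gh'])  # a level selector may be needed if several levels exist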
The `eccodes` package from ECMWF is preferred, but does not support python 3 at the time of writing. """ import logging from datetime import datetime import dask.array as da import numpy as np import pygrib import xarray as xr from pyproj import Proj from pyresample import geometry from satpy import CHUNK_SIZE from satpy.dataset import DataQuery from satpy.readers.file_handlers import BaseFileHandler LOG = logging.getLogger(__name__) CF_UNITS = { 'none': '1', } class GRIBFileHandler(BaseFileHandler): """Generic GRIB file handler.""" def __init__(self, filename, filename_info, filetype_info): """Open grib file and do initial message parsing.""" super(GRIBFileHandler, self).__init__(filename, filename_info, filetype_info) self._msg_datasets = {} self._start_time = None self._end_time = None try: with pygrib.open(self.filename) as grib_file: first_msg = grib_file.message(1) last_msg = grib_file.message(grib_file.messages) start_time = self._convert_datetime( first_msg, 'validityDate', 'validityTime') end_time = self._convert_datetime( last_msg, 'validityDate', 'validityTime') self._start_time = start_time self._end_time = end_time if 'keys' not in filetype_info: self._analyze_messages(grib_file) self._idx = None else: self._create_dataset_ids(filetype_info['keys']) self._idx = pygrib.index(self.filename, *filetype_info['keys'].keys()) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) def _analyze_messages(self, grib_file): grib_file.seek(0) for idx, msg in enumerate(grib_file): msg_id = DataQuery(name=msg['shortName'], level=msg['level'], modifiers=tuple()) ds_info = { 'message': idx + 1, 'name': msg['shortName'], 'level': msg['level'], 'file_type': self.filetype_info['file_type'], } self._msg_datasets[msg_id] = ds_info def _create_dataset_ids(self, keys): from itertools import product ordered_keys = [k for k in keys.keys() if 'id_key' in keys[k]] for id_vals in product(*[keys[k]['values'] for k in ordered_keys]): id_keys = [keys[k]['id_key'] for k in ordered_keys] msg_info = dict(zip(ordered_keys, id_vals)) ds_info = dict(zip(id_keys, id_vals)) msg_id = DataQuery(**ds_info) ds_info = msg_id.to_dict() ds_info.update(msg_info) ds_info['file_type'] = self.filetype_info['file_type'] self._msg_datasets[msg_id] = ds_info @staticmethod def _convert_datetime(msg, date_key, time_key, date_format="%Y%m%d%H%M"): date_str = "{:d}{:04d}".format(msg[date_key], msg[time_key]) return datetime.strptime(date_str, date_format) @property def start_time(self): """Get start time of this entire file. Assumes the first message is the earliest message. """ return self._start_time @property def end_time(self): """Get end time of this entire file. Assumes the last message is the latest message. 
""" return self._end_time def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" # previously configured or provided datasets # we can't provide any additional information for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info # new datasets for ds_info in self._msg_datasets.values(): yield True, ds_info def _get_message(self, ds_info): with pygrib.open(self.filename) as grib_file: if 'message' in ds_info: msg_num = ds_info['message'] msg = grib_file.message(msg_num) else: msg_keys = self.filetype_info['keys'].keys() msg = self._idx(**{k: ds_info[k] for k in msg_keys})[0] return msg @staticmethod def _correct_cyl_minmax_xy(proj_params, min_lon, min_lat, max_lon, max_lat): proj = Proj(**proj_params) min_x, min_y = proj(min_lon, min_lat) max_x, max_y = proj(max_lon, max_lat) if max_x <= min_x: # wrap around # make 180 longitude the prime meridian # assuming we are going from 0 to 360 longitude proj_params['pm'] = 180 proj = Proj(**proj_params) # recompute x/y extents with this new projection min_x, min_y = proj(min_lon, min_lat) max_x, max_y = proj(max_lon, max_lat) return proj_params, (min_x, min_y, max_x, max_y) @staticmethod def _get_cyl_minmax_lonlat(lons, lats): min_lon = lons[0] max_lon = lons[-1] min_lat = lats[0] max_lat = lats[-1] if min_lat > max_lat: # lats aren't in the order we thought they were, flip them min_lat, max_lat = max_lat, min_lat return min_lon, min_lat, max_lon, max_lat def _get_cyl_area_info(self, msg, proj_params): proj_params['proj'] = 'eqc' lons = msg['distinctLongitudes'] lats = msg['distinctLatitudes'] shape = (lats.shape[0], lons.shape[0]) minmax_lonlat = self._get_cyl_minmax_lonlat(lons, lats) proj_params, minmax_xy = self._correct_cyl_minmax_xy(proj_params, *minmax_lonlat) extents = self._get_extents(*minmax_xy, shape) return proj_params, shape, extents @staticmethod def _get_extents(min_x, min_y, max_x, max_y, shape): half_x = abs((max_x - min_x) / (shape[1] - 1)) / 2. half_y = abs((max_y - min_y) / (shape[0] - 1)) / 2. 
return min_x - half_x, min_y - half_y, max_x + half_x, max_y + half_y @staticmethod def _get_corner_xy(proj_params, lons, lats, scans_positively): proj = Proj(**proj_params) x, y = proj(lons, lats) if scans_positively: min_x, min_y = x[0], y[0] max_x, max_y = x[3], y[3] else: min_x, min_y = x[2], y[2] max_x, max_y = x[1], y[1] return min_x, min_y, max_x, max_y @staticmethod def _get_corner_lonlat(proj_params, lons, lats): # take the corner points only lons = lons[([0, 0, -1, -1], [0, -1, 0, -1])] lats = lats[([0, 0, -1, -1], [0, -1, 0, -1])] # if we have longitudes over 180, assume 0-360 if (lons > 180).any(): # make 180 longitude the prime meridian proj_params['pm'] = 180 return proj_params, lons, lats def _get_area_info(self, msg, proj_params): lats, lons = msg.latlons() shape = lats.shape scans_positively = (msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1) proj_params, lons, lats = self._get_corner_lonlat( proj_params, lons, lats) minmax_xy = self._get_corner_xy(proj_params, lons, lats, scans_positively) extents = self._get_extents(*minmax_xy, shape) return proj_params, shape, extents @staticmethod def _correct_proj_params_over_prime_meridian(proj_params): # correct for longitudes over 180 for lon_param in ['lon_0', 'lon_1', 'lon_2']: if proj_params.get(lon_param, 0) > 180: proj_params[lon_param] -= 360 return proj_params def _area_def_from_msg(self, msg): proj_params = msg.projparams.copy() proj_params = self._correct_proj_params_over_prime_meridian(proj_params) if proj_params['proj'] in ('cyl', 'eqc'): # eqc projection that goes from 0 to 360 proj_params, shape, extents = self._get_cyl_area_info(msg, proj_params) else: proj_params, shape, extents = self._get_area_info(msg, proj_params) return geometry.AreaDefinition( 'on-the-fly grib area', 'on-the-fly grib area', 'on-the-fly grib area', proj_params, shape[1], shape[0], extents, ) def get_area_def(self, dsid): """Get area definition for message. If latlong grid then convert to valid eqc grid. 
""" msg = self._get_message(self._msg_datasets[dsid]) try: return self._area_def_from_msg(msg) except (RuntimeError, KeyError): raise RuntimeError("Unknown GRIB projection information") def get_metadata(self, msg, ds_info): """Get metadata.""" model_time = self._convert_datetime(msg, 'dataDate', 'dataTime') start_time = self._convert_datetime(msg, 'validityDate', 'validityTime') end_time = start_time try: center_description = msg['centreDescription'] except (RuntimeError, KeyError): center_description = None key_dicts = { 'shortName': 'shortName', 'long_name': 'name', 'pressureUnits': 'pressureUnits', 'typeOfLevel': 'typeOfLevel', 'standard_name': 'cfName', 'units': 'units', 'modelName': 'modelName', 'valid_min': 'minimum', 'valid_max': 'maximum', 'sensor': 'modelName'} ds_info.update({ 'filename': self.filename, 'model_time': model_time, 'centreDescription': center_description, 'start_time': start_time, 'end_time': end_time, 'platform_name': 'unknown'}) for key in key_dicts: if key_dicts[key] in msg.keys(): ds_info[key] = msg[key_dicts[key]] else: ds_info[key] = 'unknown' return ds_info def get_dataset(self, dataset_id, ds_info): """Read a GRIB message into an xarray DataArray.""" msg = self._get_message(ds_info) ds_info = self.get_metadata(msg, ds_info) fill = msg['missingValue'] data = msg.values.astype(np.float32) if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): data = data.filled(np.nan) data = da.from_array(data, chunks=CHUNK_SIZE) else: data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) satpy-0.34.0/satpy/readers/hdf4_utils.py000066400000000000000000000103451420401153000201330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Helpers for reading hdf4-based files.""" import logging import dask.array as da import numpy as np import xarray as xr from pyhdf.SD import SD, SDC, SDS from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler LOG = logging.getLogger(__name__) HTYPE_TO_DTYPE = { SDC.INT8: np.int8, SDC.UCHAR: np.uint8, SDC.CHAR: np.int8, SDC.INT32: np.int32, SDC.INT16: np.int16, SDC.UINT8: np.uint8, SDC.UINT16: np.uint16, SDC.UINT32: np.uint32, SDC.FLOAT32: np.float32, SDC.FLOAT64: np.float64, } def from_sds(var, *args, **kwargs): """Create a dask array from a SD dataset.""" var.__dict__['dtype'] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) shape = var.info()[2] var.__dict__['shape'] = shape if isinstance(shape, (tuple, list)) else tuple(shape) return da.from_array(var, *args, **kwargs) class HDF4FileHandler(BaseFileHandler): """Base class for common HDF4 operations.""" def __init__(self, filename, filename_info, filetype_info): """Open file and collect information.""" super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = {} file_handle = SD(self.filename, SDC.READ) self._collect_attrs('', file_handle.attributes()) for k in file_handle.datasets().keys(): self.collect_metadata(k, file_handle.select(k)) del file_handle def _collect_attrs(self, name, attrs): for key, value in attrs.items(): value = np.squeeze(value) if issubclass(value.dtype.type, (np.string_, np.unicode_)) and not value.shape: value = value.item() # convert to scalar if not isinstance(value, str): # python 3 - was scalar numpy array of bytes # otherwise python 2 - scalar numpy array of 'str' value = value.decode() self.file_content["{}/attr/{}".format(name, key)] = value elif not value.shape: # convert to a scalar self.file_content["{}/attr/{}".format(name, key)] = value.item() else: self.file_content["{}/attr/{}".format(name, key)] = value def collect_metadata(self, name, obj): """Collect all metadata about file content.""" if isinstance(obj, SDS): self.file_content[name] = obj info = obj.info() self.file_content[name + "/dtype"] = np.dtype(HTYPE_TO_DTYPE.get(info[3])) self.file_content[name + "/shape"] = info[2] if isinstance(info[2], (int, float)) else tuple(info[2]) def _open_xarray_dataset(self, val, chunks=CHUNK_SIZE): """Read the band in blocks.""" dask_arr = from_sds(val, chunks=chunks) attrs = val.attributes() return xr.DataArray(dask_arr, dims=('y', 'x'), attrs=attrs) def __getitem__(self, key): """Get file content as xarray compatible objects.""" val = self.file_content[key] if isinstance(val, SDS): # these datasets are closed and inaccessible when the file is closed, need to reopen return self._open_xarray_dataset(val) return val def __contains__(self, item): """Check if item is in file content.""" return item in self.file_content def get(self, item, default=None): """Get variable as DataArray or return the default.""" if item in self: return self[item] else: return default satpy-0.34.0/satpy/readers/hdf5_utils.py000066400000000000000000000101561420401153000201340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2017, 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helpers for reading hdf5-based files.""" import logging import dask.array as da import h5py import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str LOG = logging.getLogger(__name__) class HDF5FileHandler(BaseFileHandler): """Small class for inspecting a HDF5 file and retrieve its metadata/header data.""" def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" super(HDF5FileHandler, self).__init__( filename, filename_info, filetype_info) self.file_content = {} self._attrs_cache = {} try: file_handle = h5py.File(self.filename, 'r') except IOError: LOG.exception( 'Failed reading file %s. Possibly corrupted file', self.filename) raise file_handle.visititems(self.collect_metadata) self._collect_attrs('', file_handle.attrs) file_handle.close() def _collect_attrs(self, name, attrs): attrs_cache = self._attrs_cache.setdefault(name, {}) for key, value in attrs.items(): value = np.squeeze(value) fc_key = "{}/attr/{}".format(name, key) try: value = np2str(value) except ValueError: # use the original value pass except AttributeError: # A HDF5 reference ? value = self.get_reference(name, key) if value is None: LOG.warning("Value cannot be converted - skip setting attribute %s", fc_key) continue self.file_content[fc_key] = attrs_cache[key] = value def get_reference(self, name, key): """Get reference.""" with h5py.File(self.filename, 'r') as hf: return self._get_reference(hf, hf[name].attrs[key]) def _get_reference(self, hf, ref): try: return [self._get_reference(hf, elt) for elt in ref] except TypeError: if isinstance(ref, h5py.h5r.Reference): ref_name = h5py.h5r.get_name(ref, hf.id) return hf[ref_name][()] def collect_metadata(self, name, obj): """Collect metadata.""" if isinstance(obj, h5py.Dataset): self.file_content[name] = obj self.file_content[name + "/dtype"] = obj.dtype self.file_content[name + "/shape"] = obj.shape self._collect_attrs(name, obj.attrs) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, h5py.Dataset): # these datasets are closed and inaccessible when the file is closed, need to reopen dset = h5py.File(self.filename, 'r')[key] dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) if dset.ndim == 2: return xr.DataArray(dset_data, dims=['y', 'x'], attrs=attrs) return xr.DataArray(dset_data, attrs=attrs) return val def __contains__(self, item): """Get item from file content.""" return item in self.file_content def get(self, item, default=None): """Get item.""" if item in self: return self[item] else: return default satpy-0.34.0/satpy/readers/hdfeos_base.py000066400000000000000000000404001420401153000203230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Base HDF-EOS reader.""" from __future__ import annotations import logging import re from ast import literal_eval from contextlib import suppress from datetime import datetime import numpy as np import xarray as xr from pyhdf.error import HDF4Error from pyhdf.SD import SD from satpy import CHUNK_SIZE, DataID from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) def interpolate(clons, clats, csatz, src_resolution, dst_resolution): """Interpolate two parallel datasets jointly.""" if csatz is None: return _interpolate_no_angles(clons, clats, src_resolution, dst_resolution) return _interpolate_with_angles(clons, clats, csatz, src_resolution, dst_resolution) def _interpolate_with_angles(clons, clats, csatz, src_resolution, dst_resolution): from geotiepoints.modisinterpolator import modis_1km_to_250m, modis_1km_to_500m, modis_5km_to_1km # (src_res, dst_res, is satz not None) -> interp function interpolation_functions = { (5000, 1000): modis_5km_to_1km, (1000, 500): modis_1km_to_500m, (1000, 250): modis_1km_to_250m } return _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution, (clons, clats, csatz)) def _interpolate_no_angles(clons, clats, src_resolution, dst_resolution): interpolation_functions = {} try: from geotiepoints.simple_modis_interpolator import modis_1km_to_250m as simple_1km_to_250m from geotiepoints.simple_modis_interpolator import modis_1km_to_500m as simple_1km_to_500m except ImportError: raise NotImplementedError( f"Interpolation from {src_resolution}m to {dst_resolution}m " "without satellite zenith angle information is not " "implemented. 
Try updating your version of " "python-geotiepoints.") else: interpolation_functions[(1000, 500)] = simple_1km_to_500m interpolation_functions[(1000, 250)] = simple_1km_to_250m return _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution, (clons, clats)) def _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution, args): try: interpolation_function = interpolation_functions[(src_resolution, dst_resolution)] except KeyError: error_message = "Interpolation from {}m to {}m not implemented".format( src_resolution, dst_resolution) raise NotImplementedError(error_message) logger.debug("Interpolating from {} to {}".format(src_resolution, dst_resolution)) return interpolation_function(*args) class HDFEOSBaseFileReader(BaseFileHandler): """Base file handler for HDF EOS data for both L1b and L2 products.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the base reader.""" BaseFileHandler.__init__(self, filename, filename_info, filetype_info) try: self.sd = SD(self.filename) except HDF4Error as err: error_message = "Could not load data from file {}: {}".format(self.filename, err) raise ValueError(error_message) self.metadata = self._load_all_metadata_attributes() def _load_all_metadata_attributes(self): metadata = {} attrs = self.sd.attributes() for md_key in ("CoreMetadata.0", "StructMetadata.0", "ArchiveMetadata.0"): try: str_val = attrs[md_key] except KeyError: continue else: metadata.update(self.read_mda(str_val)) return metadata @classmethod def read_mda(cls, attribute): """Read the EOS metadata.""" line_iterator = iter(attribute.split('\n')) return cls._read_mda(line_iterator) @classmethod def _read_mda(cls, lines, element=None): current_dict = {} for line in lines: if not line: continue if line == 'END': return current_dict key, val = cls._split_line(line, lines) if key in ['GROUP', 'OBJECT']: current_dict[val] = cls._read_mda(lines, val) elif key in ['END_GROUP', 'END_OBJECT']: if val != element: raise SyntaxError("Non-matching end-tag") return current_dict elif key in ['CLASS', 'NUM_VAL']: pass else: current_dict[key] = val logger.warning("Malformed EOS metadata, missing an END.") return current_dict @classmethod def _split_line(cls, line, lines): key, val = line.split('=') key = key.strip() val = val.strip() try: with suppress(ValueError): val = literal_eval(val) except SyntaxError: key, val = cls._split_line(line + next(lines), lines) return key, val @property def metadata_platform_name(self): """Platform name from the internal file metadata.""" try: # Example: 'Terra' or 'Aqua' return self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][ 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE'] except KeyError: return self._platform_name_from_filename() def _platform_name_from_filename(self): platform_indicator = self.filename_info["platform_indicator"] if platform_indicator in ("t", "O"): # t1.* or MOD* return "Terra" # a1.* or MYD* return "Aqua" @property def start_time(self): """Get the start time of the dataset.""" try: date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' + self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE']) return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') except KeyError: return self._start_time_from_filename() def _start_time_from_filename(self): for fn_key in ("start_time", "acquisition_time"): if fn_key in self.filename_info: return 
self.filename_info[fn_key] raise RuntimeError("Could not determine file start time") @property def end_time(self): """Get the end time of the dataset.""" try: date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGDATE']['VALUE'] + ' ' + self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGTIME']['VALUE']) return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') except KeyError: return self.start_time def _read_dataset_in_file(self, dataset_name): if dataset_name not in self.sd.datasets(): error_message = "Dataset name {} not included in available datasets {}".format( dataset_name, self.sd.datasets() ) raise KeyError(error_message) dataset = self.sd.select(dataset_name) return dataset def load_dataset(self, dataset_name, is_category=False): """Load the dataset from HDF EOS file.""" from satpy.readers.hdf4_utils import from_sds dataset = self._read_dataset_in_file(dataset_name) dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) dims = ('y', 'x') if dask_arr.ndim == 2 else None data = xr.DataArray(dask_arr, dims=dims, attrs=dataset.attributes()) data = self._scale_and_mask_data_array(data, is_category=is_category) return data def _scale_and_mask_data_array(self, data, is_category=False): good_mask, new_fill = self._get_good_data_mask(data, is_category=is_category) scale_factor = data.attrs.pop('scale_factor', None) add_offset = data.attrs.pop('add_offset', None) # don't scale category products, even though scale_factor may equal 1 # we still need to convert integers to floats if scale_factor is not None and not is_category: data = data * np.float32(scale_factor) if add_offset is not None and add_offset != 0: data = data + add_offset if good_mask is not None: data = data.where(good_mask, new_fill) return data def _get_good_data_mask(self, data_arr, is_category=False): try: fill_value = data_arr.attrs["_FillValue"] except KeyError: return None, None # preserve integer data types if possible if is_category and np.issubdtype(data_arr.dtype, np.integer): # no need to mask, the fill value is already what it needs to be return None, None new_fill = np.nan data_arr.attrs.pop('_FillValue', None) good_mask = data_arr != fill_value return good_mask, new_fill def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray): """Add metadata that is specific to Satpy.""" new_attrs = { 'platform_name': 'EOS-' + self.metadata_platform_name, 'sensor': 'modis', } res = data_id["resolution"] rps = self._resolution_to_rows_per_scan(res) new_attrs["rows_per_scan"] = rps data_arr.attrs.update(new_attrs) def _resolution_to_rows_per_scan(self, resolution: int) -> int: known_rps = { 5000: 2, 1000: 10, 500: 20, 250: 40, } return known_rps.get(resolution, 10) class HDFEOSGeoReader(HDFEOSBaseFileReader): """Handler for the geographical datasets.""" # list of geographical datasets handled by the georeader # mapping to the default variable name if not specified in YAML DATASET_NAMES = { 'longitude': 'Longitude', 'latitude': 'Latitude', 'satellite_azimuth_angle': ('SensorAzimuth', 'Sensor_Azimuth'), 'satellite_zenith_angle': ('SensorZenith', 'Sensor_Zenith'), 'solar_azimuth_angle': ('SolarAzimuth', 'SolarAzimuth'), 'solar_zenith_angle': ('SolarZenith', 'Solar_Zenith'), } def __init__(self, filename, filename_info, filetype_info): """Initialize the geographical reader.""" HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info) self.cache = {} @staticmethod def is_geo_loadable_dataset(dataset_name: str) -> bool: """Determine if this dataset should be loaded as a Geo 
dataset.""" return dataset_name in HDFEOSGeoReader.DATASET_NAMES @staticmethod def read_geo_resolution(metadata): """Parse metadata to find the geolocation resolution.""" # level 1 files try: return HDFEOSGeoReader._geo_resolution_for_l1b(metadata) except KeyError: try: return HDFEOSGeoReader._geo_resolution_for_l2_l1b(metadata) except (AttributeError, KeyError): raise RuntimeError("Could not determine resolution from file metadata") @staticmethod def _geo_resolution_for_l1b(metadata): ds = metadata['INVENTORYMETADATA']['COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] if ds.endswith('D03') or ds.endswith('HKM') or ds.endswith('QKM'): return 1000 # 1km files have 5km geolocation usually return 5000 @staticmethod def _geo_resolution_for_l2_l1b(metadata): # data files probably have this level 2 files # this does not work for L1B 1KM data files because they are listed # as 1KM data but the geo data inside is at 5km latitude_dim = metadata['SwathStructure']['SWATH_1']['DimensionMap']['DimensionMap_2']['GeoDimension'] resolution_regex = re.compile(r'(?P\d+)(km|KM)') resolution_match = resolution_regex.search(latitude_dim) return int(resolution_match.group('resolution')) * 1000 @property def geo_resolution(self): """Resolution of the geographical data retrieved in the metadata.""" return self.read_geo_resolution(self.metadata) def _load_ds_by_name(self, ds_name): """Attempt loading using multiple common names.""" var_names = self.DATASET_NAMES[ds_name] if isinstance(var_names, (list, tuple)): try: return self.load_dataset(var_names[0]) except KeyError: return self.load_dataset(var_names[1]) return self.load_dataset(var_names) def get_interpolated_dataset(self, name1, name2, resolution, offset=0): """Load and interpolate datasets.""" try: result1 = self.cache[(name1, resolution)] result2 = self.cache[(name2, resolution)] except KeyError: result1 = self._load_ds_by_name(name1) result2 = self._load_ds_by_name(name2) - offset try: sensor_zenith = self._load_ds_by_name('satellite_zenith_angle') except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None result1, result2 = interpolate( result1, result2, sensor_zenith, self.geo_resolution, resolution ) self.cache[(name1, resolution)] = result1 self.cache[(name2, resolution)] = result2 + offset def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray: """Get the geolocation dataset.""" # Name of the dataset as it appears in the HDF EOS file in_file_dataset_name = dataset_info.get('file_key') # Name of the dataset in the YAML file dataset_name = dataset_id['name'] # Resolution asked resolution = dataset_id['resolution'] if in_file_dataset_name is not None: # if the YAML was configured with a specific name use that data = self.load_dataset(in_file_dataset_name) else: # otherwise use the default name for this variable data = self._load_ds_by_name(dataset_name) if resolution != self.geo_resolution: if in_file_dataset_name is not None: # they specified a custom variable name but # we don't know how to interpolate this yet raise NotImplementedError( "Interpolation for variable '{}' is not " "configured".format(dataset_name)) # The data must be interpolated logger.debug("Loading %s", dataset_name) if dataset_name in ['longitude', 'latitude']: self.get_interpolated_dataset('longitude', 'latitude', resolution) elif dataset_name in ['satellite_azimuth_angle', 'satellite_zenith_angle']: # Sensor dataset names differs between L1b and L2 products self.get_interpolated_dataset('satellite_azimuth_angle', 
'satellite_zenith_angle', resolution, offset=90) elif dataset_name in ['solar_azimuth_angle', 'solar_zenith_angle']: # Sensor dataset names differs between L1b and L2 products self.get_interpolated_dataset('solar_azimuth_angle', 'solar_zenith_angle', resolution, offset=90) data = self.cache[dataset_name, resolution] for key in ('standard_name', 'units'): if key in dataset_info: data.attrs[key] = dataset_info[key] self._add_satpy_metadata(dataset_id, data) return data satpy-0.34.0/satpy/readers/hrit_base.py000066400000000000000000000274321420401153000200330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HRIT/LRIT format reader. This module is the base module for all HRIT-based formats. Here, you will find the common building blocks for hrit reading. One of the features here is the on-the-fly decompression of hrit files. It needs a path to the xRITDecompress binary to be provided through the environment variable called XRIT_DECOMPRESS_PATH. When compressed hrit files are then encountered (files finishing with `.C_`), they are decompressed to the system's temporary directory for reading. 
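A minimal sketch of that workflow (the binary location, the file pattern and the
``seviri_l1b_hrit`` reader choice are only examples, not requirements)::

    import os
    from glob import glob

    from satpy import Scene

    os.environ["XRIT_DECOMPRESS_PATH"] = "/opt/xrit/bin/xRITDecompress"
    scn = Scene(filenames=glob("/data/H-000-MSG4__*-C_"), reader="seviri_l1b_hrit")
    scn.load(["IR_108"])  # compressed segments are decompressed on the fly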
""" import logging import os from datetime import timedelta from io import BytesIO from subprocess import PIPE, Popen from tempfile import gettempdir import dask.array as da import numpy as np import xarray as xr from pyresample import geometry from satpy.readers.eum_base import time_cds_short from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import dec10216 logger = logging.getLogger('hrit_base') common_hdr = np.dtype([('hdr_id', 'u1'), ('record_length', '>u2')]) primary_header = np.dtype([('file_type', 'u1'), ('total_header_length', '>u4'), ('data_field_length', '>u8')]) image_structure = np.dtype([('number_of_bits_per_pixel', 'u1'), ('number_of_columns', '>u2'), ('number_of_lines', '>u2'), ('compression_flag_for_data', 'u1')]) image_navigation = np.dtype([('projection_name', 'S32'), ('cfac', '>i4'), ('lfac', '>i4'), ('coff', '>i4'), ('loff', '>i4')]) image_data_function = np.dtype([('function', '|S1')]) annotation_header = np.dtype([('annotation', '|S1')]) timestamp_record = np.dtype([('cds_p_field', 'u1'), ('timestamp', time_cds_short)]) ancillary_text = np.dtype([('ancillary', '|S1')]) key_header = np.dtype([('key', '|S1')]) base_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text', key_header: 'key_header'} base_hdr_map = {0: primary_header, 1: image_structure, 2: image_navigation, 3: image_data_function, 4: annotation_header, 5: timestamp_record, 6: ancillary_text, 7: key_header, } def get_xritdecompress_cmd(): """Find a valid binary for the xRITDecompress command.""" cmd = os.environ.get('XRIT_DECOMPRESS_PATH', None) if not cmd: raise IOError("XRIT_DECOMPRESS_PATH is not defined (complete path to xRITDecompress)") question = ("Did you set the environment variable XRIT_DECOMPRESS_PATH correctly?") if not os.path.exists(cmd): raise IOError(str(cmd) + " does not exist!\n" + question) elif os.path.isdir(cmd): raise IOError(str(cmd) + " is a directory!\n" + question) return cmd def get_xritdecompress_outfile(stdout): """Analyse the output of the xRITDecompress command call and return the file.""" outfile = b'' for line in stdout: try: k, v = [x.strip() for x in line.split(b':', 1)] except ValueError: break if k == b'Decompressed file': outfile = v break return outfile def decompress(infile, outdir='.'): """Decompress an XRIT data file and return the path to the decompressed file. It expect to find Eumetsat's xRITDecompress through the environment variable XRIT_DECOMPRESS_PATH. 
""" cmd = get_xritdecompress_cmd() infile = os.path.abspath(infile) cwd = os.getcwd() os.chdir(outdir) p = Popen([cmd, infile], stdout=PIPE) stdout = BytesIO(p.communicate()[0]) status = p.returncode os.chdir(cwd) if status != 0: raise IOError("xrit_decompress '%s', failed, status=%d" % (infile, status)) outfile = get_xritdecompress_outfile(stdout) if not outfile: raise IOError("xrit_decompress '%s', failed, no output file is generated" % infile) return os.path.join(outdir, outfile.decode('utf-8')) class HRITFileHandler(BaseFileHandler): """HRIT standard format reader.""" def __init__(self, filename, filename_info, filetype_info, hdr_info): """Initialize the reader.""" super(HRITFileHandler, self).__init__(filename, filename_info, filetype_info) self.mda = {} self._get_hd(hdr_info) if self.mda.get('compression_flag_for_data'): logger.debug('Unpacking %s', filename) try: self.filename = decompress(filename, gettempdir()) except IOError as err: logger.warning("Unpacking failed: %s", str(err)) self.mda = {} self._get_hd(hdr_info) self._start_time = filename_info['start_time'] self._end_time = self._start_time + timedelta(minutes=15) def _get_hd(self, hdr_info): """Open the file, read and get the basic file header info and set the mda dictionary.""" hdr_map, variable_length_headers, text_headers = hdr_info with open(self.filename) as fp: total_header_length = 16 while fp.tell() < total_header_length: hdr_id = np.fromfile(fp, dtype=common_hdr, count=1)[0] the_type = hdr_map[hdr_id['hdr_id']] if the_type in variable_length_headers: field_length = int((hdr_id['record_length'] - 3) / the_type.itemsize) current_hdr = np.fromfile(fp, dtype=the_type, count=field_length) key = variable_length_headers[the_type] if key in self.mda: if not isinstance(self.mda[key], list): self.mda[key] = [self.mda[key]] self.mda[key].append(current_hdr) else: self.mda[key] = current_hdr elif the_type in text_headers: field_length = int((hdr_id['record_length'] - 3) / the_type.itemsize) char = list(the_type.fields.values())[0][0].char new_type = np.dtype(char + str(field_length)) current_hdr = np.fromfile(fp, dtype=new_type, count=1)[0] self.mda[text_headers[the_type]] = current_hdr else: current_hdr = np.fromfile(fp, dtype=the_type, count=1)[0] self.mda.update( dict(zip(current_hdr.dtype.names, current_hdr))) total_header_length = self.mda['total_header_length'] self.mda.setdefault('number_of_bits_per_pixel', 10) self.mda['projection_parameters'] = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, # FIXME: find a reasonable SSP 'SSP_longitude': 0.0} self.mda['orbital_parameters'] = {} def get_shape(self, dsid, ds_info): """Get shape.""" return int(self.mda['number_of_lines']), int(self.mda['number_of_columns']) @property def start_time(self): """Get start time.""" return self._start_time @property def end_time(self): """Get end time.""" return self._end_time def get_dataset(self, key, info): """Load a dataset.""" # Read bands data = self.read_band(key, info) # Convert to xarray xdata = xr.DataArray(data, dims=['y', 'x']) return xdata def get_xy_from_linecol(self, line, col, offsets, factors): """Get the intermediate coordinates from line & col. Intermediate coordinates are actually the instruments scanning angles. 
""" loff, coff = offsets lfac, cfac = factors x__ = (col - coff) / cfac * 2**16 y__ = (line - loff) / lfac * 2**16 return x__, y__ def get_area_extent(self, size, offsets, factors, platform_height): """Get the area extent of the file.""" nlines, ncols = size h = platform_height # count starts at 1 cols = 1 - 0.5 lines = 1 - 0.5 ll_x, ll_y = self.get_xy_from_linecol(lines, cols, offsets, factors) cols += ncols lines += nlines ur_x, ur_y = self.get_xy_from_linecol(lines, cols, offsets, factors) return (np.deg2rad(ll_x) * h, np.deg2rad(ll_y) * h, np.deg2rad(ur_x) * h, np.deg2rad(ur_y) * h) def get_area_def(self, dsid): """Get the area definition of the band.""" cfac = np.int32(self.mda['cfac']) lfac = np.int32(self.mda['lfac']) coff = np.float32(self.mda['coff']) loff = np.float32(self.mda['loff']) a = self.mda['projection_parameters']['a'] b = self.mda['projection_parameters']['b'] h = self.mda['projection_parameters']['h'] lon_0 = self.mda['projection_parameters']['SSP_longitude'] nlines = int(self.mda['number_of_lines']) ncols = int(self.mda['number_of_columns']) area_extent = self.get_area_extent((nlines, ncols), (loff, coff), (lfac, cfac), h) proj_dict = {'a': float(a), 'b': float(b), 'lon_0': float(lon_0), 'h': float(h), 'proj': 'geos', 'units': 'm'} area = geometry.AreaDefinition( 'some_area_name', "On-the-fly area", 'geosmsg', proj_dict, ncols, nlines, area_extent) self.area = area return area def read_band(self, key, info): """Read the data.""" shape = int(np.ceil(self.mda['data_field_length'] / 8.)) if self.mda['number_of_bits_per_pixel'] == 16: dtype = '>u2' shape //= 2 elif self.mda['number_of_bits_per_pixel'] in [8, 10]: dtype = np.uint8 shape = (shape, ) data = np.memmap(self.filename, mode='r', offset=self.mda['total_header_length'], dtype=dtype, shape=shape) data = da.from_array(data, chunks=shape[0]) if self.mda['number_of_bits_per_pixel'] == 10: data = dec10216(data) data = data.reshape((self.mda['number_of_lines'], self.mda['number_of_columns'])) return data satpy-0.34.0/satpy/readers/hrit_jma.py000066400000000000000000000433171420401153000176700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HRIT format reader for JMA data. Introduction ------------ The JMA HRIT format is described in the `JMA HRIT - Mission Specific Implementation`_. There are three readers for this format in Satpy: - ``jami_hrit``: For data from the `JAMI` instrument on MTSAT-1R - ``mtsat2-imager_hrit``: For data from the `Imager` instrument on MTSAT-2 - ``ahi_hrit``: For data from the `AHI` instrument on Himawari-8/9 Although the data format is identical, the instruments have different characteristics, which is why there is a dedicated reader for each of them. Sample data is available here: - `JAMI/Imager sample data`_ - `AHI sample data`_ Example ------- Here is an example how to read Himwari-8 HRIT data with Satpy: .. 
code-block:: python from satpy import Scene import glob filenames = glob.glob('data/IMG_DK01B14_2018011109*') scn = Scene(filenames=filenames, reader='ahi_hrit') scn.load(['B14']) print(scn['B14']) Output: .. code-block:: none dask.arrayu1'), ('total_no_image_segm', '>u1'), ('line_no_image_segm', '>u2')]) encryption_key_message = np.dtype([('station_number', '>u2')]) image_compensation_information = np.dtype([('compensation', '|S1')]) image_observation_time = np.dtype([('times', '|S1')]) image_quality_information = np.dtype([('quality', '|S1')]) jma_variable_length_headers: dict = {} jma_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text', image_compensation_information: 'image_compensation_information', image_observation_time: 'image_observation_time', image_quality_information: 'image_quality_information'} jma_hdr_map = base_hdr_map.copy() jma_hdr_map.update({7: key_header, 128: segment_identification, 129: encryption_key_message, 130: image_compensation_information, 131: image_observation_time, 132: image_quality_information }) cuc_time = np.dtype([('coarse', 'u1', (4, )), ('fine', 'u1', (3, ))]) time_cds_expanded = np.dtype([('days', '>u2'), ('milliseconds', '>u4'), ('microseconds', '>u2'), ('nanoseconds', '>u2')]) FULL_DISK = 1 NORTH_HEMIS = 2 SOUTH_HEMIS = 3 UNKNOWN_AREA = -1 AREA_NAMES = {FULL_DISK: {'short': 'FLDK', 'long': 'Full Disk'}, NORTH_HEMIS: {'short': 'NH', 'long': 'Northern Hemisphere'}, SOUTH_HEMIS: {'short': 'SH', 'long': 'Southern Hemisphere'}, UNKNOWN_AREA: {'short': 'UNKNOWN', 'long': 'Unknown Area'}} MTSAT1R = 'MTSAT-1R' MTSAT2 = 'MTSAT-2' HIMAWARI8 = 'Himawari-8' UNKNOWN_PLATFORM = 'Unknown Platform' PLATFORMS = { 'GEOS(140.00)': MTSAT1R, 'GEOS(140.25)': MTSAT1R, 'GEOS(140.70)': HIMAWARI8, 'GEOS(145.00)': MTSAT2, } SENSORS = { MTSAT1R: 'jami', MTSAT2: 'mtsat2_imager', HIMAWARI8: 'ahi' } def mjd2datetime64(mjd): """Convert Modified Julian Day (MJD) to datetime64.""" epoch = np.datetime64('1858-11-17 00:00') day2usec = 24 * 3600 * 1E6 mjd_usec = (mjd * day2usec).astype(np.int64).astype('timedelta64[us]') return epoch + mjd_usec class HRITJMAFileHandler(HRITFileHandler): """JMA HRIT format reader. By default, the reader uses the start time parsed from the filename. To use exact time, computed from the metadata, the user can define a keyword argument:: scene = Scene(filenames=filenames, reader='ahi_hrit', reader_kwargs={'use_acquisition_time_as_start_time': True}) As this time is different for every channel, time-dependent calculations like SZA correction can be pretty slow when multiple channels are used. The exact scanline times are always available as coordinates of an individual channels:: scene.load(["B03"]) print(scene["B03].coords["acq_time"].data) would print something similar to:: array(['2021-12-08T06:00:20.131200000', '2021-12-08T06:00:20.191948000', '2021-12-08T06:00:20.252695000', ..., '2021-12-08T06:09:39.449390000', '2021-12-08T06:09:39.510295000', '2021-12-08T06:09:39.571200000'], dtype='datetime64[ns]') The first value represents the exact start time, and the last one the exact end time of the data acquisition. 
""" def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_as_start_time=False): """Initialize the reader.""" super(HRITJMAFileHandler, self).__init__(filename, filename_info, filetype_info, (jma_hdr_map, jma_variable_length_headers, jma_text_headers)) self._use_acquisition_time_as_start_time = use_acquisition_time_as_start_time self.mda['segment_sequence_number'] = self.mda['image_segm_seq_no'] self.mda['planned_end_segment_number'] = self.mda['total_no_image_segm'] self.mda['planned_start_segment_number'] = 1 items = self.mda['image_data_function'].decode().split('\r') if items[0].startswith('$HALFTONE'): self.calibration_table = [] for item in items[1:]: if item == '': continue key, value = item.split(':=') if key.startswith('_UNIT'): self.mda['unit'] = item.split(':=')[1] elif key.startswith('_NAME'): pass elif key.isdigit(): key = int(key) value = float(value) self.calibration_table.append((key, value)) self.calibration_table = np.array(self.calibration_table) self.projection_name = self.mda['projection_name'].decode().strip() sublon = float(self.projection_name.split('(')[1][:-1]) self.mda['projection_parameters']['SSP_longitude'] = sublon self.platform = self._get_platform() self.is_segmented = self.mda['segment_sequence_number'] > 0 self.area_id = filename_info.get('area', UNKNOWN_AREA) if self.area_id not in AREA_NAMES: self.area_id = UNKNOWN_AREA self.area = self._get_area_def() self.acq_time = self._get_acq_time() def _get_platform(self): """Get the platform name. The platform is not specified explicitly in JMA HRIT files. For segmented data it is not even specified in the filename. But it can be derived indirectly from the projection name: GEOS(140.00): MTSAT-1R GEOS(140.25): MTSAT-1R # TODO: Check if there is more... GEOS(140.70): Himawari-8 GEOS(145.00): MTSAT-2 See [MTSAT], section 3.1. Unfortunately Himawari-8 and 9 are not distinguishable using that method at the moment. From [HIMAWARI]: "HRIT/LRIT files have the same file naming convention in the same format in Himawari-8 and Himawari-9, so there is no particular difference." TODO: Find another way to distinguish Himawari-8 and 9. References: [MTSAT] http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html [HIMAWARI] http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html """ try: return PLATFORMS[self.projection_name] except KeyError: logger.error('Unable to determine platform: Unknown projection ' 'name "{}"'.format(self.projection_name)) return UNKNOWN_PLATFORM def _check_sensor_platform_consistency(self, sensor): """Make sure sensor and platform are consistent. Args: sensor (str) : Sensor name from YAML dataset definition Raises: ValueError if they don't match """ ref_sensor = SENSORS.get(self.platform, None) if ref_sensor and not sensor == ref_sensor: logger.error('Sensor-Platform mismatch: {} is not a payload ' 'of {}. Did you choose the correct reader?' .format(sensor, self.platform)) def _get_line_offset(self): """Get line offset for the current segment. Read line offset from the file and adapt it to the current segment or half disk scan so that y(l) ~ l - loff because this is what get_geostationary_area_extent() expects. 
""" # Get line offset from the file nlines = int(self.mda['number_of_lines']) loff = np.float32(self.mda['loff']) # Adapt it to the current segment if self.is_segmented: # loff in the file specifies the offset of the full disk image # centre (1375/2750 for VIS/IR) segment_number = self.mda['segment_sequence_number'] - 1 loff -= (self.mda['total_no_image_segm'] - segment_number - 1) * nlines elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS): # loff in the file specifies the start line of the half disk image # in the full disk image loff = nlines - loff elif self.area_id == UNKNOWN_AREA: logger.error('Cannot compute line offset for unknown area') return loff def _get_area_def(self): """Get the area definition of the band.""" pdict = { 'cfac': np.int32(self.mda['cfac']), 'lfac': np.int32(self.mda['lfac']), 'coff': np.float32(self.mda['coff']), 'loff': self._get_line_offset(), 'ncols': int(self.mda['number_of_columns']), 'nlines': int(self.mda['number_of_lines']), 'scandir': 'N2S', 'a': float(self.mda['projection_parameters']['a']), 'b': float(self.mda['projection_parameters']['b']), 'h': float(self.mda['projection_parameters']['h']), 'ssp_lon': float(self.mda['projection_parameters']['SSP_longitude']), 'a_name': AREA_NAMES[self.area_id]['short'], 'a_desc': AREA_NAMES[self.area_id]['long'], 'p_id': 'geosmsg' } area_extent = get_area_extent(pdict) return get_area_definition(pdict, area_extent) def get_area_def(self, dsid): """Get the area definition of the band.""" return self.area def get_dataset(self, key, info): """Get the dataset designated by *key*.""" res = super(HRITJMAFileHandler, self).get_dataset(key, info) # Filenames of segmented data is identical for MTSAT-1R, MTSAT-2 # and Himawari-8/9. Make sure we have the correct reader for the data # at hand. self._check_sensor_platform_consistency(info['sensor']) # Calibrate and mask space pixels res = self._mask_space(self.calibrate(res, key.calibration)) # Add scanline acquisition time res.coords['acq_time'] = ('y', self.acq_time) res.coords['acq_time'].attrs['long_name'] = 'Scanline acquisition time' # Update attributes res.attrs.update(info) res.attrs['platform_name'] = self.platform res.attrs['satellite_longitude'] = float(self.mda['projection_parameters']['SSP_longitude']) res.attrs['satellite_latitude'] = 0. res.attrs['satellite_altitude'] = float(self.mda['projection_parameters']['h']) res.attrs['orbital_parameters'] = { 'projection_longitude': float(self.mda['projection_parameters']['SSP_longitude']), 'projection_latitude': 0., 'projection_altitude': float(self.mda['projection_parameters']['h'])} return res def _mask_space(self, data): """Mask space pixels.""" geomask = get_geostationary_mask(area=self.area) return data.where(geomask) def _get_acq_time(self): r"""Get the acquisition times from the file. Acquisition times for a subset of scanlines are stored in the header as follows: b'LINE:=1\rTIME:=54365.022558\rLINE:=21\rTIME:=54365.022664\r...' Missing timestamps in between are computed using linear interpolation. 
""" buf_b = np.frombuffer(self.mda['image_observation_time'], dtype=image_observation_time) # Replace \r by \n before encoding, otherwise encoding will drop all # elements except the last one buf_s = b''.join(buf_b['times']).replace(b'\r', b'\n').decode() # Split into key:=value pairs; then extract line number and timestamp splits = buf_s.strip().split('\n') lines_sparse = [int(s.split(':=')[1]) for s in splits[0::2]] times_sparse = [float(s.split(':=')[1]) for s in splits[1::2]] if self.platform == HIMAWARI8: # Only a couple of timestamps in the header, and only the first # and last are usable (duplicates inbetween). lines_sparse = [lines_sparse[0], lines_sparse[-1]] times_sparse = [times_sparse[0], times_sparse[-1]] # Compute missing timestamps using linear interpolation. lines = np.arange(lines_sparse[0], lines_sparse[-1]+1) times = np.interp(lines, lines_sparse, times_sparse) # Convert to np.datetime64 times64 = mjd2datetime64(times) return times64 @staticmethod def _interp(arr, cal): return np.interp(arr.ravel(), cal[:, 0], cal[:, 1]).reshape(arr.shape) def calibrate(self, data, calibration): """Calibrate the data.""" tic = datetime.now() if calibration == 'counts': return data if calibration == 'radiance': raise NotImplementedError("Can't calibrate to radiance.") cal = self.calibration_table res = data.data.map_blocks(self._interp, cal, dtype=cal[:, 0].dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.where(data < 65535) logger.debug("Calibration time " + str(datetime.now() - tic)) return res @property def start_time(self): """Get start time of the scan.""" if self._use_acquisition_time_as_start_time: return self.acq_time[0].astype(datetime) return self._start_time @property def end_time(self): """Get end time of the scan.""" return self.acq_time[-1].astype(datetime) satpy-0.34.0/satpy/readers/hrpt.py000066400000000000000000000226441420401153000170500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reading and calibrating hrpt avhrr data. 
Todo: - AMSU - Compare output with AAPP Reading: http://www.ncdc.noaa.gov/oa/pod-guide/ncdc/docs/klm/html/c4/sec4-1.htm#t413-1 Calibration: http://www.ncdc.noaa.gov/oa/pod-guide/ncdc/docs/klm/html/c7/sec7-1.htm """ import logging from datetime import datetime import dask.array as da import numpy as np import xarray as xr from geotiepoints import SatelliteInterpolator from pyorbital.geoloc import compute_pixels, get_lonlatalt from pyorbital.geoloc_instrument_definitions import avhrr from pyorbital.orbital import Orbital from satpy._compat import cached_property from satpy.readers.aapp_l1b import LINE_CHUNK from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", "4", "5") dtype = np.dtype([('frame_sync', '>u2', (6, )), ('id', [('id', '>u2'), ('spare', '>u2')]), ('timecode', '>u2', (4, )), ('telemetry', [("ramp_calibration", '>u2', (5, )), ("PRT", '>u2', (3, )), ("ch3_patch_temp", '>u2'), ("spare", '>u2'), ]), ('back_scan', '>u2', (10, 3)), ('space_data', '>u2', (10, 5)), ('sync', '>u2'), ('TIP_data', '>u2', (520, )), ('spare', '>u2', (127, )), ('image_data', '>u2', (2048, 5)), ('aux_sync', '>u2', (100, ))]) def time_seconds(tc_array, year): """Return the time object from the timecodes.""" tc_array = np.array(tc_array, copy=True) word = tc_array[:, 0] day = word >> 1 word = tc_array[:, 1].astype(np.uint64) msecs = ((127) & word) * 1024 word = tc_array[:, 2] msecs += word & 1023 msecs *= 1024 word = tc_array[:, 3] msecs += word & 1023 return (np.datetime64( str(year) + '-01-01T00:00:00Z', 's') + msecs[:].astype('timedelta64[ms]') + (day - 1)[:].astype('timedelta64[D]')) def bfield(array, bit): """Return the bit array.""" return (array & 2**(9 - bit + 1)).astype(bool) spacecrafts = {7: "NOAA 15", 3: "NOAA 16", 13: "NOAA 18", 15: "NOAA 19"} def geo_interpolate(lons32km, lats32km): """Interpolate geo data.""" cols32km = np.arange(0, 2048, 32) cols1km = np.arange(2048) lines = lons32km.shape[0] rows32km = np.arange(lines) rows1km = np.arange(lines) along_track_order = 1 cross_track_order = 3 satint = SatelliteInterpolator( (lons32km, lats32km), (rows32km, cols32km), (rows1km, cols1km), along_track_order, cross_track_order) lons, lats = satint.interpolate() return lons, lats def _get_channel_index(key): """Get the avhrr channel index.""" avhrr_channel_index = {'1': 0, '2': 1, '3a': 2, '3b': 2, '4': 3, '5': 4} index = avhrr_channel_index[key['name']] return index class HRPTFile(BaseFileHandler): """Reader for HRPT Minor Frame, 10 bits data expanded to 16 bits.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(HRPTFile, self).__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} self.year = filename_info.get('start_time', datetime.utcnow()).year @cached_property def times(self): """Get the timestamps for each line.""" return time_seconds(self._data["timecode"], self.year) @cached_property def _data(self): """Get the data.""" return self.read() def read(self): """Read the file.""" with open(self.filename, "rb") as fp_: data = np.memmap(fp_, dtype=dtype, mode="r") if np.all(np.median(data['frame_sync'], axis=0) > 1024): data = self._data.newbyteorder() return data @cached_property def platform_name(self): """Get the platform name.""" return spacecrafts[np.median((self._data["id"]["id"] >> 3) & 15)] def get_dataset(self, key, info): """Get the dataset.""" attrs = 
info.copy() attrs['platform_name'] = self.platform_name if key['name'] in ['latitude', 'longitude']: data = self._get_navigation_data(key) else: data = self._get_channel_data(key) result = xr.DataArray(data, dims=['y', 'x'], attrs=attrs) mask = self._get_ch3_mask_or_true(key) return result.where(mask) def _get_channel_data(self, key): """Get channel data.""" data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=(LINE_CHUNK, 2048)) if key['calibration'] != 'counts': if key['name'] in ['1', '2', '3a']: data = self.calibrate_solar_channel(data, key) if key['name'] in ['3b', '4', '5']: data = self.calibrate_thermal_channel(data, key) return data def _get_navigation_data(self, key): """Get navigation data.""" lons, lats = self.lons_lats if key['name'] == 'latitude': data = da.from_array(lats, chunks=(LINE_CHUNK, 2048)) else: data = da.from_array(lons, chunks=(LINE_CHUNK, 2048)) return data def _get_ch3_mask_or_true(self, key): mask = True if key['name'] == '3a': mask = np.tile(np.logical_not(self._is3b), (2048, 1)).T elif key['name'] == '3b': mask = np.tile(self._is3b, (2048, 1)).T return mask @cached_property def _is3b(self): return bfield(self._data["id"]["id"], 10) == 0 def calibrate_thermal_channel(self, data, key): """Calibrate a thermal channel.""" from pygac.calibration import calibrate_thermal line_numbers = ( np.round((self.times - self.times[-1]) / np.timedelta64(166666667, 'ns'))).astype(int) line_numbers -= line_numbers[0] prt, ict, space = self.telemetry index = _get_channel_index(key) data = calibrate_thermal(data, prt, ict[:, index - 2], space[:, index], line_numbers, index + 1, self.calibrator) return data def calibrate_solar_channel(self, data, key): """Calibrate a solar channel.""" from pygac.calibration import calibrate_solar julian_days = ((np.datetime64(self.start_time) - np.datetime64(str(self.year) + '-01-01T00:00:00Z')) / np.timedelta64(1, 'D')) data = calibrate_solar(data, _get_channel_index(key), self.year, julian_days, self.calibrator) return data @cached_property def calibrator(self): """Create a calibrator for the data.""" from pygac.calibration import Calibrator pg_spacecraft = ''.join(self.platform_name.split()).lower() return Calibrator(pg_spacecraft) @cached_property def telemetry(self): """Get the telemetry.""" # This isn't converted to dask arrays as it does not work with pygac prt = np.mean(self._data["telemetry"]['PRT'], axis=1) ict = np.mean(self._data['back_scan'], axis=1) space = np.mean(self._data['space_data'][:, :], axis=1) return prt, ict, space @cached_property def lons_lats(self): """Get the lons and lats.""" scanline_nb = len(self.times) scan_points = np.arange(0, 2048, 32) lons, lats = self._get_avhrr_tiepoints(scan_points, scanline_nb) lons, lats = geo_interpolate( lons.reshape((scanline_nb, -1)), lats.reshape((scanline_nb, -1))) return lons, lats def _get_avhrr_tiepoints(self, scan_points, scanline_nb): sgeom = avhrr(scanline_nb, scan_points, apply_offset=False) # no attitude error rpy = [0, 0, 0] s_times = sgeom.times(self.times[:, np.newaxis]) orb = Orbital(self.platform_name) pixels_pos = compute_pixels(orb, sgeom, s_times, rpy) lons, lats, alts = get_lonlatalt(pixels_pos, s_times) return lons, lats @property def start_time(self): """Get the start time.""" return time_seconds(self._data["timecode"][0, np.newaxis, :], self.year).astype(datetime)[0] @property def end_time(self): """Get the end time.""" return time_seconds(self._data["timecode"][-1, np.newaxis, :], self.year).astype(datetime)[0] 
satpy-0.34.0/satpy/readers/hsaf_grib.py000066400000000000000000000132601420401153000200110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019. # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """A reader for files produced by the Hydrology SAF. Currently this reader depends on the `pygrib` python package. The `eccodes` package from ECMWF is preferred, but does not support python 3 at the time of writing. """ import logging from datetime import datetime, timedelta import dask.array as da import numpy as np import pygrib import xarray as xr from pyresample import geometry from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler LOG = logging.getLogger(__name__) CF_UNITS = { 'none': '1', } class HSAFFileHandler(BaseFileHandler): """File handler for HSAF grib files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(HSAFFileHandler, self).__init__(filename, filename_info, filetype_info) self._msg_datasets = {} self._start_time = None self._end_time = None try: with pygrib.open(self.filename) as grib_file: first_msg = grib_file.message(1) analysis_time = self._get_datetime(first_msg) self._analysis_time = analysis_time self.metadata = self.get_metadata(first_msg) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) @staticmethod def _get_datetime(msg): dtstr = str(msg['dataDate']) + str(msg['dataTime']).zfill(4) return datetime.strptime(dtstr, "%Y%m%d%H%M") @property def analysis_time(self): """Get validity time of this file.""" return self._analysis_time def get_metadata(self, msg): """Get the metadata.""" try: center_description = msg['centreDescription'] except (RuntimeError, KeyError): center_description = None ds_info = { 'filename': self.filename, 'shortName': msg['shortName'], 'long_name': msg['name'], 'units': msg['units'], 'centreDescription': center_description, 'data_time': self._analysis_time, 'nx': msg['Nx'], 'ny': msg['Ny'], 'projparams': msg.projparams } return ds_info def get_area_def(self, dsid): """Get area definition for message.""" msg = self._get_message(1) try: return self._get_area_def(msg) except (RuntimeError, KeyError): raise RuntimeError("Unknown GRIB projection information") def _get_area_def(self, msg): """Get the area definition of the datasets in the file.""" proj_param = msg.projparams.copy() Rx = 2 * np.arcsin(1. / msg['NrInRadiusOfEarth']) / msg['dx'] Ry = 2 * np.arcsin(1. 
/ msg['NrInRadiusOfEarth']) / msg['dy'] x_0 = - msg['XpInGridLengths'] x_1 = msg['Nx'] - msg['XpInGridLengths'] y_0 = (msg['Ny'] - msg['YpInGridLengths']) * -1 y_1 = msg['YpInGridLengths'] min_x = (x_0 * Rx) * proj_param['h'] max_x = (x_1 * Rx) * proj_param['h'] min_y = (y_0 * Ry) * proj_param['h'] max_y = (y_1 * Ry) * proj_param['h'] area_extent = (min_x, min_y, max_x, max_y) area = geometry.AreaDefinition('hsaf_region', 'A region from H-SAF', 'geos', proj_param, msg['Nx'], msg['Ny'], area_extent) return area def _get_message(self, idx): with pygrib.open(self.filename) as grib_file: msg = grib_file.message(idx) return msg def get_dataset(self, ds_id, ds_info): """Read a GRIB message into an xarray DataArray.""" if (ds_id['name'] not in self.filename): raise IOError("File does not contain {} data".format(ds_id['name'])) msg = self._get_message(1) ds_info = self.get_metadata(msg) ds_info['end_time'] = ds_info['data_time'] if (ds_id['name'] == 'h05' or ds_id['name'] == 'h05B'): flen = len(self.filename) timedelt = self.filename[flen-10:flen-8] ds_info['start_time'] = (ds_info['end_time'] - timedelta(hours=int(timedelt))) else: ds_info['start_time'] = ds_info['end_time'] fill = msg['missingValue'] data = msg.values.astype(np.float32) if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): data = data.filled(np.nan) data = da.from_array(data, chunks=CHUNK_SIZE) else: data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) satpy-0.34.0/satpy/readers/hy2_scat_l2b_h5.py000066400000000000000000000125341420401153000207370ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2020,2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HY-2B L2B Reader. Distributed by Eumetsat in HDF5 format. Also handle the HDF5 files from NSOAS, based on a file example. 
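A small usage sketch (the file name is a placeholder; dataset names such as
``wvc_lon`` are defined in the accompanying reader YAML)::

    from satpy import Scene

    scn = Scene(filenames=["hy2b_l2b_example.h5"], reader="hy2_scat_l2b_h5")
    scn.load(["wvc_lon"])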
""" from datetime import datetime import numpy as np import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler class HY2SCATL2BH5FileHandler(HDF5FileHandler): """File handler for HY2 scat.""" @property def start_time(self): """Time for first observation.""" return datetime.strptime(self['/attr/Range_Beginning_Time'], '%Y%m%dT%H:%M:%S') @property def end_time(self): """Time for final observation.""" return datetime.strptime(self['/attr/Range_Ending_Time'], '%Y%m%dT%H:%M:%S') @property def platform_name(self): """Get the Platform ShortName.""" return self['/attr/Platform_ShortName'] def get_variable_metadata(self): """Get the variable metadata.""" info = getattr(self, 'attrs', {}) info.update({ "Equator_Crossing_Longitude": self['/attr/Equator_Crossing_Longitude'], "Equator_Crossing_Time": self['/attr/Equator_Crossing_Time'], "Input_L2A_Filename": self['/attr/Input_L2A_Filename'], "L2B_Actual_WVC_Rows": self['/attr/L2B_Actual_WVC_Rows'], "Orbit_Inclination": self['/attr/Orbit_Inclination'], "Orbit_Number": self['/attr/Orbit_Number'], "Output_L2B_Filename": self['/attr/Output_L2B_Filename'], "Production_Date_Time": self['/attr/Production_Date_Time'], "L2B_Expected_WVC_Rows": self['/attr/L2B_Expected_WVC_Rows'] }) try: info.update({"L2B_Number_WVC_cells": self['/attr/L2B_Number_WVC_cells']}) except KeyError: info.update({"L2B_Expected_WVC_Cells": self['/attr/L2B_Expected_WVC_Cells']}) return info def get_metadata(self): """Get the metadata.""" info = getattr(self, 'attrs', {}) info.update({ "WVC_Size": self['/attr/WVC_Size'], "HDF_Version_Id": self['/attr/HDF_Version_Id'], "Instrument_ShorName": self['/attr/Instrument_ShorName'], "L2A_Inputdata_Version": self['/attr/L2A_Inputdata_Version'], "L2B_Algorithm_Descriptor": self['/attr/L2B_Algorithm_Descriptor'], "L2B_Data_Version": self['/attr/L2B_Data_Version'], "L2B_Processing_Type": self['/attr/L2B_Processing_Type'], "L2B_Processor_Name": self['/attr/L2B_Processor_Name'], "L2B_Processor_Version": self['/attr/L2B_Processor_Version'], "Long_Name": self['/attr/Long_Name'], "Platform_LongName": self['/attr/Platform_LongName'], "Platform_ShortName": self['/attr/Platform_ShortName'], "Platform_Type": self['/attr/Platform_Type'], "Producer_Agency": self['/attr/Producer_Agency'], "Producer_Institution": self['/attr/Producer_Institution'], "Rev_Orbit_Perio": self['/attr/Rev_Orbit_Period'], "Short_Name": self['/attr/Short_Name'], "Sigma0_Granularity": self['/attr/Sigma0_Granularity'], }) return info def get_dataset(self, key, info): """Get the dataset.""" dims = ['y', 'x'] if self[key['name']].ndim == 3: dims = ['y', 'x', 'selection'] data = self[key['name']] if "valid range" in data.attrs: data.attrs.update({'valid_range': data.attrs.pop('valid range')}) if key['name'] in 'wvc_row_time': data = data.rename({data.dims[0]: 'y'}) else: dim_map = {curr_dim: new_dim for curr_dim, new_dim in zip(data.dims, dims)} data = data.rename(dim_map) data = self._mask_data(key['name'], data) data = self._scale_data(key['name'], data) if key['name'] in 'wvc_lon': data = xr.where(data > 180, data - 360., data) data.attrs.update(info) data.attrs.update(self.get_metadata()) data.attrs.update(self.get_variable_metadata()) if "Platform_ShortName" in data.attrs: data.attrs.update({'platform_name': data.attrs['Platform_ShortName']}) return data def _scale_data(self, key_name, data): return data * self[key_name].attrs['scale_factor'] + self[key_name].attrs['add_offset'] def _mask_data(self, key_name, data): data = xr.where(data == 
self[key_name].attrs['fill_value'], np.nan, data) valid_range = self[key_name].attrs['valid_range'] data = xr.where(data < valid_range[0], np.nan, data) data = xr.where(data > valid_range[1], np.nan, data) return data satpy-0.34.0/satpy/readers/iasi_l2.py000066400000000000000000000132331420401153000174070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """IASI L2 HDF5 files.""" import datetime as dt import logging import dask.array as da import h5py import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler # Scan timing values taken from # http://oiswww.eumetsat.org/WEBOPS/eps-pg/IASI-L1/IASIL1-PG-4ProdOverview.htm # Time between each scan in one scanline [ms] SCAN_STEP_TIME = 8. / 37. # Duration of one measurement [ms] SCAN_STARE_DURATION = 151.0 # Time correction used between each 4-footprint measurements VIEW_TIME_ADJUSTMENT = SCAN_STEP_TIME + SCAN_STARE_DURATION / 2. VALUES_PER_SCAN_LINE = 120 # Epoch for the dates EPOCH = dt.datetime(2000, 1, 1) SHORT_NAMES = {'M01': 'Metop-B', 'M02': 'Metop-A', 'M03': 'Metop-C'} DSET_NAMES = {'ozone_mixing_ratio': 'O', 'ozone_mixing_ratio_quality': 'QO', 'pressure': 'P', 'pressure_quality': 'QP', 'temperature': 'T', 'temperature_quality': 'QT', 'water_mixing_ratio': 'W', 'water_mixing_ratio_quality': 'QW', 'water_total_column': 'WC', 'ozone_total_column': 'OC', 'surface_skin_temperature': 'Ts', 'surface_skin_temperature_quality': 'QTs', 'emissivity': 'E', 'emissivity_quality': 'QE'} GEO_NAMES = {'latitude': 'Latitude', 'longitude': 'Longitude', 'satellite_azimuth_angle': 'SatAzimuth', 'satellite_zenith_angle': 'SatZenith', 'sensing_time': {'day': 'SensingTime_day', 'msec': 'SensingTime_msec'}, 'solar_azimuth_angle': 'SunAzimuth', 'solar_zenith_angle': 'SunZenith'} LOGGER = logging.getLogger(__name__) class IASIL2HDF5(BaseFileHandler): """File handler for IASI L2 HDF5 files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(IASIL2HDF5, self).__init__(filename, filename_info, filetype_info) self.finfo = filename_info self.lons = None self.lats = None self.sensor = 'iasi' self.mda = {} short_name = filename_info['platform_id'] self.mda['platform_name'] = SHORT_NAMES.get(short_name, short_name) self.mda['sensor'] = 'iasi' @property def start_time(self): """Get the start time.""" return self.finfo['start_time'] @property def end_time(self): """Get the end time.""" end_time = dt.datetime.combine(self.start_time.date(), self.finfo['end_time'].time()) if end_time < self.start_time: end_time += dt.timedelta(days=1) return end_time def get_dataset(self, key, info): """Load a dataset.""" with h5py.File(self.filename, 'r') as fid: LOGGER.debug('Reading %s.', key['name']) if key['name'] in DSET_NAMES: m_data = read_dataset(fid, key) else: m_data = read_geo(fid, key) 
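# Attach the metadata from the dataset's YAML definition and record the sensor name before returning.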
m_data.attrs.update(info) m_data.attrs['sensor'] = self.sensor return m_data def read_dataset(fid, key): """Read dataset.""" dsid = DSET_NAMES[key['name']] dset = fid["/PWLR/" + dsid] if dset.ndim == 3: dims = ['y', 'x', 'level'] else: dims = ['y', 'x'] data = xr.DataArray(da.from_array(dset[()], chunks=CHUNK_SIZE), name=key['name'], dims=dims).astype(np.float32) data = xr.where(data > 1e30, np.nan, data) dset_attrs = dict(dset.attrs) data.attrs.update(dset_attrs) return data def read_geo(fid, key): """Read geolocation and related datasets.""" dsid = GEO_NAMES[key['name']] add_epoch = False if "time" in key['name']: days = fid["/L1C/" + dsid["day"]][()] msecs = fid["/L1C/" + dsid["msec"]][()] data = _form_datetimes(days, msecs) add_epoch = True dtype = np.float64 else: data = fid["/L1C/" + dsid][()] dtype = np.float32 data = xr.DataArray(da.from_array(data, chunks=CHUNK_SIZE), name=key['name'], dims=['y', 'x']).astype(dtype) if add_epoch: data.attrs['sensing_time_epoch'] = EPOCH return data def _form_datetimes(days, msecs): """Calculate seconds since EPOCH from days and milliseconds for each of IASI scan.""" all_datetimes = [] for i in range(days.size): day = int(days[i]) msec = msecs[i] scanline_datetimes = [] for j in range(int(VALUES_PER_SCAN_LINE / 4)): usec = 1000 * (j * VIEW_TIME_ADJUSTMENT + msec) delta = (dt.timedelta(days=day, microseconds=usec)) for _k in range(4): scanline_datetimes.append(delta.total_seconds()) all_datetimes.append(scanline_datetimes) return np.array(all_datetimes, dtype=np.float64) satpy-0.34.0/satpy/readers/iasi_l2_so2_bufr.py000066400000000000000000000167661420401153000212260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""IASI L2 SO2 BUFR format reader. Introduction ------------ The ``iasi_l2_so2_bufr`` reader reads IASI level2 SO2 data in BUFR format. The algorithm is described in the Theoretical Basis Document, linked below. Each BUFR file consists of a number of messages, one for each scan, each of which contains SO2 column amounts in Dobson units for retrievals performed with plume heights of 7, 10, 13, 16 and 25 km. Reader Arguments ---------------- A list of retrieval files, fnames, can be opened as follows:: Scene(reader="iasi_l2_so2_bufr", filenames=fnames) Example ------- Here is an example how to read the data in satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob( '/test_data/W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68984_eps_o_so2_l2.bin') scn = Scene(filenames=filenames, reader='iasi_l2_so2_bufr') scn.load(['so2_height_3', 'so2_height_4']) print(scn['so2_height_3']) Output: .. 
code-block:: none dask.array Coordinates: crs object +proj=latlong +datum=WGS84 +ellps=WGS84 +type=crs Dimensions without coordinates: y, x Attributes: sensor: IASI units: dobson file_type: iasi_l2_so2_bufr wavelength: None modifiers: () platform_name: METOP-2 resolution: 12000 fill_value: -1e+100 level: None polarization: None coordinates: ('longitude', 'latitude') calibration: None key: #3#sulphurDioxide name: so2_height_3 start_time: 2020-02-04 09:14:55 end_time: 2020-02-04 09:17:51 area: Shape: (23, 120)\nLons: . # type: ignore """Interface to MTG-LI L2 product NetCDF files The reader is based on preliminary test data provided by EUMETSAT. The data description is described in the "LI L2 Product User Guide [LIL2PUG] Draft version" documentation. """ import logging from datetime import datetime import h5netcdf import numpy as np from pyresample import geometry # FIXME: This is not xarray/dask compatible # TODO: Once migrated to xarray/dask, remove ignored path in setup.cfg from satpy.dataset import Dataset from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class LIFileHandler(BaseFileHandler): """MTG LI File Reader.""" def __init__(self, filename, filename_info, filetype_info): super(LIFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = h5netcdf.File(self.filename, 'r') # Get grid dimensions from file refdim = self.nc['grid_position'][:] # Get number of lines and columns self.nlines = int(refdim[2]) self.ncols = int(refdim[3]) self.cache = {} logger.debug('Dimension : {}'.format(refdim)) logger.debug('Row/Cols: {} / {}'.format(self.nlines, self.ncols)) logger.debug('Reading: {}'.format(self.filename)) logger.debug('Start: {}'.format(self.start_time)) logger.debug('End: {}'.format(self.end_time)) @property def start_time(self): return datetime.strptime(self.nc.attrs['sensing_start'], '%Y%m%d%H%M%S') @property def end_time(self): return datetime.strptime(self.nc.attrs['end_time'], '%Y%m%d%H%M%S') def get_dataset(self, key, info=None, out=None): """Load a dataset """ if key in self.cache: return self.cache[key] # Type dictionary typedict = {"af": "flash_accumulation", "afa": "accumulated_flash_area", "afr": "flash_radiance", "lgr": "radiance", "lef": "radiance", "lfl": "radiance"} # Get lightning data out of NetCDF container logger.debug("Key: {}".format(key['name'])) # Create reference grid grid = np.full((self.nlines, self.ncols), np.NaN) # Get product values values = self.nc[typedict[key['name']]] rows = self.nc['row'] cols = self.nc['column'] logger.debug('[ Number of values ] : {}'.format((len(values)))) logger.debug('[Min/Max] : <{}> / <{}>'.format(np.min(values), np.max(values))) # Convert xy coordinates to flatten indices ids = np.ravel_multi_index([rows, cols], grid.shape) # Replace NaN values with data np.put(grid, ids, values) # Correct for bottom left origin in LI row/column indices. rotgrid = np.flipud(grid) # Rotate the grid by 90 degree clockwise rotgrid = np.rot90(rotgrid, 3) logger.warning("LI data has been rotated to fit to reference grid. \ Works only for test dataset") # Mask invalid values ds = np.ma.masked_where(np.isnan(rotgrid), rotgrid) # Create dataset object out.data[:] = np.ma.getdata(ds) out.mask[:] = np.ma.getmask(ds) out.info.update(key.to_dict()) return out def get_area_def(self, key, info=None): """Create AreaDefinition for specified product. Projection information are hard coded for 0 degree geos projection Test dataset doesn't provide the values in the file container. 
Only fill values are inserted. """ # TODO Get projection information from input file a = 6378169. h = 35785831. b = 6356583.8 lon_0 = 0. # area_extent = (-5432229.9317116784, -5429229.5285458621, # 5429229.5285458621, 5432229.9317116784) area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612) proj_dict = {'a': float(a), 'b': float(b), 'lon_0': float(lon_0), 'h': float(h), 'proj': 'geos', 'units': 'm'} area = geometry.AreaDefinition( 'LI_area_name', "LI area", 'geosli', proj_dict, self.ncols, self.nlines, area_extent) self.area = area logger.debug("Dataset area definition: \n {}".format(area)) return area satpy-0.34.0/satpy/readers/maia.py000066400000000000000000000122131420401153000167710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for NWPSAF AAPP MAIA Cloud product. https://nwpsaf.eu/site/software/aapp/ Documentation reference: [NWPSAF-MF-UD-003] DATA Formats [NWPSAF-MF-UD-009] MAIA version 4 Scientific User Manual """ import logging import dask.array as da import h5py import numpy as np from xarray import DataArray from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class MAIAFileHandler(BaseFileHandler): """File handler for Maia files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(MAIAFileHandler, self).__init__( filename, filename_info, filetype_info) self.finfo = filename_info # set the day date part for end_time from the file name self.finfo['end_time'] = self.finfo['end_time'].replace( year=self.finfo['start_time'].year, month=self.finfo['start_time'].month, day=self.finfo['start_time'].day) if self.finfo['end_time'] < self.finfo['start_time']: myday = self.finfo['end_time'].day self.finfo['end_time'] = self.finfo['end_time'].replace( day=myday + 1) self.selected = None self.read(self.filename) def read(self, filename): """Read the file.""" self.h5 = h5py.File(filename, 'r') missing = -9999. self.Lat = da.from_array(self.h5[u'DATA/Latitude'], chunks=CHUNK_SIZE) / 10000. self.Lon = da.from_array(self.h5[u'DATA/Longitude'], chunks=CHUNK_SIZE) / 10000. 
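        # Note (descriptive comment): latitude/longitude are stored as scaled
        # integer values; dividing by 10000. converts them to decimal degrees.
        # The `selected` mask built just below keeps only the pixels whose
        # (scaled) longitude is greater than the fill value.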
self.selected = (self.Lon > missing) self.file_content = {} for key in self.h5['DATA'].keys(): self.file_content[key] = da.from_array(self.h5[u'DATA/' + key], chunks=CHUNK_SIZE) for key in self.h5[u'HEADER'].keys(): self.file_content[key] = self.h5[u'HEADER/' + key][:] # Cloud Mask on pixel mask = 2**0 + 2**1 + 2**2 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**0 self.file_content[u"cma"] = lst # Cloud Mask confidence mask = 2**5 + 2**6 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**5 self.file_content[u"cma_conf"] = lst # Cloud Mask Quality mask = 2**3 + 2**4 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**3 self.file_content[u'cma_qual'] = lst # Opaque Cloud mask = 2**21 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**21 self.file_content[u'opaq_cloud'] = lst # land /water Background mask = 2**15 + 2**16 + 2**17 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**15 self.file_content[u'land_water_background'] = lst # CT (Actual CloudType) mask = 2**4 + 2**5 + 2**6 + 2**7 + 2**8 classif = self.file_content[u'CloudType'] & mask classif = classif / 2**4 self.file_content['ct'] = classif.astype(np.uint8) def get_platform(self, platform): """Get the platform.""" if self.file_content['sat_id'] in (14,): return "viirs" else: return "avhrr" @property def start_time(self): """Get the start time.""" return self.finfo['start_time'] @property def end_time(self): """Get the end time.""" return self.finfo['end_time'] def get_dataset(self, key, info, out=None): """Get a dataset from the file.""" logger.debug("Reading %s.", key['name']) values = self.file_content[key['name']] selected = np.array(self.selected) if key['name'] in ("Latitude", "Longitude"): values = values / 10000. if key['name'] in ('Tsurf', 'CloudTopPres', 'CloudTopTemp'): goods = values > -9998. selected = np.array(selected & goods) if key['name'] in ('Tsurf', "Alt_surface", "CloudTopTemp"): values = values / 100. if key['name'] in ("CloudTopPres"): values = values / 10. else: selected = self.selected info.update(self.finfo) fill_value = np.nan if key['name'] == 'ct': fill_value = 0 info['_FillValue'] = 0 ds = DataArray(values, dims=['y', 'x'], attrs=info).where(selected, fill_value) # update dataset info with file_info return ds satpy-0.34.0/satpy/readers/mersi2_l1b.py000066400000000000000000000175351420401153000200350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for the FY-3D MERSI-2 L1B file format. The files for this reader are HDF5 and come in four varieties; band data and geolocation data, both at 250m and 1000m resolution. This reader was tested on FY-3D MERSI-2 data, but should work on future platforms as well assuming no file format changes. 
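
A minimal loading sketch (the reader name follows this module; the band names
shown, e.g. ``'1'`` and ``'24'``, are illustrative and must be defined in the
accompanying reader YAML):

.. code-block:: python

    from satpy import Scene

    scn = Scene(filenames=fnames, reader='mersi2_l1b')
    scn.load(['1', '24'])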
""" from datetime import datetime import dask.array as da import numpy as np from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp from satpy.readers.hdf5_utils import HDF5FileHandler class MERSI2L1B(HDF5FileHandler): """MERSI-2 L1B file reader.""" def _strptime(self, date_attr, time_attr): """Parse date/time strings.""" date = self[date_attr] time = self[time_attr] # "18:27:39.720" # cuts off microseconds because of unknown meaning # is .720 == 720 microseconds or 720000 microseconds return datetime.strptime(date + " " + time.split('.')[0], "%Y-%m-%d %H:%M:%S") @property def start_time(self): """Time for first observation.""" return self._strptime('/attr/Observing Beginning Date', '/attr/Observing Beginning Time') @property def end_time(self): """Time for final observation.""" return self._strptime('/attr/Observing Ending Date', '/attr/Observing Ending Time') @property def sensor_name(self): """Map sensor name to Satpy 'standard' sensor names.""" file_sensor = self['/attr/Sensor Identification Code'] sensor = { 'MERSI': 'mersi-2', }.get(file_sensor, file_sensor) return sensor def _get_single_slope_intercept(self, slope, intercept, cal_index): try: # convert scalar arrays to scalar return slope.item(), intercept.item() except ValueError: # numpy array but has more than one element return slope[cal_index], intercept[cal_index] return slope, intercept def _get_coefficients(self, cal_key, cal_index): coeffs = self[cal_key][cal_index] slope = coeffs.attrs.pop('Slope', None) intercept = coeffs.attrs.pop('Intercept', None) if slope is not None: slope, intercept = self._get_single_slope_intercept( slope, intercept, cal_index) coeffs = coeffs * slope + intercept return coeffs def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" file_key = ds_info.get('file_key', dataset_id['name']) band_index = ds_info.get('band_index') data = self[file_key] if band_index is not None: data = data[band_index] if data.ndim >= 2: data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) if 'rows_per_scan' in self.filetype_info: attrs.setdefault('rows_per_scan', self.filetype_info['rows_per_scan']) data = self._mask_data(data, dataset_id, attrs) slope = attrs.pop('Slope', None) intercept = attrs.pop('Intercept', None) if slope is not None and dataset_id.get('calibration') != 'counts': if band_index is not None: slope = slope[band_index] intercept = intercept[band_index] data = data * slope + intercept if dataset_id.get('calibration') == "reflectance": # some bands have 0 counts for the first N columns and # seem to be invalid data points data = data.where(data != 0) coeffs = self._get_coefficients(ds_info['calibration_key'], ds_info['calibration_index']) data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 elif dataset_id.get('calibration') == "brightness_temperature": calibration_index = ds_info['calibration_index'] # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = 1. 
/ (dataset_id['wavelength'][1] / 1e6) data = self._get_bt_dataset(data, calibration_index, wave_number) data.attrs = attrs # convert bytes to str for key, val in attrs.items(): # python 3 only if bytes is not str and isinstance(val, bytes): data.attrs[key] = val.decode('utf8') data.attrs.update({ 'platform_name': self['/attr/Satellite Name'], 'sensor': self.sensor_name, }) return data def _mask_data(self, data, dataset_id, attrs): """Mask the data using fill_value and valid_range attributes.""" fill_value = attrs.pop('FillValue', np.nan) # covered by valid_range valid_range = attrs.pop('valid_range', None) if dataset_id.get('calibration') == 'counts': # preserve integer type of counts if possible attrs['_FillValue'] = fill_value new_fill = fill_value else: new_fill = np.nan if valid_range is not None: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. if dataset_id['name'] in ['24', '25'] and valid_range[1] == 4095: valid_range[1] = 25000 # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 data = data.where((data >= valid_range[0]) & (data <= valid_range[1]), new_fill) return data def _get_bt_dataset(self, data, calibration_index, wave_number): """Get the dataset as brightness temperature. Apparently we don't use these calibration factors for Rad -> BT:: coeffs = self._get_coefficients(ds_info['calibration_key'], calibration_index) # coefficients are per-scan, we need to repeat the values for a # clean alignment coeffs = np.repeat(coeffs, data.shape[0] // coeffs.shape[1], axis=1) coeffs = coeffs.rename({ coeffs.dims[0]: 'coefficients', coeffs.dims[1]: 'y' }) # match data dims data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 + coeffs[3] * data**3 """ # pass the dask array bt_data = rad2temp(wave_number, data.data * 1e-5) # brightness temperature if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays data.data = da.from_array(bt_data, chunks=data.data.chunks) else: # new versions of pyspectral can do dask arrays data.data = bt_data # additional corrections from the file corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][calibration_index]) corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][calibration_index]) if corr_coeff_a != 0: data = (data - corr_coeff_b) / corr_coeff_a # Some BT bands seem to have 0 in the first 10 columns # and it is an invalid Kelvin measurement, so let's mask data = data.where(data != 0) return data satpy-0.34.0/satpy/readers/mimic_TPW2_nc.py000066400000000000000000000153021420401153000204560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . # # """Reader for Mimic TPW data in netCDF format from SSEC. This module implements reader for MIMIC_TPW2 netcdf files. 
MIMIC-TPW2 is an experimental global product of total precipitable water (TPW), using morphological compositing of the MIRS retrieval from several available operational microwave-frequency sensors. Originally described in a 2010 paper by Wimmers and Velden. This Version 2 is developed from an older method that uses simpler, but more limited TPW retrievals and advection calculations. More information, data and credits at http://tropic.ssec.wisc.edu/real-time/mtpw2/credits.html """ import logging import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 logger = logging.getLogger(__name__) class MimicTPW2FileHandler(NetCDF4FileHandler): """NetCDF4 reader for MIMC TPW.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(MimicTPW2FileHandler, self).__init__(filename, filename_info, filetype_info, xarray_kwargs={"decode_times": False}) def available_datasets(self, configured_datasets=None): """Get datasets in file matching gelocation shape (lat/lon).""" lat_shape = self.file_content.get('/dimension/lat') lon_shape = self.file_content.get('/dimension/lon') # Read the lat/lon variables? handled_variables = set() # update previously configured datasets logger.debug("Starting previously configured variables loop...") for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get('file_key', ds_info['name']) # logger.debug("Evaluating previously configured variable: %s", var_name) matches = self.file_type_matches(ds_info['file_type']) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self: logger.debug("Handling previously configured variable: %s", var_name) handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # Iterate over dataset contents for var_name, val in self.file_content.items(): # Only evaluate variables if isinstance(val, netCDF4.Variable): logger.debug("Evaluating new variable: %s", var_name) var_shape = self[var_name + "/shape"] logger.debug("Dims:{}".format(var_shape)) if var_shape == (lat_shape, lon_shape): logger.debug("Found valid additional dataset: %s", var_name) # Skip anything we have already configured if var_name in handled_variables: logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) # Create new ds_info object new_info = { 'name': var_name, 'file_key': var_name, 'file_type': self.filetype_info['file_type'], } logger.debug(var_name) yield True, new_info def get_dataset(self, ds_id, info): """Load dataset designated by the given key from file.""" logger.debug("Getting data for: %s", ds_id['name']) file_key = info.get('file_key', ds_id['name']) data = np.flipud(self[file_key]) data = xr.DataArray(data, dims=['y', 'x']) data.attrs = self.get_metadata(data, info) if 'lon' in data.dims: data.rename({'lon': 'x'}) if 'lat' in data.dims: data.rename({'lat': 'y'}) return data def get_area_def(self, dsid): """Flip data up/down and define equirectangular AreaDefintion.""" flip_lat = np.flipud(self['latArr']) latlon = np.meshgrid(self['lonArr'], flip_lat) width = self['lonArr/shape'][0] height = 
self['latArr/shape'][0] lower_left_x = latlon[0][height-1][0] lower_left_y = latlon[1][height-1][0] upper_right_y = latlon[1][0][width-1] upper_right_x = latlon[0][0][width-1] area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "MIMIC TPW WGS84" area_id = 'mimic' proj_id = 'World Geodetic System 1984' projection = 'EPSG:4326' area_def = AreaDefinition(area_id, description, proj_id, projection, width, height, area_extent, ) return area_def def get_metadata(self, data, info): """Get general metadata for file.""" metadata = {} metadata.update(data.attrs) metadata.update(info) metadata.update({ 'platform_shortname': 'aggregated microwave', 'sensor': 'mimic', 'start_time': self.start_time, 'end_time': self.end_time, }) metadata.update(self[info.get('file_key')].variable.attrs) return metadata @property def start_time(self): """Start timestamp of the dataset determined from yaml.""" return self.filename_info['start_time'] @property def end_time(self): """End timestamp of the dataset same as start_time.""" return self.filename_info.get('end_time', self.start_time) @property def sensor_name(self): """Sensor name.""" return self["sensor"] satpy-0.34.0/satpy/readers/mirs.py000066400000000000000000000461611420401153000170450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
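# A minimal usage sketch (illustrative only; 'TPW' and the per-channel
# 'btemp_*' dataset names depend on the variables and channels actually
# present in the file):
#
#     from satpy import Scene
#     scn = Scene(filenames=fnames, reader='mirs')
#     scn.load(['TPW'])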
"""Interface to MiRS product.""" import datetime import logging import os from collections import Counter import dask.array as da import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.aux_download import retrieve from satpy.readers.file_handlers import BaseFileHandler LOG = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) try: # try getting setuptools/distribute's version of resource retrieval first from pkg_resources import resource_string as get_resource_string except ImportError: from pkgutil import get_data as get_resource_string # type: ignore # # 'Polo' variable in MiRS files use these values for H/V polarization POLO_V = 2 POLO_H = 3 amsu = "amsu-mhs" PLATFORMS = {"n18": "NOAA-18", "n19": "NOAA-19", "np": "NOAA-19", "m2": "MetOp-A", "m1": "MetOp-B", "m3": "MetOp-C", "ma2": "MetOp-A", "ma1": "MetOp-B", "ma3": "MetOp-C", "npp": "NPP", "f17": "DMSP-F17", "f18": "DMSP-F18", "gpm": "GPM", "n20": "NOAA-20", } SENSOR = {"n18": amsu, "n19": amsu, "n20": 'atms', "np": amsu, "m1": amsu, "m2": amsu, "m3": amsu, "ma1": amsu, "ma2": amsu, "ma3": amsu, "npp": "atms", "jpss": "atms", "f17": "ssmis", "f18": "ssmis", "gpm": "GPI", } def read_atms_coeff_to_string(fn): """Read the coefficients into a string.""" if os.path.isfile(fn): coeff_str = open(fn, "r").readlines() else: parts = fn.split(":") mod_part, file_part = parts if len(parts) == 2 else ("", parts[0]) mod_part = mod_part or __package__ # self.__module__ coeff_str = get_resource_string(mod_part, file_part).decode().split("\n") return coeff_str def read_atms_limb_correction_coefficients(fn): """Read the limb correction files.""" coeff_str = read_atms_coeff_to_string(fn) n_chn = 22 n_fov = 96 # make the string a generator coeff_lines = (line.strip() for line in coeff_str) all_coeffs = np.zeros((n_chn, n_fov, n_chn), dtype=np.float32) all_amean = np.zeros((n_chn, n_fov, n_chn), dtype=np.float32) all_dmean = np.zeros(n_chn, dtype=np.float32) all_nchx = np.zeros(n_chn, dtype=np.int32) all_nchanx = np.zeros((n_chn, n_chn), dtype=np.int32) all_nchanx[:] = 9999 # There should be 22 sections for chan_idx in range(n_chn): # blank line at the start of each section _ = next(coeff_lines) # section header next_line = next(coeff_lines) _nx, nchx, dmean = [x.strip() for x in next_line.split(" ") if x] all_nchx[chan_idx] = nchx = int(nchx) all_dmean[chan_idx] = float(dmean) # coeff locations (indexes to put the future coefficients in) next_line = next(coeff_lines) locations = [int(x.strip()) for x in next_line.split(" ") if x] if len(locations) != nchx: raise RuntimeError for x in range(nchx): all_nchanx[chan_idx, x] = locations[x] - 1 # Read 'nchx' coefficients for each of 96 FOV for fov_idx in range(n_fov): # chan_num, fov_num, *coefficients, error coeff_line_parts = [x.strip() for x in next(coeff_lines).split(" ") if x][2:] coeffs = [float(x) for x in coeff_line_parts[:nchx]] ameans = [float(x) for x in coeff_line_parts[nchx:-1]] # not used but nice to know the purpose of the last column. 
# _error_val = float(coeff_line_parts[-1]) for x in range(nchx): all_coeffs[chan_idx, fov_idx, all_nchanx[chan_idx, x]] = coeffs[x] all_amean[all_nchanx[chan_idx, x], fov_idx, chan_idx] = ameans[x] return all_dmean, all_coeffs, all_amean, all_nchx, all_nchanx def apply_atms_limb_correction(datasets, channel_idx, dmean, coeffs, amean, nchx, nchanx): """Calculate the correction for each channel.""" ds = datasets[channel_idx] fov_line_correct = [] for fov_idx in range(ds.shape[1]): coeff_sum = np.zeros(ds.shape[0], dtype=ds.dtype) for k in range(nchx[channel_idx]): chn_repeat = nchanx[channel_idx, k] coef = coeffs[channel_idx, fov_idx, chn_repeat] * ( datasets[chn_repeat, :, fov_idx] - amean[chn_repeat, fov_idx, channel_idx]) coeff_sum = np.add(coef, coeff_sum) fov_line_correct.append(np.add(coeff_sum, dmean[channel_idx])) return np.stack(fov_line_correct, axis=1) def get_coeff_by_sfc(coeff_fn, bt_data, idx): """Read coefficients for specific filename (land or sea).""" sfc_coeff = read_atms_limb_correction_coefficients(coeff_fn) # transpose bt_data for correction bt_data = bt_data.transpose("Channel", "y", "x") c_size = bt_data[idx, :, :].chunks correction = da.map_blocks(apply_atms_limb_correction, bt_data, idx, *sfc_coeff, chunks=c_size, meta=np.array((), dtype=bt_data.dtype)) return correction def limb_correct_atms_bt(bt_data, surf_type_mask, coeff_fns, ds_info): """Gather data needed for limb correction.""" idx = ds_info['channel_index'] LOG.info("Starting ATMS Limb Correction...") sea_bt = get_coeff_by_sfc(coeff_fns['sea'], bt_data, idx) land_bt = get_coeff_by_sfc(coeff_fns['land'], bt_data, idx) LOG.info("Finishing limb correction") is_sea = (surf_type_mask == 0) new_data = np.where(is_sea, sea_bt, land_bt) bt_corrected = xr.DataArray(new_data, dims=("y", "x"), attrs=ds_info) return bt_corrected class MiRSL2ncHandler(BaseFileHandler): """MiRS handler for NetCDF4 files using xarray. The MiRS retrieval algorithm runs on multiple sensors. For the ATMS sensors, a limb correction is applied by default. 
In order to change that behavior, use the keyword argument ``limb_correction=False``:: from satpy import Scene, find_files_and_readers filenames = find_files_and_readers(base_dir, reader="mirs") scene = Scene(filenames, reader_kwargs={'limb_correction': False}) """ def __init__(self, filename, filename_info, filetype_info, limb_correction=True): """Init method.""" super(MiRSL2ncHandler, self).__init__(filename, filename_info, filetype_info, ) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, decode_coords=True, chunks={'Field_of_view': CHUNK_SIZE, 'Scanline': CHUNK_SIZE}) # y,x is used in satpy, bands rather than channel using in xrimage self.nc = self.nc.rename_dims({"Scanline": "y", "Field_of_view": "x"}) self.nc = self.nc.rename({"Latitude": "latitude", "Longitude": "longitude"}) self.platform_name = self._get_platform_name self.sensor = self._get_sensor self.limb_correction = limb_correction @property def platform_shortname(self): """Get platform shortname.""" return self.filename_info['platform_shortname'] @property def _get_platform_name(self): """Get platform name.""" try: res = PLATFORMS[self.filename_info['platform_shortname'].lower()] except KeyError: res = "mirs" return res.lower() @property def _get_sensor(self): """Get sensor.""" try: res = SENSOR[self.filename_info["platform_shortname"].lower()] except KeyError: res = self.sensor_names return res @property def sensor_names(self): """Return standard sensor names for the file's data.""" return list(set(SENSOR.values())) @property def start_time(self): """Get start time.""" # old file format if self.filename_info.get("date", False): s_time = datetime.datetime.combine( self.force_date("date"), self.force_time("start_time") ) self.filename_info["start_time"] = s_time return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" # old file format if self.filename_info.get("date", False): end_time = datetime.datetime.combine( self.force_date("date"), self.force_time("end_time") ) self.filename_info["end_time"] = end_time return self.filename_info["end_time"] def force_date(self, key): """Force datetime.date for combine.""" if isinstance(self.filename_info[key], datetime.datetime): return self.filename_info[key].date() return self.filename_info[key] def force_time(self, key): """Force datetime.time for combine.""" if isinstance(self.filename_info.get(key), datetime.datetime): return self.filename_info.get(key).time() return self.filename_info.get(key) @property def _get_coeff_filenames(self): """Retrieve necessary files for coefficients if needed.""" coeff_fn = {'sea': None, 'land': None} if self.platform_name == "noaa-20": coeff_fn['land'] = retrieve("readers/limbcoef_atmsland_noaa20.txt") coeff_fn['sea'] = retrieve("readers/limbcoef_atmssea_noaa20.txt") if self.platform_name == 'npp': coeff_fn['land'] = retrieve("readers/limbcoef_atmsland_snpp.txt") coeff_fn['sea'] = retrieve("readers/limbcoef_atmssea_snpp.txt") return coeff_fn def update_metadata(self, ds_info): """Get metadata.""" metadata = {} metadata.update(ds_info) metadata.update({ 'sensor': self.sensor, 'platform_name': self.platform_name, 'start_time': self.start_time, 'end_time': self.end_time, }) return metadata @staticmethod def _nan_for_dtype(data_arr_dtype): # don't force the conversion from 32-bit float to 64-bit float # if we don't have to if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): return np.timedelta64('NaT') if 
np.issubdtype(data_arr_dtype, np.datetime64): return np.datetime64('NaT') return np.nan @staticmethod def _scale_data(data_arr, scale_factor, add_offset): """Scale data, if needed.""" scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: data_arr = data_arr * scale_factor + add_offset return data_arr def _fill_data(self, data_arr, fill_value, scale_factor, add_offset): """Fill missing data with NaN.""" if fill_value is not None: fill_value = self._scale_data(fill_value, scale_factor, add_offset) fill_out = self._nan_for_dtype(data_arr.dtype) data_arr = data_arr.where(data_arr != fill_value, fill_out) return data_arr def _apply_valid_range(self, data_arr, valid_range, scale_factor, add_offset): """Get and apply valid_range.""" if valid_range is not None: valid_min, valid_max = valid_range valid_min = self._scale_data(valid_min, scale_factor, add_offset) valid_max = self._scale_data(valid_max, scale_factor, add_offset) if valid_min is not None and valid_max is not None: data_arr = data_arr.where((data_arr >= valid_min) & (data_arr <= valid_max)) return data_arr def apply_attributes(self, data, ds_info): """Combine attributes from file and yaml and apply. File attributes should take precedence over yaml if both are present """ try: global_attr_fill = self.nc.missing_value except AttributeError: global_attr_fill = 1.0 # let file metadata take precedence over ds_info from yaml, # but if yaml has more to offer, include it here, but fix # units. ds_info.update(data.attrs) # special cases if ds_info['name'] in ["latitude", "longitude"]: ds_info["standard_name"] = ds_info.get("standard_name", ds_info['name']) # try to assign appropriate units (if "Kelvin" covert to K) units_convert = {"Kelvin": "K"} data_unit = ds_info.get('units', None) ds_info['units'] = units_convert.get(data_unit, data_unit) scale = ds_info.pop('scale_factor', 1.0) offset = ds_info.pop('add_offset', 0.) fill_value = ds_info.pop("_FillValue", global_attr_fill) valid_range = ds_info.pop('valid_range', None) data = self._scale_data(data, scale, offset) data = self._fill_data(data, fill_value, scale, offset) data = self._apply_valid_range(data, valid_range, scale, offset) data.attrs = ds_info return data, ds_info def get_dataset(self, ds_id, ds_info): """Get datasets.""" if 'dependencies' in ds_info.keys(): idx = ds_info['channel_index'] data = self['BT'] data = data.rename(new_name_or_name_dict=ds_info["name"]) data, ds_info = self.apply_attributes(data, ds_info) if self.sensor.lower() == "atms" and self.limb_correction: sfc_type_mask = self['Sfc_type'] data = limb_correct_atms_bt(data, sfc_type_mask, self._get_coeff_filenames, ds_info) self.nc = self.nc.merge(data) else: LOG.info("No Limb Correction applied.") data = data[:, :, idx] else: data = self[ds_id['name']] data, ds_info = self.apply_attributes(data, ds_info) data.attrs = self.update_metadata(ds_info) return data def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. See :meth:`satpy.readers.file_handlers.BaseHandler.available_datasets` for more information. 
""" handled_vars = set() for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue yaml_info = {} if self.file_type_matches(ds_info['file_type']): handled_vars.add(ds_info['name']) yaml_info = ds_info if ds_info['name'] == 'BT': yield from self._available_btemp_datasets(yaml_info) yield True, ds_info yield from self._available_new_datasets(handled_vars) def _count_channel_repeat_number(self): """Count channel/polarization pair repetition.""" freq = self.nc.coords.get('Freq', self.nc.get('Freq')) polo = self.nc['Polo'] chn_total = Counter() normals = [] for idx, (f, p) in enumerate(zip(freq, polo)): normal_f = str(int(f)) normal_p = 'v' if p == POLO_V else 'h' chn_total[normal_f + normal_p] += 1 normals.append((idx, f, p, normal_f, normal_p)) return chn_total, normals def _available_btemp_datasets(self, yaml_info): """Create metadata for channel BTs.""" chn_total, normals = self._count_channel_repeat_number() # keep track of current channel count for string description chn_cnt = Counter() for idx, _f, _p, normal_f, normal_p in normals: chn_cnt[normal_f + normal_p] += 1 p_count = str(chn_cnt[normal_f + normal_p] if chn_total[normal_f + normal_p] > 1 else '') new_name = "btemp_{}{}{}".format(normal_f, normal_p, p_count) desc_bt = "Channel {} Brightness Temperature at {}GHz {}{}" desc_bt = desc_bt.format(idx, normal_f, normal_p, p_count) ds_info = yaml_info.copy() ds_info.update({ 'file_type': self.filetype_info['file_type'], 'name': new_name, 'description': desc_bt, 'channel_index': idx, 'frequency': "{}GHz".format(normal_f), 'polarization': normal_p, 'dependencies': ('BT', 'Sfc_type'), 'coordinates': ['longitude', 'latitude'] }) yield True, ds_info def _get_ds_info_for_data_arr(self, var_name): ds_info = { 'file_type': self.filetype_info['file_type'], 'name': var_name, 'coordinates': ["longitude", "latitude"] } return ds_info def _is_2d_yx_data_array(self, data_arr): has_y_dim = data_arr.dims[0] == "y" has_x_dim = data_arr.dims[1] == "x" return has_y_dim and has_x_dim def _available_new_datasets(self, handled_vars): """Metadata for available variables other than BT.""" possible_vars = list(self.nc.items()) + list(self.nc.coords.items()) for var_name, data_arr in possible_vars: if var_name in handled_vars: continue if data_arr.ndim != 2: # we don't currently handle non-2D variables continue if not self._is_2d_yx_data_array(data_arr): # we need 'traditional' y/x dimensions currently continue ds_info = self._get_ds_info_for_data_arr(var_name) yield True, ds_info def __getitem__(self, item): """Wrap around `self.nc[item]`.""" data = self.nc[item] # 'Freq' dimension causes issues in other processing if 'Freq' in data.coords: data = data.drop_vars('Freq') return data satpy-0.34.0/satpy/readers/modis_l1b.py000066400000000000000000000263141420401153000177420ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modis level 1b hdf-eos format reader. Introduction ------------ The ``modis_l1b`` reader reads and calibrates Modis L1 image data in hdf-eos format. Files often have a pattern similar to the following one: .. parsed-literal:: M[O/Y]D02[1/H/Q]KM.A[date].[time].[collection].[processing_time].hdf Other patterns where "collection" and/or "proccessing_time" are missing might also work (see the readers yaml file for details). Geolocation files (MOD03) are also supported. Geolocation files ----------------- For the 1km data (mod021km) geolocation files (mod03) are optional. If not given to the reader 1km geolocations will be interpolated from the 5km geolocation contained within the file. For the 500m and 250m data geolocation files are needed. References: - Modis gelocation description: http://www.icare.univ-lille1.fr/wiki/index.php/MODIS_geolocation """ import logging import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.hdf4_utils import from_sds from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader logger = logging.getLogger(__name__) class HDFEOSBandReader(HDFEOSBaseFileReader): """Handler for the regular band channels.""" res = {"1": 1000, "Q": 250, "H": 500} def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info) ds = self.metadata['INVENTORYMETADATA'][ 'COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] self.resolution = self.res[ds[-3]] def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" datadict = { 1000: ['EV_250_Aggr1km_RefSB', 'EV_500_Aggr1km_RefSB', 'EV_1KM_RefSB', 'EV_1KM_Emissive'], 500: ['EV_250_Aggr500_RefSB', 'EV_500_RefSB'], 250: ['EV_250_RefSB']} if self.resolution != key['resolution']: return datasets = datadict[self.resolution] for dataset in datasets: subdata = self.sd.select(dataset) var_attrs = subdata.attributes() band_names = var_attrs["band_names"].split(",") # get the relative indices of the desired channel try: index = band_names.index(key['name']) except ValueError: continue uncertainty = self.sd.select(dataset + "_Uncert_Indexes") array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[index, :, :], dims=['y', 'x']).astype(np.float32) valid_range = var_attrs['valid_range'] # Fill values: # Data Value Meaning # 65535 Fill Value (includes reflective band data at night mode # and completely missing L1A scans) # 65534 L1A DN is missing within a scan # 65533 Detector is saturated # 65532 Cannot compute zero point DN, e.g., SV is saturated # 65531 Detector is dead (see comments below) # 65530 RSB dn** below the minimum of the scaling range # 65529 TEB radiance or RSB dn** exceeds the maximum of the # scaling range # 65528 Aggregation algorithm failure # 65527 Rotation of Earth view Sector from nominal science # collection position # 65526 Calibration coefficient b1 could not be computed # 65525 Subframe is dead # 65524 Both sides of the PCLW electronics on simultaneously # 65501 - 65523 (reserved for future use) # 65500 NAD closed upper limit array = array.where(array >= np.float32(valid_range[0])) array = 
array.where(array <= np.float32(valid_range[1])) array = array.where(from_sds(uncertainty, chunks=CHUNK_SIZE)[index, :, :] < 15) if key['calibration'] == 'brightness_temperature': projectable = calibrate_bt(array, var_attrs, index, key['name']) info.setdefault('units', 'K') info.setdefault('standard_name', 'toa_brightness_temperature') elif key['calibration'] == 'reflectance': projectable = calibrate_refl(array, var_attrs, index) info.setdefault('units', '%') info.setdefault('standard_name', 'toa_bidirectional_reflectance') elif key['calibration'] == 'radiance': projectable = calibrate_radiance(array, var_attrs, index) info.setdefault('units', var_attrs.get('radiance_units')) info.setdefault('standard_name', 'toa_outgoing_radiance_per_unit_wavelength') elif key['calibration'] == 'counts': projectable = calibrate_counts(array, var_attrs, index) info.setdefault('units', 'counts') info.setdefault('standard_name', 'counts') # made up else: raise ValueError("Unknown calibration for " "key: {}".format(key)) projectable.attrs = info # if ((platform_name == 'Aqua' and key['name'] in ["6", "27", "36"]) or # (platform_name == 'Terra' and key['name'] in ["29"])): # height, width = projectable.shape # row_indices = projectable.mask.sum(1) == width # if row_indices.sum() != height: # projectable.mask[row_indices, :] = True # Get the orbit number # if not satscene.orbit: # mda = self.data.attributes()["CoreMetadata.0"] # orbit_idx = mda.index("ORBITNUMBER") # satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116] # Trimming out dead sensor lines (detectors) on terra: # (in addition channel 27, 30, 34, 35, and 36 are nosiy) # if satscene.satname == "terra": # for band in ["29"]: # if not satscene[band].is_loaded() or satscene[band].data.mask.all(): # continue # width = satscene[band].data.shape[1] # height = satscene[band].data.shape[0] # indices = satscene[band].data.mask.sum(1) < width # if indices.sum() == height: # continue # satscene[band] = satscene[band].data[indices, :] # satscene[band].area = geometry.SwathDefinition( # lons=satscene[band].area.lons[indices, :], # lats=satscene[band].area.lats[indices, :]) self._add_satpy_metadata(key, projectable) return projectable class MixedHDFEOSReader(HDFEOSGeoReader, HDFEOSBandReader): """A file handler for the files that have both regular bands and geographical information in them.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" HDFEOSGeoReader.__init__(self, filename, filename_info, filetype_info) HDFEOSBandReader.__init__(self, filename, filename_info, filetype_info) def get_dataset(self, key, info): """Get the dataset.""" if key['name'] in HDFEOSGeoReader.DATASET_NAMES: return HDFEOSGeoReader.get_dataset(self, key, info) return HDFEOSBandReader.get_dataset(self, key, info) def calibrate_counts(array, attributes, index): """Calibration for counts channels.""" offset = np.float32(attributes["corrected_counts_offsets"][index]) scale = np.float32(attributes["corrected_counts_scales"][index]) array = (array - offset) * scale return array def calibrate_radiance(array, attributes, index): """Calibration for radiance channels.""" offset = np.float32(attributes["radiance_offsets"][index]) scale = np.float32(attributes["radiance_scales"][index]) array = (array - offset) * scale return array def calibrate_refl(array, attributes, index): """Calibration for reflective channels.""" offset = np.float32(attributes["reflectance_offsets"][index]) scale = np.float32(attributes["reflectance_scales"][index]) # convert to 
reflectance and convert from 1 to % array = (array - offset) * scale * 100 return array def calibrate_bt(array, attributes, index, band_name): """Calibration for the emissive channels.""" offset = np.float32(attributes["radiance_offsets"][index]) scale = np.float32(attributes["radiance_scales"][index]) array = (array - offset) * scale # Planck constant (Joule second) h__ = np.float32(6.6260755e-34) # Speed of light in vacuum (meters per second) c__ = np.float32(2.9979246e+8) # Boltzmann constant (Joules per Kelvin) k__ = np.float32(1.380658e-23) # Derived constants c_1 = 2 * h__ * c__ * c__ c_2 = (h__ * c__) / k__ # Effective central wavenumber (inverse centimeters) cwn = np.array([ 2.641775E+3, 2.505277E+3, 2.518028E+3, 2.465428E+3, 2.235815E+3, 2.200346E+3, 1.477967E+3, 1.362737E+3, 1.173190E+3, 1.027715E+3, 9.080884E+2, 8.315399E+2, 7.483394E+2, 7.308963E+2, 7.188681E+2, 7.045367E+2], dtype=np.float32) # Temperature correction slope (no units) tcs = np.array([ 9.993411E-1, 9.998646E-1, 9.998584E-1, 9.998682E-1, 9.998819E-1, 9.998845E-1, 9.994877E-1, 9.994918E-1, 9.995495E-1, 9.997398E-1, 9.995608E-1, 9.997256E-1, 9.999160E-1, 9.999167E-1, 9.999191E-1, 9.999281E-1], dtype=np.float32) # Temperature correction intercept (Kelvin) tci = np.array([ 4.770532E-1, 9.262664E-2, 9.757996E-2, 8.929242E-2, 7.310901E-2, 7.060415E-2, 2.204921E-1, 2.046087E-1, 1.599191E-1, 8.253401E-2, 1.302699E-1, 7.181833E-2, 1.972608E-2, 1.913568E-2, 1.817817E-2, 1.583042E-2], dtype=np.float32) # Transfer wavenumber [cm^(-1)] to wavelength [m] cwn = 1. / (cwn * 100) # Some versions of the modis files do not contain all the bands. emmissive_channels = ["20", "21", "22", "23", "24", "25", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36"] global_index = emmissive_channels.index(band_name) cwn = cwn[global_index] tcs = tcs[global_index] tci = tci[global_index] array = c_2 / (cwn * np.log(c_1 / (1000000 * array * cwn ** 5) + 1)) array = (array - tci) / tcs return array satpy-0.34.0/satpy/readers/modis_l2.py000066400000000000000000000215671420401153000176060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modis level 2 hdf-eos format reader. Introduction ------------ The ``modis_l2`` reader reads and calibrates Modis L2 image data in hdf-eos format. Since there are a multitude of different level 2 datasets not all of theses are implemented (yet). Currently the reader supports: - m[o/y]d35_l2: cloud_mask dataset - some datasets in m[o/y]d06 files To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../readers`. Geolocation files ----------------- Similar to the ``modis_l1b`` reader the geolocation files (mod03) for the 1km data are optional and if not given 1km geolocations will be interpolated from the 5km geolocation contained within the file. For the 500m and 250m data geolocation files are needed. 
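
Example
-------

A short loading sketch (``cloud_mask`` is the M[O/Y]D35_L2 dataset mentioned
above; other dataset names depend on the product and the reader YAML):

.. code-block:: python

    from satpy import Scene

    scn = Scene(filenames=fnames, reader='modis_l2')
    scn.load(['cloud_mask'])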
References: - Documentation about the format: https://modis-atmos.gsfc.nasa.gov/products """ import logging import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.hdf4_utils import from_sds from satpy.readers.hdfeos_base import HDFEOSGeoReader logger = logging.getLogger(__name__) class ModisL2HDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 2 files. Includes error handling for files produced by IMAPP produced files. """ def _load_all_metadata_attributes(self): try: return super()._load_all_metadata_attributes() except KeyError: return {} @property def is_imapp_mask_byte1(self): """Get if this file is the IMAPP 'mask_byte1' file type.""" return "mask_byte1" in self.filetype_info["file_type"] @property def start_time(self): """Get the start time of the dataset.""" try: return super().start_time except KeyError: try: return self.filename_info["start_time"] except KeyError: return self.filename_info["acquisition_time"] @property def end_time(self): """Get the end time of the dataset.""" try: return super().end_time except KeyError: return self.start_time @staticmethod def read_geo_resolution(metadata): """Parse metadata to find the geolocation resolution. It is implemented as a staticmethod to match read_mda pattern. """ try: return HDFEOSGeoReader.read_geo_resolution(metadata) except RuntimeError: # most L2 products are 5000m return 5000 def _select_hdf_dataset(self, hdf_dataset_name, byte_dimension): """Load a dataset from HDF-EOS level 2 file.""" dataset = self.sd.select(hdf_dataset_name) dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) attrs = dataset.attributes() dims = ['y', 'x'] if byte_dimension == 0: dims = ['i', 'y', 'x'] dask_arr = dask_arr.astype(np.uint8) elif byte_dimension == 2: dims = ['y', 'x', 'i'] dask_arr = dask_arr.astype(np.uint8) dataset = xr.DataArray(dask_arr, dims=dims, attrs=attrs) if 'i' in dataset.dims: # Reorder dimensions for consistency dataset = dataset.transpose('i', 'y', 'x') return dataset def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id['name'] if self.is_geo_loadable_dataset(dataset_name): return HDFEOSGeoReader.get_dataset(self, dataset_id, dataset_info) dataset_name_in_file = dataset_info['file_key'] if self.is_imapp_mask_byte1: dataset_name_in_file = dataset_info.get('imapp_file_key', dataset_name_in_file) # The dataset asked correspond to a given set of bits of the HDF EOS dataset if 'byte' in dataset_info and 'byte_dimension' in dataset_info: dataset = self._extract_and_mask_category_dataset(dataset_id, dataset_info, dataset_name_in_file) else: # No byte manipulation required dataset = self.load_dataset(dataset_name_in_file, dataset_info.pop("category", False)) self._add_satpy_metadata(dataset_id, dataset) return dataset def _extract_and_mask_category_dataset(self, dataset_id, dataset_info, var_name): # what dimension is per-byte byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info['byte_dimension'] dataset = self._select_hdf_dataset(var_name, byte_dimension) # category products always have factor=1/offset=0 so don't apply them # also remove them so they don't screw up future satpy processing dataset.attrs.pop('scale_factor', None) dataset.attrs.pop('add_offset', None) # Don't do this byte work if we are using the IMAPP mask_byte1 file if self.is_imapp_mask_byte1: return dataset dataset = _extract_byte_mask(dataset, dataset_info['byte'], dataset_info['bit_start'], dataset_info['bit_count']) dataset = 
self._mask_with_quality_assurance_if_needed(dataset, dataset_info, dataset_id) return dataset def _mask_with_quality_assurance_if_needed(self, dataset, dataset_info, dataset_id): if not dataset_info.get('quality_assurance', False): return dataset # Get quality assurance dataset recursively quality_assurance_dataset_id = dataset_id.from_dict( dict(name='quality_assurance', resolution=1000) ) quality_assurance_dataset_info = { 'name': 'quality_assurance', 'resolution': 1000, 'byte_dimension': 2, 'byte': 0, 'bit_start': 0, 'bit_count': 1, 'file_key': 'Quality_Assurance' } quality_assurance = self.get_dataset( quality_assurance_dataset_id, quality_assurance_dataset_info ) # Duplicate quality assurance dataset to create relevant filter duplication_factor = [int(dataset_dim / quality_assurance_dim) for dataset_dim, quality_assurance_dim in zip(dataset.shape, quality_assurance.shape)] quality_assurance = np.tile(quality_assurance, duplication_factor) # Replace unassured data by NaN value dataset[np.where(quality_assurance == 0)] = dataset.attrs["_FillValue"] return dataset def _extract_byte_mask(dataset, byte_information, bit_start, bit_count): if isinstance(byte_information, int): # Only one byte: select the byte information byte_dataset = dataset[byte_information, :, :] elif isinstance(byte_information, (list, tuple)) and len(byte_information) == 2: # Two bytes: recombine the two bytes dataset_a = dataset[byte_information[0], :, :] dataset_b = dataset[byte_information[1], :, :] dataset_a = np.uint16(dataset_a) dataset_a = np.left_shift(dataset_a, 8) # dataset_a << 8 byte_dataset = np.bitwise_or(dataset_a, dataset_b).astype(np.uint16) shape = byte_dataset.shape # We replicate the concatenated byte with the right shape byte_dataset = np.repeat(np.repeat(byte_dataset, 4, axis=0), 4, axis=1) # All bits carry information, we update bit_start consequently bit_start = np.arange(16, dtype=np.uint16).reshape((4, 4)) bit_start = np.tile(bit_start, (shape[0], shape[1])) # Compute the final bit mask attrs = dataset.attrs.copy() dataset = _bits_strip(bit_start, bit_count, byte_dataset) dataset.attrs = attrs return dataset def _bits_strip(bit_start, bit_count, value): """Extract specified bit from bit representation of integer value. Parameters ---------- bit_start : int Starting index of the bits to extract (first bit has index 0) bit_count : int Number of bits starting from bit_start to extract value : int Number from which to extract the bits Returns ------- int Value of the extracted bits """ bit_mask = pow(2, bit_start + bit_count) - 1 return np.right_shift(np.bitwise_and(value, bit_mask), bit_start) satpy-0.34.0/satpy/readers/msi_safe.py000066400000000000000000000303531420401153000176550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SAFE MSI L1C reader. The MSI data has a special value for saturated pixels. 
By default, these pixels are set to np.inf, but for some applications it might be desirable to have these pixels left untouched. For this case, the `mask_saturated` flag is available in the reader, and can be toggled with ``reader_kwargs`` upon Scene creation:: scene = satpy.Scene(filenames, reader='msi_safe', reader_kwargs={'mask_saturated': False}) scene.load(['B01']) L1B format description for the files read here: https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/ """ import logging import xml.etree.ElementTree as ET import dask.array as da import numpy as np import rioxarray from pyresample import geometry from xarray import DataArray from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) PLATFORMS = {'S2A': "Sentinel-2A", 'S2B': "Sentinel-2B", 'S2C': "Sentinel-2C", 'S2D': "Sentinel-2D"} class SAFEMSIL1C(BaseFileHandler): """File handler for SAFE MSI files (jp2).""" def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True): """Initialize the reader.""" super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated self._start_time = filename_info['observation_time'] self._end_time = filename_info['observation_time'] self._channel = filename_info['band_name'] self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info['fmission_id']] def get_dataset(self, key, info): """Load a dataset.""" if self._channel != key['name']: return logger.debug('Reading %s.', key['name']) proj = self._read_from_file(key) proj.attrs = info.copy() proj.attrs['units'] = '%' proj.attrs['platform_name'] = self.platform_name return proj def _read_from_file(self, key): proj = rioxarray.open_rasterio(self.filename, chunks=CHUNK_SIZE) proj = proj.squeeze("band") if key["calibration"] == "reflectance": return self._mda.calibrate_to_reflectances(proj, self._channel) if key["calibration"] == "radiance": return self._mda.calibrate_to_radiances(proj, self._channel) @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._start_time def get_area_def(self, dsid): """Get the area def.""" if self._channel != dsid['name']: return return self._tile_mda.get_area_def(dsid) class SAFEMSIXMLMetadata(BaseFileHandler): """Base class for SAFE MSI XML metadata filehandlers.""" def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info) self._start_time = filename_info['observation_time'] self._end_time = filename_info['observation_time'] self.root = ET.parse(self.filename) self.tile = filename_info['dtile_number'] self.platform_name = PLATFORMS[filename_info['fmission_id']] self.mask_saturated = mask_saturated import bottleneck # noqa import geotiepoints # noqa @property def end_time(self): """Get end time.""" return self._start_time @property def start_time(self): """Get start time.""" return self._start_time class SAFEMSIMDXML(SAFEMSIXMLMetadata): """File handle for sentinel 2 safe XML generic metadata.""" def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" quantification = int(self.root.find('.//QUANTIFICATION_VALUE').text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification 
* 100 def _sanitize_data(self, data): data = data.where(data != self.no_data) if self.mask_saturated: data = data.where(data != self.saturated, np.inf) return data def band_offset(self, band): """Get the band offset for *band*.""" band_index = self._band_index(band) return self.band_offsets.get(band_index, 0) def _band_index(self, band): band_indices = self.band_indices band_conversions = {"B01": "B1", "B02": "B2", "B03": "B3", "B04": "B4", "B05": "B5", "B06": "B6", "B07": "B7", "B08": "B8", "B8A": "B8A", "B09": "B9", "B10": "B10", "B11": "B11", "B12": "B12"} band_index = band_indices[band_conversions[band]] return band_index @cached_property def band_indices(self): """Get the band indices from the metadata.""" spectral_info = self.root.findall('.//Spectral_Information') band_indices = {spec.attrib["physicalBand"]: int(spec.attrib["bandId"]) for spec in spectral_info} return band_indices @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" offsets = self.root.find('.//Radiometric_Offset_List') if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: band_offsets = {} return band_offsets @cached_property def special_values(self): """Get the special values from the metadata.""" special_values = self.root.findall('.//Special_Values') special_values_dict = {value[0].text: float(value[1].text) for value in special_values} return special_values_dict @property def no_data(self): """Get the nodata value from the metadata.""" return self.special_values["NODATA"] @property def saturated(self): """Get the saturated value from the metadata.""" return self.special_values["SATURATED"] def calibrate_to_radiances(self, data, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" physical_gain = self.physical_gain(band_name) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / physical_gain def physical_gain(self, band_name): """Get the physical gain for a given *band_name*.""" band_index = self._band_index(band_name) return self.physical_gains[band_index] @cached_property def physical_gains(self): """Get the physical gains dictionary.""" physical_gains = {int(elt.attrib["bandId"]): float(elt.text) for elt in self.root.findall(".//PHYSICAL_GAINS")} return physical_gains def _fill_swath_edges(angles): """Fill gaps at edges of swath.""" darr = DataArray(angles, dims=['y', 'x']) darr = darr.bfill('x') darr = darr.ffill('x') darr = darr.bfill('y') darr = darr.ffill('y') angles = darr.data return angles class SAFEMSITileMDXML(SAFEMSIXMLMetadata): """File handle for sentinel 2 safe XML tile metadata.""" def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info, mask_saturated) self.geocoding = self.root.find('.//Tile_Geocoding') def get_area_def(self, dsid): """Get the area definition of the dataset.""" area_extent = self._area_extent(dsid['resolution']) cols, rows = self._shape(dsid['resolution']) area = geometry.AreaDefinition( self.tile, "On-the-fly area", self.tile, self.projection, cols, rows, area_extent) return area @cached_property def projection(self): """Get the geographic projection.""" from pyproj import CRS epsg = self.geocoding.find('HORIZONTAL_CS_CODE').text return CRS(epsg) def _area_extent(self, resolution): cols, rows = self._shape(resolution) geoposition = self.geocoding.find(f'Geoposition[@resolution="{resolution}"]') ulx = 
float(geoposition.find('ULX').text) uly = float(geoposition.find('ULY').text) xdim = float(geoposition.find('XDIM').text) ydim = float(geoposition.find('YDIM').text) area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly) return area_extent def _shape(self, resolution): rows = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NROWS').text) cols = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text) return cols, rows @staticmethod def _do_interp(minterp, xcoord, ycoord): interp_points2 = np.vstack((ycoord.ravel(), xcoord.ravel())) res = minterp(interp_points2) return res.reshape(xcoord.shape) def interpolate_angles(self, angles, resolution): """Interpolate the angles.""" from geotiepoints.multilinear import MultilinearInterpolator cols, rows = self._shape(resolution) smin = [0, 0] smax = np.array(angles.shape) - 1 orders = angles.shape minterp = MultilinearInterpolator(smin, smax, orders) minterp.set_values(da.atleast_2d(angles.ravel())) y = da.arange(rows, dtype=angles.dtype, chunks=CHUNK_SIZE) / (rows-1) * (angles.shape[0] - 1) x = da.arange(cols, dtype=angles.dtype, chunks=CHUNK_SIZE) / (cols-1) * (angles.shape[1] - 1) xcoord, ycoord = da.meshgrid(x, y) return da.map_blocks(self._do_interp, minterp, xcoord, ycoord, dtype=angles.dtype, chunks=xcoord.chunks) def _get_coarse_dataset(self, key, info): """Get the coarse dataset refered to by `key` from the XML data.""" angles = self.root.find('.//Tile_Angles') if key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle']: angles = self._get_solar_angles(angles, info) elif key['name'] in ['satellite_zenith_angle', 'satellite_azimuth_angle']: angles = self._get_satellite_angles(angles, info) else: angles = None return angles def _get_solar_angles(self, angles, info): angles = self._get_values_from_tag(angles, info['xml_tag']) return angles @staticmethod def _get_values_from_tag(xml_tree, xml_tag): elts = xml_tree.findall(xml_tag + '/Values_List/VALUES') return np.array([[val for val in elt.text.split()] for elt in elts], dtype=np.float64) def _get_satellite_angles(self, angles, info): arrays = [] elts = angles.findall(info['xml_tag'] + '[@bandId="1"]') for elt in elts: arrays.append(self._get_values_from_tag(elt, info['xml_item'])) angles = np.nanmean(np.dstack(arrays), -1) return angles def get_dataset(self, key, info): """Get the dataset referred to by `key`.""" angles = self._get_coarse_dataset(key, info) if angles is None: return None angles = _fill_swath_edges(angles) res = self.interpolate_angles(angles, key['resolution']) proj = DataArray(res, dims=['y', 'x']) proj.attrs = info.copy() proj.attrs['units'] = 'degrees' proj.attrs['platform_name'] = self.platform_name return proj satpy-0.34.0/satpy/readers/msu_gsa_l1b.py000066400000000000000000000073531420401153000202670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
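# A minimal, illustrative Satpy usage sketch for this reader. The reader name
# follows the accompanying YAML definition (satpy/etc/readers/msu_gsa_l1b.yaml);
# the file name and dataset name below are placeholders, not real product names.
#
#     from satpy import Scene
#     scn = Scene(filenames=['ArcticaM1_MSUGSA_sample.h5'], reader='msu_gsa_l1b')
#     scn.load(['C01'])  # available dataset names are defined in the reader YAML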
"""Reader for the Arctica-M1 MSU-GS/A data. The files for this reader are HDF5 and contain channel data at 1km resolution for the VIS channels and 4km resolution for the IR channels. Geolocation data is available at both resolutions, as is sun and satellite geometry. This reader was tested on sample data provided by EUMETSAT. """ from datetime import datetime import numpy as np from satpy.readers.hdf5_utils import HDF5FileHandler class MSUGSAFileHandler(HDF5FileHandler): """MSU-GS/A L1B file reader.""" @property def start_time(self): """Time for timeslot scan start.""" dtstr = self['/attr/timestamp_without_timezone'] return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") @property def satellite_altitude(self): """Satellite altitude at time of scan. There is no documentation but this appears to be height above surface in meters. """ return float(self['/attr/satellite_observation_point_height']) @property def satellite_latitude(self): """Satellite latitude at time of scan.""" return float(self['/attr/satellite_observation_point_latitude']) @property def satellite_longitude(self): """Satellite longitude at time of scan.""" return float(self['/attr/satellite_observation_point_longitude']) @property def sensor_name(self): """Sensor name is hardcoded.""" sensor = 'MSU-GS/A' return sensor @property def platform_name(self): """Platform name is also hardcoded.""" platform = 'Arctica-M N1' return platform @staticmethod def _apply_scale_offset(in_data): """Apply the scale and offset to data.""" scl = in_data.attrs['scale'] off = in_data.attrs['offset'] return in_data * scl + off def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" file_key = ds_info.get('file_key', dataset_id['name']) data = self[file_key] attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) # The fill value also needs to be applied fill_val = attrs.pop('fill_value') data = data.where(data != fill_val, np.nan) # Data has a scale and offset that we must apply data = self._apply_scale_offset(data) # Data is given as radiance values, we must convert if we want reflectance if dataset_id.get('calibration') == "reflectance": solconst = float(attrs.pop('F_solar_constant')) data = np.pi * data / solconst # Satpy expects reflectance values in 0-100 range data = data * 100. data.attrs = attrs data.attrs.update({ 'platform_name': self.platform_name, 'sensor': self.sensor_name, 'sat_altitude': self.satellite_altitude, 'sat_latitude': self.satellite_latitude, 'sat_longitude': self.satellite_longitude, }) return data satpy-0.34.0/satpy/readers/mviri_l1b_fiduceo_nc.py000066400000000000000000000652141420401153000221350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """FIDUCEO MVIRI FCDR Reader. 
Introduction ------------ The FIDUCEO MVIRI FCDR is a Fundamental Climate Data Record (FCDR) of re-calibrated Level 1.5 Infrared, Water Vapour, and Visible radiances from the Meteosat Visible Infra-Red Imager (MVIRI) instrument onboard the Meteosat First Generation satellites. There are two variants of the dataset: The *full FCDR* and a simplified version called *easy FCDR*. Some datasets are only available in one of the two variants, see the corresponding YAML definition in ``satpy/etc/readers/``. Dataset Names ------------- The FIDUCEO MVIRI readers use names ``VIS``, ``WV`` and ``IR`` for the visible, water vapor and infrared channels, respectively. These are different from the original netCDF variable names for the following reasons: - VIS channel is named differently in full FCDR (``counts_vis``) and easy FCDR (``toa_bidirectional_reflectance_vis``) - netCDF variable names contain the calibration level (e.g. ``counts_...``), which might be confusing for satpy users if a different calibration level is chosen. Remaining datasets (such as quality flags and uncertainties) have the same name in the reader as in the netCDF file. Example ------- This is how to read FIDUCEO MVIRI FCDR data in satpy: .. code-block:: python from satpy import Scene scn = Scene(filenames=['FIDUCEO_FCDR_L15_MVIRI_MET7-57.0...'], reader='mviri_l1b_fiduceo_nc') scn.load(['VIS', 'WV', 'IR']) Global netCDF attributes are available in the ``raw_metadata`` attribute of each loaded dataset. Image Orientation ----------------- The images are stored in MVIRI scanning direction, that means South is up and East is right. This can be changed as follows: .. code-block:: python scn.load(['VIS'], upper_right_corner='NE') Geolocation ----------- In addition to the image data, FIDUCEO also provides so called *static FCDRs* containing latitude and longitude coordinates. In order to simplify their usage, the FIDUCEO MVIRI readers do not make use of these static files, but instead provide an area definition that can be used to compute longitude and latitude coordinates on demand. .. code-block:: python area = scn['VIS'].attrs['area'] lons, lats = area.get_lonlats() Those were compared to the static FCDR and they agree very well, however there are small differences. The mean difference is < 1E3 degrees for all channels and projection longitudes. Huge VIS Reflectances --------------------- You might encounter huge VIS reflectances (10^8 percent and greater) in situations where both radiance and solar zenith angle are small. The reader certainly needs some improvement in this regard. Maybe the corresponding uncertainties can be used to filter these cases before calculating reflectances. VIS Channel Quality Flags ------------------------- Quality flags are available for the VIS channel only. A simple approach for masking bad quality pixels is to set the ``mask_bad_quality`` keyword argument to ``True``: .. code-block:: python scn = Scene(filenames=['FIDUCEO_FCDR_L15_MVIRI_MET7-57.0...'], reader='mviri_l1b_fiduceo_nc', reader_kwargs={'mask_bad_quality': True}) See :class:`FiduceoMviriBase` for an argument description. In some situations however the entire image can be flagged (look out for warnings). In that case check out the ``quality_pixel_bitmask`` and ``data_quality_bitmask`` datasets to find out why. Angles ------ The FIDUCEO MVIRI FCDR provides satellite and solar angles on a coarse tiepoint grid. By default these datasets will be interpolated to the higher VIS resolution. This can be changed as follows: .. 
code-block:: python scn.load(['solar_zenith_angle'], resolution=4500) If you need the angles in both resolutions, use data queries: .. code-block:: python from satpy import DataQuery query_vis = DataQuery( name='solar_zenith_angle', resolution=2250 ) query_ir = DataQuery( name='solar_zenith_angle', resolution=4500 ) scn.load([query_vis, query_ir]) # Use the query objects to access the datasets as follows sza_vis = scn[query_vis] References ---------- - `[Handbook]`_ MFG User Handbook - `[PUG]`_ FIDUCEO MVIRI FCDR Product User Guide .. _[Handbook]: https://www.eumetsat.int/media/7323 .. _[PUG]: http://doi.org/10.15770/EUM_SEC_CLM_0009 """ import abc import warnings from functools import lru_cache import dask.array as da import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers._geos_area import get_area_definition, get_area_extent, sampling_to_lfac_cfac from satpy.readers.file_handlers import BaseFileHandler EQUATOR_RADIUS = 6378140.0 POLE_RADIUS = 6356755.0 ALTITUDE = 42164000.0 - EQUATOR_RADIUS """[Handbook] section 5.2.1.""" MVIRI_FIELD_OF_VIEW = 18.0 """[Handbook] section 5.3.2.1.""" CHANNELS = ['VIS', 'WV', 'IR'] ANGLES = [ 'solar_zenith_angle', 'solar_azimuth_angle', 'satellite_zenith_angle', 'satellite_azimuth_angle' ] OTHER_REFLECTANCES = [ 'u_independent_toa_bidirectional_reflectance', 'u_structured_toa_bidirectional_reflectance' ] HIGH_RESOL = 2250 class IRWVCalibrator: """Calibrate IR & WV channels.""" def __init__(self, coefs): """Initialize the calibrator. Args: coefs: Calibration coefficients. """ self.coefs = coefs def calibrate(self, counts, calibration): """Calibrate IR/WV counts to the given calibration.""" if calibration == 'counts': return counts elif calibration in ('radiance', 'brightness_temperature'): return self._calibrate_rad_bt(counts, calibration) else: raise KeyError( 'Invalid IR/WV calibration: {}'.format(calibration.name) ) def _calibrate_rad_bt(self, counts, calibration): """Calibrate counts to radiance or brightness temperature.""" rad = self._counts_to_radiance(counts) if calibration == 'radiance': return rad bt = self._radiance_to_brightness_temperature(rad) return bt def _counts_to_radiance(self, counts): """Convert IR/WV counts to radiance. Reference: [PUG], equations (4.1) and (4.2). """ rad = self.coefs['a'] + self.coefs['b'] * counts return rad.where(rad > 0, np.float32(np.nan)) def _radiance_to_brightness_temperature(self, rad): """Convert IR/WV radiance to brightness temperature. Reference: [PUG], equations (5.1) and (5.2). """ bt = self.coefs['bt_b'] / (np.log(rad) - self.coefs['bt_a']) return bt.where(bt > 0, np.float32(np.nan)) class VISCalibrator: """Calibrate VIS channel.""" def __init__(self, coefs, solar_zenith_angle=None): """Initialize the calibrator. Args: coefs: Calibration coefficients. solar_zenith_angle (optional): Solar zenith angle. Only required for calibration to reflectance. 
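        Example:
            A rough sketch of the intended use (``coefs``, ``sza`` and
            ``counts`` are placeholders for the coefficient dictionary,
            solar zenith angle array and raw counts read from the file):

                cal = VISCalibrator(coefs, solar_zenith_angle=sza)
                refl = cal.calibrate(counts, 'reflectance')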
""" self.coefs = coefs self.solar_zenith_angle = solar_zenith_angle def calibrate(self, counts, calibration): """Calibrate VIS counts.""" if calibration == 'counts': return counts elif calibration in ('radiance', 'reflectance'): return self._calibrate_rad_refl(counts, calibration) else: raise KeyError( 'Invalid VIS calibration: {}'.format(calibration.name) ) def _calibrate_rad_refl(self, counts, calibration): """Calibrate counts to radiance or reflectance.""" rad = self._counts_to_radiance(counts) if calibration == 'radiance': return rad refl = self._radiance_to_reflectance(rad) refl = self.update_refl_attrs(refl) return refl def _counts_to_radiance(self, counts): """Convert VIS counts to radiance. Reference: [PUG], equations (7) and (8). """ years_since_launch = self.coefs['years_since_launch'] a_cf = (self.coefs['a0'] + self.coefs['a1'] * years_since_launch + self.coefs['a2'] * years_since_launch ** 2) mean_count_space_vis = self.coefs['mean_count_space'] rad = (counts - mean_count_space_vis) * a_cf return rad.where(rad > 0, np.float32(np.nan)) def _radiance_to_reflectance(self, rad): """Convert VIS radiance to reflectance factor. Note: Produces huge reflectances in situations where both radiance and solar zenith angle are small. Maybe the corresponding uncertainties can be used to filter these cases before calculating reflectances. Reference: [PUG], equation (6). """ sza = self.solar_zenith_angle.where( da.fabs(self.solar_zenith_angle) < 90, np.float32(np.nan) ) # direct illumination only cos_sza = np.cos(np.deg2rad(sza)) refl = ( (np.pi * self.coefs['distance_sun_earth'] ** 2) / (self.coefs['solar_irradiance'] * cos_sza) * rad ) return self.refl_factor_to_percent(refl) def update_refl_attrs(self, refl): """Update attributes of reflectance datasets.""" refl.attrs['sun_earth_distance_correction_applied'] = True refl.attrs['sun_earth_distance_correction_factor'] = self.coefs[ 'distance_sun_earth'].item() return refl @staticmethod def refl_factor_to_percent(refl): """Convert reflectance factor to percent.""" return refl * 100 class Navigator: """Navigate MVIRI images.""" def get_area_def(self, im_size, projection_longitude): """Create MVIRI area definition.""" proj_params = self._get_proj_params(im_size, projection_longitude) extent = get_area_extent(proj_params) return get_area_definition(proj_params, extent) def _get_proj_params(self, im_size, projection_longitude): """Get projection parameters for the given settings.""" area_name = 'geos_mviri_{0}x{0}'.format(im_size) lfac, cfac, loff, coff = self._get_factors_offsets(im_size) return { 'ssp_lon': projection_longitude, 'a': EQUATOR_RADIUS, 'b': POLE_RADIUS, 'h': ALTITUDE, 'units': 'm', 'loff': loff - im_size, 'coff': coff, 'lfac': -lfac, 'cfac': -cfac, 'nlines': im_size, 'ncols': im_size, 'scandir': 'S2N', # Reference: [PUG] section 2. 'p_id': area_name, 'a_name': area_name, 'a_desc': 'MVIRI Geostationary Projection' } def _get_factors_offsets(self, im_size): """Determine line/column offsets and scaling factors.""" # For offsets see variables "asamp" and "aline" of subroutine # "refgeo" in [Handbook] and in # https://github.com/FIDUCEO/FCDR_MVIRI/blob/master/lib/nrCrunch/cruncher.f loff = coff = im_size / 2 + 0.5 lfac = cfac = sampling_to_lfac_cfac( np.deg2rad(MVIRI_FIELD_OF_VIEW) / im_size ) return lfac, cfac, loff, coff class Interpolator: """Interpolate datasets to another resolution.""" @staticmethod def interp_tiepoints(ds, target_x, target_y): """Interpolate dataset between tiepoints. Uses linear interpolation. 
FUTURE: [PUG] recommends cubic spline interpolation. Args: ds: Dataset to be interpolated target_x: Target x coordinates target_y: Target y coordinates """ # No tiepoint coordinates specified in the files. Use dimensions # to calculate tiepoint sampling and assign tiepoint coordinates # accordingly. sampling = target_x.size // ds.coords['x'].size ds = ds.assign_coords(x=target_x.values[::sampling], y=target_y.values[::sampling]) return ds.interp(x=target_x.values, y=target_y.values) @staticmethod def interp_acq_time(time2d, target_y): """Interpolate scanline acquisition time to the given coordinates. The files provide timestamps per pixel for the low resolution channels (IR/WV) only. 1) Average values in each line to obtain one timestamp per line. 2) For the VIS channel duplicate values in y-direction (as advised by [PUG]). Note that the timestamps do not increase monotonically with the line number in some cases. Returns: Mean scanline acquisition timestamps """ # Compute mean timestamp per scanline time = time2d.mean(dim='x') # If required, repeat timestamps in y-direction to obtain higher # resolution y = time.coords['y'].values if y.size < target_y.size: reps = target_y.size // y.size y_rep = np.repeat(y, reps) time_hires = time.reindex(y=y_rep) time_hires = time_hires.assign_coords(y=target_y) return time_hires return time class VisQualityControl: """Simple quality control for VIS channel.""" def __init__(self, mask): """Initialize the quality control.""" self._mask = mask def check(self): """Check VIS channel quality and issue a warning if it's bad.""" use_with_caution = da.bitwise_and(self._mask, 2) if use_with_caution.all(): warnings.warn( 'All pixels of the VIS channel are flagged as "use with ' 'caution". Use datasets "quality_pixel_bitmask" and ' '"data_quality_bitmask" to find out why.' ) def mask(self, ds): """Mask VIS pixels with bad quality. Pixels are considered bad quality if the "quality_pixel_bitmask" is everything else than 0 (no flag set). """ return ds.where(self._mask == 0, np.float32(np.nan)) def is_high_resol(resolution): """Identify high resolution channel.""" return resolution == HIGH_RESOL class DatasetWrapper: """Helper class for accessing the dataset.""" def __init__(self, nc): """Wrap the given dataset.""" self.nc = nc @property def attrs(self): """Exposes dataset attributes.""" return self.nc.attrs def __getitem__(self, item): """Get a variable from the dataset.""" ds = self.nc[item] if self._should_dims_be_renamed(ds): ds = self._rename_dims(ds) elif self._coordinates_not_assigned(ds): ds = self._reassign_coords(ds) self._cleanup_attrs(ds) return ds def _should_dims_be_renamed(self, ds): """Determine whether dataset dimensions need to be renamed.""" return 'y_ir_wv' in ds.dims or 'y_tie' in ds.dims def _rename_dims(self, ds): """Rename dataset dimensions to match satpy's expectations.""" new_names = { 'y_ir_wv': 'y', 'x_ir_wv': 'x', 'y_tie': 'y', 'x_tie': 'x' } for old_name, new_name in new_names.items(): if old_name in ds.dims: ds = ds.rename({old_name: new_name}) return ds def _coordinates_not_assigned(self, ds): return 'y' in ds.dims and 'y' not in ds.coords def _reassign_coords(self, ds): """Re-assign coordinates. For some reason xarray doesn't assign coordinates to all high resolution data variables. """ return ds.assign_coords({'y': self.nc.coords['y'], 'x': self.nc.coords['x']}) def _cleanup_attrs(self, ds): """Cleanup dataset attributes.""" # Remove ancillary_variables attribute to avoid downstream # satpy warnings. 
ds.attrs.pop('ancillary_variables', None) def get_time(self): """Get time coordinate. Variable is sometimes named "time" and sometimes "time_ir_wv". """ try: return self['time_ir_wv'] except KeyError: return self['time'] def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" if is_high_resol(resolution): return self.nc.coords['x'], self.nc.coords['y'] return self.nc.coords['x_ir_wv'], self.nc.coords['x_ir_wv'] def get_image_size(self, resolution): """Get image size for the given resolution.""" if is_high_resol(resolution): return self.nc.coords['y'].size return self.nc.coords['y_ir_wv'].size class FiduceoMviriBase(BaseFileHandler): """Baseclass for FIDUCEO MVIRI file handlers.""" nc_keys = { 'WV': 'count_wv', 'IR': 'count_ir' } def __init__(self, filename, filename_info, filetype_info, mask_bad_quality=False): """Initialize the file handler. Args: mask_bad_quality: Mask VIS pixels with bad quality, that means any quality flag except "ok". If you need more control, use the ``quality_pixel_bitmask`` and ``data_quality_bitmask`` datasets. """ super(FiduceoMviriBase, self).__init__( filename, filename_info, filetype_info) self.mask_bad_quality = mask_bad_quality nc_raw = xr.open_dataset( filename, chunks={'x': CHUNK_SIZE, 'y': CHUNK_SIZE, 'x_ir_wv': CHUNK_SIZE, 'y_ir_wv': CHUNK_SIZE} ) self.nc = DatasetWrapper(nc_raw) # Projection longitude is not provided in the file, read it from the # filename. self.projection_longitude = float(filename_info['projection_longitude']) self.calib_coefs = self._get_calib_coefs() def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" name = dataset_id['name'] resolution = dataset_id['resolution'] if name in ANGLES: ds = self._get_angles(name, resolution) elif name in CHANNELS: ds = self._get_channel(name, resolution, dataset_id['calibration']) else: ds = self._get_other_dataset(name) ds = self._cleanup_coords(ds) self._update_attrs(ds, dataset_info) return ds def get_area_def(self, dataset_id): """Get area definition of the given dataset.""" im_size = self.nc.get_image_size(dataset_id['resolution']) nav = Navigator() return nav.get_area_def( im_size=im_size, projection_longitude=self.projection_longitude ) def _get_channel(self, name, resolution, calibration): """Get and calibrate channel data.""" ds = self.nc[self.nc_keys[name]] ds = self._calibrate( ds, channel=name, calibration=calibration ) if name == 'VIS': qc = VisQualityControl(self.nc['quality_pixel_bitmask']) if self.mask_bad_quality: ds = qc.mask(ds) else: qc.check() ds['acq_time'] = self._get_acq_time(resolution) return ds @lru_cache(maxsize=8) # 4 angle datasets with two resolutions each def _get_angles(self, name, resolution): """Get angle dataset. Files provide angles (solar/satellite zenith & azimuth) at a coarser resolution. Interpolate them to the desired resolution. 
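        A sketch of the expected call, as used for VIS reflectance calibration
        (2250 is the high-resolution sampling in metres, i.e. ``HIGH_RESOL``):

            sza = self._get_angles('solar_zenith_angle', 2250)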
""" angles = self.nc[name] target_x, target_y = self.nc.get_xy_coords(resolution) return Interpolator.interp_tiepoints( angles, target_x=target_x, target_y=target_y ) def _get_other_dataset(self, name): """Get other datasets such as uncertainties.""" ds = self.nc[name] if name in OTHER_REFLECTANCES: ds = VISCalibrator.refl_factor_to_percent(ds) return ds def _update_attrs(self, ds, info): """Update dataset attributes.""" ds.attrs.update(info) ds.attrs.update({'platform': self.filename_info['platform'], 'sensor': self.filename_info['sensor']}) ds.attrs['raw_metadata'] = self.nc.attrs ds.attrs['orbital_parameters'] = self._get_orbital_parameters() def _cleanup_coords(self, ds): """Cleanup dataset coordinates. Y/x coordinates have been useful for interpolation so far, but they only contain row/column numbers. Drop these coordinates so that Satpy can assign projection coordinates upstream (based on the area definition). """ return ds.drop_vars(['y', 'x']) def _calibrate(self, ds, channel, calibration): """Calibrate the given dataset.""" if channel == 'VIS': return self._calibrate_vis(ds, channel, calibration) calib = IRWVCalibrator(self.calib_coefs[channel]) return calib.calibrate(ds, calibration) @abc.abstractmethod def _calibrate_vis(self, ds, channel, calibration): # pragma: no cover """Calibrate VIS channel. To be implemented by subclasses.""" raise NotImplementedError def _get_calib_coefs(self): """Get calibration coefficients for all channels. Note: Only coefficients present in both file types. """ coefs = { 'VIS': { 'distance_sun_earth': self.nc['distance_sun_earth'], 'solar_irradiance': self.nc['solar_irradiance_vis'] }, 'IR': { 'a': self.nc['a_ir'], 'b': self.nc['b_ir'], 'bt_a': self.nc['bt_a_ir'], 'bt_b': self.nc['bt_b_ir'] }, 'WV': { 'a': self.nc['a_wv'], 'b': self.nc['b_wv'], 'bt_a': self.nc['bt_a_wv'], 'bt_b': self.nc['bt_b_wv'] }, } # Convert coefficients to 32bit float to reduce memory footprint # of calibrated data. for ch in coefs: for name in coefs[ch]: coefs[ch][name] = np.float32(coefs[ch][name]) return coefs @lru_cache(maxsize=3) # Three channels def _get_acq_time(self, resolution): """Get scanline acquisition time for the given resolution. Note that the acquisition time does not increase monotonically with the scanline number due to the scan pattern and rectification. """ time2d = self.nc.get_time() _, target_y = self.nc.get_xy_coords(resolution) return Interpolator.interp_acq_time(time2d, target_y=target_y.values) def _get_orbital_parameters(self): """Get the orbital parameters.""" orbital_parameters = { 'projection_longitude': self.projection_longitude, 'projection_latitude': 0.0, 'projection_altitude': ALTITUDE } ssp_lon, ssp_lat = self._get_ssp_lonlat() if not np.isnan(ssp_lon) and not np.isnan(ssp_lat): orbital_parameters.update({ 'satellite_actual_longitude': ssp_lon, 'satellite_actual_latitude': ssp_lat, # altitude not available }) return orbital_parameters def _get_ssp_lonlat(self): """Get longitude and latitude at the subsatellite point. Easy FCDR files provide satellite position at the beginning and end of the scan. This method computes the mean of those two values. In the full FCDR the information seems to be missing. 
Returns: Subsatellite longitude and latitude """ ssp_lon = self._get_ssp('longitude') ssp_lat = self._get_ssp('latitude') return ssp_lon, ssp_lat def _get_ssp(self, coord): key_start = 'sub_satellite_{}_start'.format(coord) key_end = 'sub_satellite_{}_end'.format(coord) try: sub_lonlat = np.nanmean( [self.nc[key_start].values, self.nc[key_end].values] ) except KeyError: # Variables seem to be missing in full FCDR sub_lonlat = np.nan return sub_lonlat class FiduceoMviriEasyFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Easy FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() nc_keys['VIS'] = 'toa_bidirectional_reflectance_vis' def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel. Easy FCDR provides reflectance only, no counts or radiance. """ if calibration == 'reflectance': coefs = self.calib_coefs[channel] cal = VISCalibrator(coefs) refl = cal.refl_factor_to_percent(ds) refl = cal.update_refl_attrs(refl) return refl elif calibration in ('counts', 'radiance'): raise ValueError('Cannot calibrate to {}. Easy FCDR provides ' 'reflectance only.'.format(calibration.name)) else: raise KeyError('Invalid calibration: {}'.format(calibration.name)) class FiduceoMviriFullFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Full FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() nc_keys['VIS'] = 'count_vis' def _get_calib_coefs(self): """Add additional VIS coefficients only present in full FCDR.""" coefs = super()._get_calib_coefs() coefs['VIS'].update({ 'years_since_launch': np.float32(self.nc['years_since_launch']), 'a0': np.float32(self.nc['a0_vis']), 'a1': np.float32(self.nc['a1_vis']), 'a2': np.float32(self.nc['a2_vis']), 'mean_count_space': np.float32( self.nc['mean_count_space_vis'] ) }) return coefs def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel.""" sza = None if calibration == 'reflectance': sza = self._get_angles('solar_zenith_angle', HIGH_RESOL) cal = VISCalibrator(self.calib_coefs[channel], sza) return cal.calibrate(ds, calibration) satpy-0.34.0/satpy/readers/netcdf_utils.py000066400000000000000000000257351420401153000205620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helpers for reading netcdf-based files.""" import logging import dask.array as da import netCDF4 import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str LOG = logging.getLogger(__name__) class NetCDF4FileHandler(BaseFileHandler): """Small class for inspecting a NetCDF4 file and retrieving its metadata/header data. File information can be accessed using bracket notation. 
Variables are accessed by using: wrapper["var_name"] Or: wrapper["group/subgroup/var_name"] Attributes can be accessed by appending "/attr/attr_name" to the item string: wrapper["group/subgroup/var_name/attr/units"] Or for global attributes: wrapper["/attr/platform_short_name"] Or for all of global attributes: wrapper["/attrs"] Note that loading datasets requires reopening the original file (unless those datasets are cached, see below), but to get just the shape of the dataset append "/shape" to the item string: wrapper["group/subgroup/var_name/shape"] If your file has many small data variables that are frequently accessed, you may choose to cache some of them. You can do this by passing a number, any variable smaller than this number in bytes will be read into RAM. Warning, this part of the API is provisional and subject to change. You may get an additional speedup by passing ``cache_handle=True``. This will keep the netCDF4 dataset handles open throughout the lifetime of the object, and instead of using `xarray.open_dataset` to open every data variable, a dask array will be created "manually". This may be useful if you have a dataset distributed over many files, such as for FCI. Note that the coordinates will be missing in this case. If you use this option, ``xarray_kwargs`` will have no effect. Args: filename (str): File to read filename_info (dict): Dictionary with filename information filetype_info (dict): Dictionary with filetype information auto_maskandscale (bool): Apply mask and scale factors xarray_kwargs (dict): Addition arguments to `xarray.open_dataset` cache_var_size (int): Cache variables smaller than this size. cache_handle (bool): Keep files open for lifetime of filehandler. """ file_handle = None def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None, cache_var_size=0, cache_handle=False): """Initialize object.""" super(NetCDF4FileHandler, self).__init__( filename, filename_info, filetype_info) self.file_content = {} self.cached_file_content = {} try: file_handle = netCDF4.Dataset(self.filename, 'r') except IOError: LOG.exception( 'Failed reading file %s. 
Possibly corrupted file', self.filename) raise self.auto_maskandscale = auto_maskandscale if hasattr(file_handle, "set_auto_maskandscale"): file_handle.set_auto_maskandscale(auto_maskandscale) self.collect_metadata("", file_handle) self.collect_dimensions("", file_handle) if cache_var_size > 0: self.collect_cache_vars( [varname for (varname, var) in self.file_content.items() if isinstance(var, netCDF4.Variable) and isinstance(var.dtype, np.dtype) # vlen may be str and var.size * var.dtype.itemsize < cache_var_size], file_handle) if cache_handle: self.file_handle = file_handle else: file_handle.close() self._xarray_kwargs = xarray_kwargs or {} self._xarray_kwargs.setdefault('chunks', CHUNK_SIZE) self._xarray_kwargs.setdefault('mask_and_scale', self.auto_maskandscale) def __del__(self): """Delete the file handler.""" if self.file_handle is not None: try: self.file_handle.close() except RuntimeError: # presumably closed already pass def _collect_global_attrs(self, obj): """Collect all the global attributes for the provided file object.""" global_attrs = {} for key in obj.ncattrs(): fc_key = f"/attr/{key}" value = self._get_attr_value(obj, key) self.file_content[fc_key] = global_attrs[key] = value self.file_content["/attrs"] = global_attrs def _collect_attrs(self, name, obj): """Collect all the attributes for the provided file object.""" for key in obj.ncattrs(): fc_key = f"{name}/attr/{key}" value = self._get_attr_value(obj, key) self.file_content[fc_key] = value def _get_attr_value(self, obj, key): value = getattr(obj, key) try: value = np2str(value) except ValueError: pass return value def collect_metadata(self, name, obj): """Collect all file variables and attributes for the provided file object. This method also iterates through subgroups of the provided object. """ # Look through each subgroup base_name = name + "/" if name else "" self._collect_groups_info(base_name, obj) self._collect_variables_info(base_name, obj) if not name: self._collect_global_attrs(obj) else: self._collect_attrs(name, obj) def _collect_groups_info(self, base_name, obj): for group_name, group_obj in obj.groups.items(): full_group_name = base_name + group_name self.file_content[full_group_name] = group_obj self._collect_attrs(full_group_name, group_obj) self.collect_metadata(full_group_name, group_obj) def _collect_variables_info(self, base_name, obj): for var_name, var_obj in obj.variables.items(): var_name = base_name + var_name self.file_content[var_name] = var_obj self.file_content[var_name + "/dtype"] = var_obj.dtype self.file_content[var_name + "/shape"] = var_obj.shape self.file_content[var_name + "/dimensions"] = var_obj.dimensions self._collect_attrs(var_name, var_obj) def collect_dimensions(self, name, obj): """Collect dimensions.""" for dim_name, dim_obj in obj.dimensions.items(): dim_name = "{}/dimension/{}".format(name, dim_name) self.file_content[dim_name] = len(dim_obj) def collect_cache_vars(self, cache_vars, obj): """Collect data variables for caching. This method will collect some data variables and store them in RAM. This may be useful if some small variables are frequently accessed, to prevent needlessly frequently opening and closing the file, which in case of xarray is associated with some overhead. Should be called later than `collect_metadata`. Args: cache_vars (List[str]): Names of data variables to be cached. obj (netCDF4.Dataset): Dataset object from which to read them. 
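        In normal use this method is called from the constructor when a
        positive ``cache_var_size`` is given; a direct call would look roughly
        like this (the variable name and dataset object are hypothetical):

            handler.collect_cache_vars(['some_group/some_scalar'], nc_dataset)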
""" for var_name in cache_vars: v = self.file_content[var_name] self.cached_file_content[var_name] = xr.DataArray( v[:], dims=v.dimensions, attrs=v.__dict__, name=v.name) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, netCDF4.Variable): return self._get_variable(key, val) if isinstance(val, netCDF4.Group): return self._get_group(key, val) return val def _get_variable(self, key, val): """Get a variable from the netcdf file.""" if key in self.cached_file_content: return self.cached_file_content[key] # these datasets are closed and inaccessible when the file is # closed, need to reopen # TODO: Handle HDF4 versus NetCDF3 versus NetCDF4 parts = key.rsplit('/', 1) if len(parts) == 2: group, key = parts else: group = None if self.file_handle is not None: val = self._get_var_from_filehandle(group, key) else: val = self._get_var_from_xr(group, key) return val def _get_group(self, key, val): """Get a group from the netcdf file.""" # Full groups are conveniently read with xr even if file_handle is available with xr.open_dataset(self.filename, group=key, **self._xarray_kwargs) as nc: val = nc return val def _get_var_from_xr(self, group, key): with xr.open_dataset(self.filename, group=group, **self._xarray_kwargs) as nc: val = nc[key] # Even though `chunks` is specified in the kwargs, xarray # uses dask.arrays only for data variables that have at least # one dimension; for zero-dimensional data variables (scalar), # it uses its own lazy loading for scalars. When those are # accessed after file closure, xarray reopens the file without # closing it again. This will leave potentially many open file # objects (which may in turn trigger a Segmentation Fault: # https://github.com/pydata/xarray/issues/2954#issuecomment-491221266 if not val.chunks: val.load() return val def _get_var_from_filehandle(self, group, key): # Not getting coordinates as this is more work, therefore more # overhead, and those are not used downstream. if group is None: g = self.file_handle else: g = self.file_handle[group] v = g[key] x = xr.DataArray( da.from_array(v), dims=v.dimensions, attrs=v.__dict__, name=v.name) return x def __contains__(self, item): """Get item from file content.""" return item in self.file_content def get(self, item, default=None): """Get item.""" if item in self: return self[item] else: return default satpy-0.34.0/satpy/readers/nucaps.py000066400000000000000000000414401420401153000173570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to NUCAPS Retrieval NetCDF files. NUCAPS stands for NOAA Unique Combined Atmospheric Processing System. NUCAPS retrievals include temperature, moisture, trace gas, and cloud-cleared radiance profiles. 
Product details can be found at: https://www.ospo.noaa.gov/Products/atmosphere/soundings/nucaps/ This reader supports both standard NOAA NUCAPS EDRs, and Science EDRs, which are essentially a subset of the standard EDRs with some additional parameters such as relative humidity and boundary layer temperature. NUCAPS data is derived from Cross-track Infrared Sounder (CrIS) data, and from Advanced Technology Microwave Sounder (ATMS) data, instruments onboard Joint Polar Satellite System spacecraft. """ import logging from collections import defaultdict import numpy as np import pandas as pd import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler from satpy.readers.yaml_reader import FileYAMLReader LOG = logging.getLogger(__name__) # It's difficult to do processing without knowing the pressure levels beforehand ALL_PRESSURE_LEVELS = [ 0.0161, 0.0384, 0.0769, 0.137, 0.2244, 0.3454, 0.5064, 0.714, 0.9753, 1.2972, 1.6872, 2.1526, 2.7009, 3.3398, 4.077, 4.9204, 5.8776, 6.9567, 8.1655, 9.5119, 11.0038, 12.6492, 14.4559, 16.4318, 18.5847, 20.9224, 23.4526, 26.1829, 29.121, 32.2744, 35.6505, 39.2566, 43.1001, 47.1882, 51.5278, 56.126, 60.9895, 66.1253, 71.5398, 77.2396, 83.231, 89.5204, 96.1138, 103.017, 110.237, 117.777, 125.646, 133.846, 142.385, 151.266, 160.496, 170.078, 180.018, 190.32, 200.989, 212.028, 223.441, 235.234, 247.408, 259.969, 272.919, 286.262, 300, 314.137, 328.675, 343.618, 358.966, 374.724, 390.893, 407.474, 424.47, 441.882, 459.712, 477.961, 496.63, 515.72, 535.232, 555.167, 575.525, 596.306, 617.511, 639.14, 661.192, 683.667, 706.565, 729.886, 753.628, 777.79, 802.371, 827.371, 852.788, 878.62, 904.866, 931.524, 958.591, 986.067, 1013.95, 1042.23, 1070.92, 1100 ] class NUCAPSFileHandler(NetCDF4FileHandler): """File handler for NUCAPS netCDF4 format.""" def __init__(self, *args, **kwargs): """Initialize file handler.""" # remove kwargs that reader instance used that file handler does not kwargs.pop('mask_surface', None) kwargs.pop('mask_quality', None) kwargs.setdefault('xarray_kwargs', {}).setdefault( 'decode_times', False) super(NUCAPSFileHandler, self).__init__(*args, **kwargs) def __contains__(self, item): """Return item from file content.""" return item in self.file_content def _parse_datetime(self, datestr): """Parse NUCAPS datetime string.""" return pd.to_datetime(datestr).to_pydatetime() @property def start_time(self): """Get start time.""" try: return self._parse_datetime(self['/attr/time_coverage_start']) except KeyError: # If attribute not present, use time from file name return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" try: return self._parse_datetime(self['/attr/time_coverage_end']) except KeyError: # If attribute not present, use time from file name return self.filename_info['end_time'] @property def start_orbit_number(self): """Return orbit number for the beginning of the swath.""" try: return int(self['/attr/start_orbit_number']) except KeyError: return 0 @property def end_orbit_number(self): """Return orbit number for the end of the swath.""" try: return int(self['/attr/end_orbit_number']) except KeyError: return 0 @property def platform_name(self): """Return standard platform name for the file's data.""" try: res = self['/attr/platform_name'] if isinstance(res, np.ndarray): return str(res.astype(str)) return res except KeyError: return self.filename_info['platform_shortname'] @property def sensor_names(self): """Return standard sensor or instrument name for the file's data.""" try: res = 
self['/attr/instrument_name'] res = [x.strip() for x in res.split(',')] if len(res) == 1: return res[0].lower() except KeyError: res = ['CrIS', 'ATMS', 'VIIRS'] return set(name.lower() for name in res) def get_shape(self, ds_id, ds_info): """Return data array shape for item specified.""" var_path = ds_info.get('file_key', '{}'.format(ds_id['name'])) if var_path + '/shape' not in self: # loading a scalar value shape = 1 else: shape = self[var_path + "/shape"] if "index" in ds_info: shape = shape[1:] if "pressure_index" in ds_info: shape = shape[:-1] return shape def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) shape = self.get_shape(dataset_id, ds_info) file_units = ds_info.get('file_units', self.get(var_path + '/attr/units')) ds_info.update(getattr(self[var_path], 'attrs', {})) # don't overwrite information in the files attrs because the same # `.attrs` is used for each separate Temperature pressure level dataset # Plus, if someone gets metadata multiple times then we are screwed info = ds_info info.update(ds_info) info.update(dataset_id.to_dict()) info.update({ "shape": shape, "units": ds_info.get("units", file_units), "platform_name": self.platform_name, "sensor": self.sensor_names, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) if 'standard_name' not in info: sname_path = var_path + '/attr/standard_name' info['standard_name'] = self.get(sname_path) if dataset_id['name'] != 'Quality_Flag': anc_vars = info.get('ancillary_variables', []) if 'Quality_Flag' not in anc_vars: anc_vars.append('Quality_Flag') info['ancillary_variables'] = anc_vars return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata for specified dataset.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max = self[var_path + '/attr/valid_range'] fill_value = self.get(var_path + '/attr/_FillValue') d_tmp = self[var_path] if "index" in ds_info: d_tmp = d_tmp[int(ds_info["index"])] if "pressure_index" in ds_info: d_tmp = d_tmp[..., int(ds_info["pressure_index"])] # this is a pressure based field # include surface_pressure as metadata sp = self['Surface_Pressure'] # Older format if 'number_of_FORs' in sp.dims: sp = sp.rename({'number_of_FORs': 'y'}) # Newer format if 'Number_of_CrIS_FORs' in sp.dims: sp = sp.rename({'Number_of_CrIS_FORs': 'y'}) if 'surface_pressure' in ds_info: ds_info['surface_pressure'] = xr.concat((ds_info['surface_pressure'], sp), dim='y') else: ds_info['surface_pressure'] = sp # include all the pressure levels ds_info.setdefault('pressure_levels', self['Pressure'][0]) data = d_tmp if valid_min is not None and valid_max is not None: # the original .cfg/INI based reader only checked valid_max data = data.where((data <= valid_max)) # | (data >= valid_min)) if fill_value is not None: data = data.where(data != fill_value) # this _FillValue is no longer valid metadata.pop('_FillValue', None) data.attrs.pop('_FillValue', None) data.attrs.update(metadata) # Older format if 'number_of_FORs' in data.dims: data = data.rename({'number_of_FORs': 'y'}) # Newer format if 'Number_of_CrIS_FORs' in data.dims: data = data.rename({'Number_of_CrIS_FORs': 'y'}) return data class NUCAPSReader(FileYAMLReader): """Reader for NUCAPS NetCDF4 files.""" def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs): """Configure reader behavior. 
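        These options are normally supplied through ``reader_kwargs`` when
        creating a Scene, for example (illustrative only):

            scn = Scene(filenames=files, reader='nucaps',
                        reader_kwargs={'mask_surface': False, 'mask_quality': False})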
Args: mask_surface (boolean): mask anything below the surface pressure mask_quality (boolean): mask anything where the `Quality_Flag` metadata is ``!= 1``. """ self.pressure_dataset_names = defaultdict(list) super(NUCAPSReader, self).__init__(config_files, **kwargs) self.mask_surface = self.info.get('mask_surface', mask_surface) self.mask_quality = self.info.get('mask_quality', mask_quality) def load_ds_ids_from_config(self): """Convert config dataset entries to DataIDs. Special handling is done to provide level specific datasets for any pressured based datasets. For example, a dataset is added for each pressure level of 'Temperature' with each new dataset being named 'Temperature_Xmb' where X is the pressure level. """ super(NUCAPSReader, self).load_ds_ids_from_config() for ds_id in list(self.all_ids.keys()): ds_info = self.all_ids[ds_id] if ds_info.get('pressure_based', False): for idx, lvl_num in enumerate(ALL_PRESSURE_LEVELS): if lvl_num < 5.0: suffix = "_{:0.03f}mb".format(lvl_num) else: suffix = "_{:0.0f}mb".format(lvl_num) new_info = ds_info.copy() new_info['pressure_level'] = lvl_num new_info['pressure_index'] = idx new_info['file_key'] = '{}'.format(ds_id['name']) new_info['name'] = ds_id['name'] + suffix new_ds_id = ds_id._replace(name=new_info['name']) new_info['id'] = new_ds_id self.all_ids[new_ds_id] = new_info self.pressure_dataset_names[ds_id['name']].append(new_info['name']) def load(self, dataset_keys, previous_datasets=None, pressure_levels=None): """Load data from one or more set of files. :param pressure_levels: mask out certain pressure levels: True for all levels (min, max) for a range of pressure levels [...] list of levels to include """ dataset_keys = set(self.get_dataset_key(x) for x in dataset_keys) if pressure_levels is not None: self._filter_dataset_keys_outside_pressure_levels(dataset_keys, pressure_levels) # Add pressure levels to the datasets to load if needed so # we can do further filtering after loading plevels_ds_id = self.get_dataset_key('Pressure_Levels') remove_plevels = False if plevels_ds_id not in dataset_keys: dataset_keys.add(plevels_ds_id) remove_plevels = True datasets_loaded = super(NUCAPSReader, self).load( dataset_keys, previous_datasets=previous_datasets) if pressure_levels is not None: if remove_plevels: plevels_ds = datasets_loaded.pop(plevels_ds_id) dataset_keys.remove(plevels_ds_id) else: plevels_ds = datasets_loaded[plevels_ds_id] _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels) if self.mask_surface: _mask_data_below_surface_pressure(datasets_loaded, dataset_keys) if self.mask_quality: _mask_data_with_quality_flag(datasets_loaded, dataset_keys) return datasets_loaded def _filter_dataset_keys_outside_pressure_levels(self, dataset_keys, pressure_levels): for ds_id in dataset_keys.copy(): ds_info = self.all_ids[ds_id] ds_level = ds_info.get("pressure_level") if ds_level is not None: if pressure_levels is True: # they want all pressure levels continue elif len(pressure_levels) == 2 and pressure_levels[0] <= ds_level <= pressure_levels[1]: # given a min and a max pressure level continue elif np.isclose(pressure_levels, ds_level).any(): # they asked for this specific pressure level continue else: # they don't want this dataset at this pressure level LOG.debug("Removing dataset to load: %s", ds_id) dataset_keys.remove(ds_id) continue def _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels): cond = _get_pressure_level_condition(plevels_ds, pressure_levels) if cond is not None: new_plevels = 
plevels_ds.where(cond, drop=True) else: new_plevels = plevels_ds for ds_id in datasets_loaded.keys(): ds_obj = datasets_loaded[ds_id] if plevels_ds.dims[0] not in ds_obj.dims: continue if cond is not None: datasets_loaded[ds_id] = ds_obj.where(cond, drop=True) datasets_loaded[ds_id].attrs['pressure_levels'] = new_plevels def _get_pressure_level_condition(plevels_ds, pressure_levels): if pressure_levels is True: cond = None elif len(pressure_levels) == 2: cond = (plevels_ds >= pressure_levels[0]) & (plevels_ds <= pressure_levels[1]) else: cond = plevels_ds == pressure_levels return cond def _mask_data_below_surface_pressure(datasets_loaded, dataset_keys): LOG.debug("Filtering pressure levels at or below the surface pressure") for ds_id in sorted(dataset_keys): ds = datasets_loaded[ds_id] if "surface_pressure" not in ds.attrs or "pressure_levels" not in ds.attrs: continue data_pressure = ds.attrs["pressure_levels"] surface_pressure = ds.attrs["surface_pressure"] if isinstance(surface_pressure, float): # scalar needs to become array for each record surface_pressure = np.repeat(surface_pressure, ds.shape[0]) if surface_pressure.ndim == 1 and surface_pressure.shape[0] == ds.shape[0]: # surface is one element per record LOG.debug("Filtering %s at and below the surface pressure", ds_id) if ds.ndim == 2: surface_pressure = np.repeat(surface_pressure[:, None], data_pressure.shape[0], axis=1) data_pressure = np.repeat(data_pressure[None, :], surface_pressure.shape[0], axis=0) datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure) else: # entire dataset represents one pressure level data_pressure = ds.attrs["pressure_level"] datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure) else: LOG.warning("Not sure how to handle shape of 'surface_pressure' metadata") def _mask_data_with_quality_flag(datasets_loaded, dataset_keys): LOG.debug("Filtering data based on quality flags") for ds_id in sorted(dataset_keys): ds = datasets_loaded[ds_id] quality_flag = [ x for x in ds.attrs.get('ancillary_variables', []) if x.attrs.get('name') == 'Quality_Flag'] if not quality_flag: continue quality_flag = quality_flag[0] if quality_flag.dims[0] not in ds.dims: continue LOG.debug("Masking %s where quality flag doesn't equal 1", ds_id) datasets_loaded[ds_id] = ds.where(quality_flag == 0) satpy-0.34.0/satpy/readers/nwcsaf_msg2013_hdf5.py000066400000000000000000000124471420401153000214360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for the old NWCSAF/Geo (v2013 and earlier) cloud product format. References: - The NWCSAF GEO 2013 products documentation: http://www.nwcsaf.org/web/guest/archive - Search for Code "ICD/3"; Type "MSG" and the box to the right should say 'Status' (which means any status). 
Version 7.0 seems to be for v2013 http://www.nwcsaf.org/aemetRest/downloadAttachment/2623 """ import logging from datetime import datetime import h5py import numpy as np from pyresample.geometry import AreaDefinition from satpy.readers.hdf5_utils import HDF5FileHandler logger = logging.getLogger(__name__) PLATFORM_NAMES = {'MSG1': 'Meteosat-8', 'MSG2': 'Meteosat-9', 'MSG3': 'Meteosat-10', 'MSG4': 'Meteosat-11', } class Hdf5NWCSAF(HDF5FileHandler): """NWCSAF MSG hdf5 reader.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(Hdf5NWCSAF, self).__init__(filename, filename_info, filetype_info) self.cache = {} def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" file_key = ds_info.get('file_key', dataset_id['name']) data = self[file_key] nodata = None if 'SCALING_FACTOR' in data.attrs and 'OFFSET' in data.attrs: dtype = np.dtype(data.data) if dataset_id['name'] in ['ctth_alti']: data.attrs['valid_range'] = (0, 27000) data.attrs['_FillValue'] = np.nan if dataset_id['name'] in ['ctth_alti', 'ctth_pres', 'ctth_tempe', 'ctth_effective_cloudiness']: dtype = np.dtype('float32') nodata = 255 if dataset_id['name'] in ['ct']: data.attrs['valid_range'] = (0, 20) data.attrs['_FillValue'] = 255 # data.attrs['palette_meanings'] = list(range(21)) attrs = data.attrs scaled_data = (data * data.attrs['SCALING_FACTOR'] + data.attrs['OFFSET']).astype(dtype) if nodata: scaled_data = scaled_data.where(data != nodata) scaled_data = scaled_data.where(scaled_data >= 0) data = scaled_data data.attrs = attrs for key in list(data.attrs.keys()): val = data.attrs[key] if isinstance(val, h5py.h5r.Reference): del data.attrs[key] return data def get_area_def(self, dsid): """Get the area definition of the datasets in the file.""" if dsid['name'].endswith('_pal'): raise NotImplementedError cfac = self.file_content['/attr/CFAC'] lfac = self.file_content['/attr/LFAC'] coff = self.file_content['/attr/COFF'] loff = self.file_content['/attr/LOFF'] numcols = int(self.file_content['/attr/NC']) numlines = int(self.file_content['/attr/NL']) aex = get_area_extent(cfac, lfac, coff, loff, numcols, numlines) pname = self.file_content['/attr/PROJECTION_NAME'] proj = {} if pname.startswith("GEOS"): proj["proj"] = "geos" proj["a"] = "6378169.0" proj["b"] = "6356583.8" proj["h"] = "35785831.0" proj["lon_0"] = str(float(pname.split("<")[1][:-1])) else: raise NotImplementedError("Only geos projection supported yet.") area_def = AreaDefinition(self.file_content['/attr/REGION_NAME'], self.file_content['/attr/REGION_NAME'], pname, proj, numcols, numlines, aex) return area_def @property def start_time(self): """Return the start time of the object.""" return datetime.strptime(self.file_content['/attr/IMAGE_ACQUISITION_TIME'], '%Y%m%d%H%M') def get_area_extent(cfac, lfac, coff, loff, numcols, numlines): """Get the area extent from msg parameters.""" xur = (numcols - coff) * 2 ** 16 / (cfac * 1.0) xur = np.deg2rad(xur) * 35785831.0 xll = (-1 - coff) * 2 ** 16 / (cfac * 1.0) xll = np.deg2rad(xll) * 35785831.0 xres = (xur - xll) / numcols xur, xll = xur - xres / 2, xll + xres / 2 yll = (numlines - loff) * 2 ** 16 / (-lfac * 1.0) yll = np.deg2rad(yll) * 35785831.0 yur = (-1 - loff) * 2 ** 16 / (-lfac * 1.0) yur = np.deg2rad(yur) * 35785831.0 yres = (yur - yll) / numlines yll, yur = yll + yres / 2, yur - yres / 2 return xll, yll, xur, yur satpy-0.34.0/satpy/readers/nwcsaf_nc.py000066400000000000000000000345061420401153000200340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # 
Copyright (c) 2017-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Nowcasting SAF common PPS&MSG NetCDF/CF format reader. References: - The NWCSAF GEO 2018 products documentation: http://www.nwcsaf.org/web/guest/archive """ import logging import os from datetime import datetime from functools import lru_cache import dask.array as da import numpy as np import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import unzip_file logger = logging.getLogger(__name__) SENSOR = {'NOAA-19': 'avhrr-3', 'NOAA-18': 'avhrr-3', 'NOAA-15': 'avhrr-3', 'Metop-A': 'avhrr-3', 'Metop-B': 'avhrr-3', 'Metop-C': 'avhrr-3', 'EOS-Aqua': 'modis', 'EOS-Terra': 'modis', 'Suomi-NPP': 'viirs', 'NOAA-20': 'viirs', 'JPSS-1': 'viirs', 'GOES-16': 'abi', 'GOES-17': 'abi', 'Himawari-8': 'ahi', 'Himawari-9': 'ahi', } PLATFORM_NAMES = {'MSG1': 'Meteosat-8', 'MSG2': 'Meteosat-9', 'MSG3': 'Meteosat-10', 'MSG4': 'Meteosat-11', 'GOES16': 'GOES-16', 'GOES17': 'GOES-17', } class NcNWCSAF(BaseFileHandler): """NWCSAF PPS&MSG NetCDF reader.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(NcNWCSAF, self).__init__(filename, filename_info, filetype_info) self._unzipped = unzip_file(self.filename) if self._unzipped: self.filename = self._unzipped self.cache = {} self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks=CHUNK_SIZE) self.nc = self.nc.rename({'nx': 'x', 'ny': 'y'}) self.sw_version = self.nc.attrs['source'] self.pps = False self.platform_name = None self.sensor = None self.file_key_prefix = filetype_info.get("file_key_prefix", "") try: # NWCSAF/Geo: try: kwrgs = {'sat_id': self.nc.attrs['satellite_identifier']} except KeyError: kwrgs = {'sat_id': self.nc.attrs['satellite_identifier'].astype(str)} except KeyError: # NWCSAF/PPS: kwrgs = {'platform_name': self.nc.attrs['platform']} self.set_platform_and_sensor(**kwrgs) def set_platform_and_sensor(self, **kwargs): """Set some metadata: platform_name, sensors, and pps (identifying PPS or Geo).""" try: # NWCSAF/Geo self.platform_name = PLATFORM_NAMES.get(kwargs['sat_id'], kwargs['sat_id']) except KeyError: # NWCSAF/PPS self.platform_name = kwargs['platform_name'] self.pps = True self.sensor = set([SENSOR.get(self.platform_name, 'seviri')]) def remove_timedim(self, var): """Remove time dimension from dataset.""" if self.pps and var.dims[0] == 'time': data = var[0, :, :] data.attrs = var.attrs var = data return var def get_dataset(self, dsid, info): """Load a dataset.""" dsid_name = dsid['name'] if dsid_name in self.cache: logger.debug('Get the data set from cache: %s.', dsid_name) return self.cache[dsid_name] if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc: # Get full resolution lon,lat from the reduced (tie points) grid lon, lat = self.upsample_geolocation() if dsid_name == "lon": 
return lon else: return lat logger.debug('Reading %s.', dsid_name) file_key = self._get_filekey(dsid_name, info) variable = self.nc[file_key] variable = self.remove_timedim(variable) variable = self.scale_dataset(variable, info) return variable def _get_filekey(self, dsid_name, info): try: file_key = self.file_key_prefix + info["file_key"] except KeyError: file_key = dsid_name return file_key def scale_dataset(self, variable, info): """Scale the data set, applying the attributes from the netCDF file. The scale and offset attributes will then be removed from the resulting variable. """ variable = remove_empties(variable) scale = variable.attrs.get('scale_factor', np.array(1)) offset = variable.attrs.get('add_offset', np.array(0)) if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating): variable = self._mask_variable(variable) attrs = variable.attrs.copy() variable = variable * scale + offset variable.attrs = attrs if 'valid_range' in variable.attrs: variable.attrs['valid_range'] = variable.attrs['valid_range'] * scale + offset variable.attrs.pop('add_offset', None) variable.attrs.pop('scale_factor', None) variable.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) if not variable.attrs.get('standard_name', '').endswith('status_flag'): # TODO: do we really need to add units to everything ? variable.attrs.setdefault('units', '1') ancillary_names = variable.attrs.get('ancillary_variables', '') try: variable.attrs['ancillary_variables'] = ancillary_names.split() except AttributeError: pass if 'palette_meanings' in variable.attrs: variable = self._prepare_variable_for_palette(variable, info) if 'standard_name' in info: variable.attrs.setdefault('standard_name', info['standard_name']) variable = self._adjust_variable_for_legacy_software(variable) return variable @staticmethod def _mask_variable(variable): if '_FillValue' in variable.attrs: variable = variable.where( variable != variable.attrs['_FillValue']) variable.attrs['_FillValue'] = np.nan if 'valid_range' in variable.attrs: variable = variable.where( variable <= variable.attrs['valid_range'][1]) variable = variable.where( variable >= variable.attrs['valid_range'][0]) if 'valid_max' in variable.attrs: variable = variable.where( variable <= variable.attrs['valid_max']) if 'valid_min' in variable.attrs: variable = variable.where( variable >= variable.attrs['valid_min']) return variable def _prepare_variable_for_palette(self, variable, info): try: so_dataset = self.nc[self.file_key_prefix + info['scale_offset_dataset']] except KeyError: scale = 1 offset = 0 else: scale = so_dataset.attrs['scale_factor'] offset = so_dataset.attrs['add_offset'] variable.attrs['palette_meanings'] = [int(val) for val in variable.attrs['palette_meanings'].split()] if variable.attrs['palette_meanings'][0] == 1: variable.attrs['palette_meanings'] = [0] + variable.attrs['palette_meanings'] variable = xr.DataArray(da.vstack((np.array(variable.attrs['fill_value_color']), variable.data)), coords=variable.coords, dims=variable.dims, attrs=variable.attrs) val, idx = np.unique(variable.attrs['palette_meanings'], return_index=True) variable.attrs['palette_meanings'] = val * scale + offset variable = variable[idx] return variable def _adjust_variable_for_legacy_software(self, variable): if self.sw_version == 'NWC/PPS version v2014' and variable.attrs.get('standard_name') == 'cloud_top_altitude': # pps 2014 valid range and palette don't match variable.attrs['valid_range'] = (0., 9000.) 
if (self.sw_version == 'NWC/PPS version v2014' and variable.attrs.get('long_name') == 'RGB Palette for ctth_alti'): # pps 2014 palette has the nodata color (black) first variable = variable[1:, :] return variable @lru_cache(maxsize=1) def upsample_geolocation(self): """Upsample the geolocation (lon,lat) from the tiepoint grid.""" from geotiepoints import SatelliteInterpolator # Read the fields needed: col_indices = self.nc['nx_reduced'].values row_indices = self.nc['ny_reduced'].values lat_reduced = self.scale_dataset(self.nc['lat_reduced'], {}) lon_reduced = self.scale_dataset(self.nc['lon_reduced'], {}) shape = (self.nc['y'].shape[0], self.nc['x'].shape[0]) cols_full = np.arange(shape[1]) rows_full = np.arange(shape[0]) satint = SatelliteInterpolator((lon_reduced.values, lat_reduced.values), (row_indices, col_indices), (rows_full, cols_full)) lons, lats = satint.interpolate() lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x']) lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x']) return lon, lat def get_area_def(self, dsid): """Get the area definition of the datasets in the file. Only applicable for MSG products! """ if self.pps: # PPS: raise NotImplementedError if dsid['name'].endswith('_pal'): raise NotImplementedError crs, area_extent = self._get_projection() crs, area_extent = self._ensure_crs_extents_in_meters(crs, area_extent) nlines, ncols = self.nc[dsid['name']].shape area = AreaDefinition('some_area_name', "On-the-fly area", 'geosmsg', crs, ncols, nlines, area_extent) return area @staticmethod def _ensure_crs_extents_in_meters(crs, area_extent): """Fix units in Earth shape, satellite altitude and 'units' attribute.""" if 'kilo' in crs.axis_info[0].unit_name: proj_dict = crs.to_dict() proj_dict["units"] = "m" if "a" in proj_dict: proj_dict["a"] *= 1000. if "b" in proj_dict: proj_dict["b"] *= 1000. if "R" in proj_dict: proj_dict["R"] *= 1000. proj_dict["h"] *= 1000. area_extent = tuple([val * 1000. 
for val in area_extent]) crs = CRS.from_dict(proj_dict) return crs, area_extent def __del__(self): """Delete the instance.""" if self._unzipped: try: os.remove(self._unzipped) except OSError: pass @property def start_time(self): """Return the start time of the object.""" try: # MSG: try: return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%SZ') except TypeError: return datetime.strptime(self.nc.attrs['time_coverage_start'].astype(str), '%Y-%m-%dT%H:%M:%SZ') except ValueError: # PPS: return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y%m%dT%H%M%S%fZ') @property def end_time(self): """Return the end time of the object.""" try: # MSG: try: return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%SZ') except TypeError: return datetime.strptime(self.nc.attrs['time_coverage_end'].astype(str), '%Y-%m-%dT%H:%M:%SZ') except ValueError: # PPS: return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y%m%dT%H%M%S%fZ') @property def sensor_names(self): """List of sensors represented in this file.""" return self.sensor def _get_projection(self): """Get projection from the NetCDF4 attributes.""" try: proj_str = self.nc.attrs['gdal_projection'] except TypeError: proj_str = self.nc.attrs['gdal_projection'].decode() # Check the a/b/h units radius_a = proj_str.split('+a=')[-1].split()[0] if float(radius_a) > 10e3: units = 'm' scale = 1.0 else: units = 'km' scale = 1e3 if 'units' not in proj_str: proj_str = proj_str + ' +units=' + units area_extent = (float(self.nc.attrs['gdal_xgeo_up_left']) / scale, float(self.nc.attrs['gdal_ygeo_low_right']) / scale, float(self.nc.attrs['gdal_xgeo_low_right']) / scale, float(self.nc.attrs['gdal_ygeo_up_left']) / scale) crs = CRS.from_string(proj_str) return crs, area_extent def remove_empties(variable): """Remove empty objects from the *variable*'s attrs.""" import h5py for key, val in variable.attrs.items(): if isinstance(val, h5py._hl.base.Empty): variable.attrs.pop(key) return variable satpy-0.34.0/satpy/readers/olci_nc.py000066400000000000000000000340211420401153000174710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Sentinel-3 OLCI reader. This reader supports an optional argument to choose the 'engine' for reading OLCI netCDF4 files. By default, this reader uses the default xarray choice of engine, as defined in the :func:`xarray.open_dataset` documentation`. As an alternative, the user may wish to use the 'h5netcdf' engine, but that is not default as it typically prints many non-fatal but confusing error messages to the terminal. 
To choose between engines the user can do as follows for the default:: scn = Scene(filenames=my_files, reader='olci_l1b') or as follows for the h5netcdf engine:: scn = Scene(filenames=my_files, reader='olci_l1b', reader_kwargs={'engine': 'h5netcdf'}) References: - :func:`xarray.open_dataset` """ import logging from contextlib import suppress from functools import reduce import dask.array as da import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import angle2xyz, xyz2angle logger = logging.getLogger(__name__) PLATFORM_NAMES = {'S3A': 'Sentinel-3A', 'S3B': 'Sentinel-3B'} class BitFlags(object): """Manipulate flags stored bitwise.""" flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN', 'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS', 'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL', 'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2', 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', 'WHITE_SCATT', 'LOWRW', 'HIGHRW'] meaning = {f: i for i, f in enumerate(flag_list)} def __init__(self, value): """Init the flags.""" self._value = value def __getitem__(self, item): """Get the item.""" pos = self.meaning[item] data = self._value if isinstance(data, xr.DataArray): data = data.data res = ((data >> pos) % 2).astype(bool) res = xr.DataArray(res, coords=self._value.coords, attrs=self._value.attrs, dims=self._value.dims) else: res = ((data >> pos) % 2).astype(bool) return res class NCOLCIBase(BaseFileHandler): """The OLCI reader base.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the olci reader base.""" super(NCOLCIBase, self).__init__(filename, filename_info, filetype_info) self._engine = engine self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'olci' self.open_file = None @cached_property def nc(self): """Get the nc xr dataset.""" f_obj = open_file_or_filename(self.filename) dataset = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=True, engine=self._engine, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) return dataset.rename({'columns': 'x', 'rows': 'y'}) @property def start_time(self): """Start time property.""" return self._start_time @property def end_time(self): """End time property.""" return self._end_time def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading %s.', key['name']) variable = self.nc[key['name']] return variable def __del__(self): """Close the NetCDF file that may still be open.""" with suppress(IOError, OSError, AttributeError): self.nc.close() class NCOLCICal(NCOLCIBase): """Dummy class for calibration.""" class NCOLCIGeo(NCOLCIBase): """Dummy class for navigation.""" class NCOLCIChannelBase(NCOLCIBase): """Base class for channel reading.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" super(NCOLCIChannelBase, self).__init__(filename, filename_info, filetype_info) self.channel = filename_info.get('dataset_name') class NCOLCI1B(NCOLCIChannelBase): """File handler for OLCI l1b.""" def __init__(self, filename, filename_info, filetype_info, cal, engine=None): """Init the file handler.""" super(NCOLCI1B, self).__init__(filename, 
filename_info, filetype_info) self.cal = cal.nc @staticmethod def _get_items(idx, solar_flux): """Get items.""" return solar_flux[idx] def _get_solar_flux(self, band): """Get the solar flux for the band.""" solar_flux = self.cal['solar_flux'].isel(bands=band).values d_index = self.cal['detector_index'].fillna(0).astype(int) return da.map_blocks(self._get_items, d_index.data, solar_flux=solar_flux, dtype=solar_flux.dtype) def get_dataset(self, key, info): """Load a dataset.""" if self.channel != key['name']: return logger.debug('Reading %s.', key['name']) radiances = self.nc[self.channel + '_radiance'] if key['calibration'] == 'reflectance': idx = int(key['name'][2:]) - 1 sflux = self._get_solar_flux(idx) radiances = radiances / sflux * np.pi * 100 radiances.attrs['units'] = '%' radiances.attrs['platform_name'] = self.platform_name radiances.attrs['sensor'] = self.sensor radiances.attrs.update(key.to_dict()) return radiances class NCOLCI2(NCOLCIChannelBase): """File handler for OLCI l2.""" def get_dataset(self, key, info): """Load a dataset.""" if self.channel is not None and self.channel != key['name']: return logger.debug('Reading %s.', key['name']) if self.channel is not None and self.channel.startswith('Oa'): dataset = self.nc[self.channel + '_reflectance'] else: dataset = self.nc[info['nc_key']] if key['name'] == 'wqsf': dataset.attrs['_FillValue'] = 1 elif key['name'] == 'mask': dataset = self.getbitmask(dataset) dataset.attrs['platform_name'] = self.platform_name dataset.attrs['sensor'] = self.sensor dataset.attrs.update(key.to_dict()) return dataset def getbitmask(self, wqsf, items=None): """Get the bitmask.""" if items is None: items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] bflags = BitFlags(wqsf) return reduce(np.logical_or, [bflags[item] for item in items]) class NCOLCILowResData(BaseFileHandler): """Handler for low resolution data.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" super(NCOLCILowResData, self).__init__(filename, filename_info, filetype_info) self.nc = None # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'olci' self.cache = {} self.engine = engine def _open_dataset(self): if self.nc is None: self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, engine=self.engine, chunks={'tie_columns': CHUNK_SIZE, 'tie_rows': CHUNK_SIZE}) self.nc = self.nc.rename({'tie_columns': 'x', 'tie_rows': 'y'}) self.l_step = self.nc.attrs['al_subsampling_factor'] self.c_step = self.nc.attrs['ac_subsampling_factor'] def _do_interpolate(self, data): if not isinstance(data, tuple): data = (data,) shape = data[0].shape from geotiepoints.interpolator import Interpolator tie_lines = np.arange(0, (shape[0] - 1) * self.l_step + 1, self.l_step) tie_cols = np.arange(0, (shape[1] - 1) * self.c_step + 1, self.c_step) lines = np.arange((shape[0] - 1) * self.l_step + 1) cols = np.arange((shape[1] - 1) * self.c_step + 1) along_track_order = 1 cross_track_order = 3 satint = Interpolator([x.values for x in data], (tie_lines, tie_cols), (lines, cols), along_track_order, cross_track_order) int_data = satint.interpolate() return [xr.DataArray(da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=['y', 'x']) for x in int_data] def _need_interpolation(self): return (self.c_step != 1 or self.l_step != 1) def __del__(self): """Close 
the NetCDF file that may still be open.""" try: self.nc.close() except (OSError, AttributeError): pass class NCOLCIAngles(NCOLCILowResData): """File handler for the OLCI angles.""" datasets = {'satellite_azimuth_angle': 'OAA', 'satellite_zenith_angle': 'OZA', 'solar_azimuth_angle': 'SAA', 'solar_zenith_angle': 'SZA'} def get_dataset(self, key, info): """Load a dataset.""" if key['name'] not in self.datasets: return self._open_dataset() logger.debug('Reading %s.', key['name']) if self._need_interpolation() and self.cache.get(key['name']) is None: if key['name'].startswith('satellite'): zen = self.nc[self.datasets['satellite_zenith_angle']] zattrs = zen.attrs azi = self.nc[self.datasets['satellite_azimuth_angle']] aattrs = azi.attrs elif key['name'].startswith('solar'): zen = self.nc[self.datasets['solar_zenith_angle']] zattrs = zen.attrs azi = self.nc[self.datasets['solar_azimuth_angle']] aattrs = azi.attrs else: raise NotImplementedError("Don't know how to read " + key['name']) x, y, z = angle2xyz(azi, zen) x, y, z = self._do_interpolate((x, y, z)) azi, zen = xyz2angle(x, y, z) azi.attrs = aattrs zen.attrs = zattrs if 'zenith' in key['name']: values = zen elif 'azimuth' in key['name']: values = azi else: raise NotImplementedError("Don't know how to read " + key['name']) if key['name'].startswith('satellite'): self.cache['satellite_zenith_angle'] = zen self.cache['satellite_azimuth_angle'] = azi elif key['name'].startswith('solar'): self.cache['solar_zenith_angle'] = zen self.cache['solar_azimuth_angle'] = azi elif key['name'] in self.cache: values = self.cache[key['name']] else: values = self.nc[self.datasets[key['name']]] values.attrs['platform_name'] = self.platform_name values.attrs['sensor'] = self.sensor values.attrs.update(key.to_dict()) return values def __del__(self): """Close the NetCDF file that may still be open.""" try: self.nc.close() except (OSError, AttributeError): pass class NCOLCIMeteo(NCOLCILowResData): """File handler for the OLCI meteo data.""" datasets = ['humidity', 'sea_level_pressure', 'total_columnar_water_vapour', 'total_ozone'] # TODO: the following depends on more than columns, rows # float atmospheric_temperature_profile(tie_rows, tie_columns, tie_pressure_levels) ; # float horizontal_wind(tie_rows, tie_columns, wind_vectors) ; # float reference_pressure_level(tie_pressure_levels) ; def get_dataset(self, key, info): """Load a dataset.""" if key['name'] not in self.datasets: return self._open_dataset() logger.debug('Reading %s.', key['name']) if self._need_interpolation() and self.cache.get(key['name']) is None: data = self.nc[key['name']] values, = self._do_interpolate(data) values.attrs = data.attrs self.cache[key['name']] = values elif key['name'] in self.cache: values = self.cache[key['name']] else: values = self.nc[key['name']] values.attrs['platform_name'] = self.platform_name values.attrs['sensor'] = self.sensor values.attrs.update(key.to_dict()) return values satpy-0.34.0/satpy/readers/omps_edr.py000066400000000000000000000127431420401153000177020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2015 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to OMPS EDR format.""" import logging from datetime import datetime, timedelta import numpy as np from satpy.readers.hdf5_utils import HDF5FileHandler NO_DATE = datetime(1958, 1, 1) EPSILON_TIME = timedelta(days=2) LOG = logging.getLogger(__name__) class EDRFileHandler(HDF5FileHandler): """EDR file handler.""" _fill_name = "_FillValue" @property def start_orbit_number(self): """Get the start orbit number.""" return self.filename_info['orbit'] @property def end_orbit_number(self): """Get the end orbit number.""" return self.filename_info['orbit'] @property def platform_name(self): """Get the platform name.""" return self.filename_info['platform_shortname'] @property def sensor_name(self): """Get the sensor name.""" return self.filename_info['instrument_shortname'] def get_shape(self, ds_id, ds_info): """Get the shape.""" return self[ds_info['file_key'] + '/shape'] def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" if factors is None or factors[0] is None: factors = [1, 0] if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors return np.array(factors) def get_metadata(self, dataset_id, ds_info): """Get the metadata.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) info = getattr(self[var_path], 'attrs', {}).copy() info.pop('DIMENSION_LIST', None) info.update(ds_info) file_units = ds_info.get('file_units') if file_units is None: file_units = self.get(var_path + '/attr/units', self.get(var_path + '/attr/Units')) if file_units is None: raise KeyError("File variable '{}' has no units attribute".format(var_path)) if file_units == 'deg': file_units = 'degrees' elif file_units == 'Unitless': file_units = '1' info.update({ "shape": self.get_shape(dataset_id, ds_info), "file_units": file_units, "units": ds_info.get("units", file_units), "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) info.update(dataset_id.to_dict()) if 'standard_name' not in ds_info: info['standard_name'] = self.get(var_path + '/attr/Title', dataset_id['name']) return info def get_dataset(self, dataset_id, ds_info): """Get the dataset.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max = self.get(var_path + '/attr/valid_range', self.get(var_path + '/attr/ValidRange', (None, None))) if valid_min is None or valid_max is None: valid_min = self.get(var_path + '/attr/valid_min', None) valid_max = self.get(var_path + '/attr/valid_max', None) if valid_min is None or valid_max is None: raise KeyError("File variable '{}' has no valid range attribute".format(var_path)) fill_name = var_path + '/attr/{}'.format(self._fill_name) if fill_name in self: fill_value = self[fill_name] else: fill_value = None data = self[var_path] scale_factor_path = var_path + '/attr/ScaleFactor' if scale_factor_path in self: scale_factor = self[scale_factor_path] scale_offset = self[var_path + '/attr/Offset'] else: scale_factor = None scale_offset = None if valid_min is not None and valid_max is not None: # 
the original .cfg/INI based reader only checked valid_max data = data.where((data <= valid_max) & (data >= valid_min)) if fill_value is not None: data = data.where(data != fill_value) factors = (scale_factor, scale_offset) factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data = data * factors[0] + factors[1] data.attrs.update(metadata) if 'DIMENSION_LIST' in data.attrs: data.attrs.pop('DIMENSION_LIST') dimensions = self.get_reference(var_path, 'DIMENSION_LIST') for dim, coord in zip(data.dims, dimensions): data.coords[dim] = coord[0] return data class EDREOSFileHandler(EDRFileHandler): """EDR EOS file handler.""" _fill_name = "MissingValue" satpy-0.34.0/satpy/readers/safe_sar_l2_ocn.py000066400000000000000000000114171420401153000211060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SAFE SAR L2 OCN format reader. The OCN data contains various parameters, but mainly the wind speed and direction calculated from SAR data and input model data from ECMWF Implemented in this reader is the OWI, Ocean Wind field. 
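A rough usage sketch (the reader name is assumed to match this module's YAML
configuration, and the file path and dataset name below are placeholders; the
actual dataset names follow the ``owi*`` variables present in the product)::

    from satpy import Scene

    # point at the owi measurement netCDF file inside the .SAFE directory
    scn = Scene(filenames=[path_to_owi_nc], reader='safe_sar_l2_ocn')
    scn.load(['owiNrcs'])  # or any other owi* variable in the file
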
See more at ESA webpage https://sentinel.esa.int/web/sentinel/ocean-wind-field-component """ import logging import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class SAFENC(BaseFileHandler): """Measurement file reader.""" def __init__(self, filename, filename_info, filetype_info): """Init the file reader.""" super(SAFENC, self).__init__(filename, filename_info, filetype_info) self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] # For some SAFE packages, fstart_time differs, but start_time is the same # To avoid over writing exiting file with same start_time, a solution is to # use fstart_time self._fstart_time = filename_info['fstart_time'] self._fend_time = filename_info['fend_time'] self._polarization = filename_info['polarization'] self.lats = None self.lons = None self._shape = None self.area = None self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=False, chunks={'owiAzSize': CHUNK_SIZE, 'owiRaSize': CHUNK_SIZE}) self.nc = self.nc.rename({'owiAzSize': 'y'}) self.nc = self.nc.rename({'owiRaSize': 'x'}) self.filename = filename def get_dataset(self, key, info): """Load a dataset.""" if key['name'] in ['owiLat', 'owiLon']: if self.lons is None or self.lats is None: self.lons = self.nc['owiLon'] self.lats = self.nc['owiLat'] if key['name'] == 'owiLat': res = self.lats else: res = self.lons res.attrs = info else: res = self._get_data_channels(key, info) if 'missionName' in self.nc.attrs: res.attrs.update({'platform_name': self.nc.attrs['missionName']}) res.attrs.update({'fstart_time': self._fstart_time}) res.attrs.update({'fend_time': self._fend_time}) if not self._shape: self._shape = res.shape return res def _get_data_channels(self, key, info): res = self.nc[key['name']] if key['name'] in ['owiHs', 'owiWl', 'owiDirmet']: res = xr.DataArray(res, dims=['y', 'x', 'oswPartitions']) elif key['name'] in ['owiNrcs', 'owiNesz', 'owiNrcsNeszCorr']: res = xr.DataArray(res, dims=['y', 'x', 'oswPolarisation']) elif key['name'] in ['owiPolarisationName']: res = xr.DataArray(res, dims=['owiPolarisation']) elif key['name'] in ['owiCalConstObsi', 'owiCalConstInci']: res = xr.DataArray(res, dims=['owiIncSize']) elif key['name'].startswith('owi'): res = xr.DataArray(res, dims=['y', 'x']) else: res = xr.DataArray(res, dims=['y', 'x']) res.attrs.update(info) if '_FillValue' in res.attrs: res = res.where(res != res.attrs['_FillValue']) res.attrs['_FillValue'] = np.nan return res @property def start_time(self): """Product start_time, parsed from the measurement file name.""" return self._start_time @property def end_time(self): """Product end_time, parsed from the measurement file name.""" return self._end_time @property def fstart_time(self): """Product fstart_time meaning the start time parsed from the SAFE directory.""" return self._fstart_time @property def fend_time(self): """Product fend_time meaning the end time parsed from the SAFE directory.""" return self._fend_time satpy-0.34.0/satpy/readers/sar_c_safe.py000066400000000000000000000612751420401153000201630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SAFE SAR-C reader. This module implements a reader for Sentinel 1 SAR-C GRD (level1) SAFE format as provided by ESA. The format is comprised of a directory containing multiple files, most notably two measurement files in geotiff and a few xml files for calibration, noise and metadata. References: - *Level 1 Product Formatting* https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-1-sar/products-algorithms/level-1-product-formatting - J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won, *"Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel,"* in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3, pp. 1555-1565, March 2018. doi: `10.1109/TGRS.2017.2765248 `_ """ import logging from functools import lru_cache from threading import Lock import defusedxml.ElementTree as ET import numpy as np import rasterio import rioxarray import xarray as xr from dask import array as da from dask.base import tokenize from xarray import DataArray from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) def dictify(r): """Convert an ElementTree into a dict.""" return {r.tag: _dictify(r)} def _dictify(r): """Convert an xml element to dict.""" d = {} if r.text and r.text.strip(): try: return int(r.text) except ValueError: try: return float(r.text) except ValueError: return r.text for x in r.findall("./*"): if x.tag in d and not isinstance(d[x.tag], list): d[x.tag] = [d[x.tag]] d[x.tag].append(_dictify(x)) else: d[x.tag] = _dictify(x) return d def _get_calibration_name(calibration): """Get the proper calibration name.""" calibration_name = getattr(calibration, "name", calibration) or 'gamma' if calibration_name == 'sigma_nought': calibration_name = 'sigmaNought' elif calibration_name == 'beta_nought': calibration_name = 'betaNought' return calibration_name class SAFEXML(BaseFileHandler): """XML file reader for the SAFE format.""" def __init__(self, filename, filename_info, filetype_info, header_file=None): """Init the xml filehandler.""" super(SAFEXML, self).__init__(filename, filename_info, filetype_info) self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] self._polarization = filename_info['polarization'] self.root = ET.parse(self.filename) self.hdr = {} if header_file is not None: self.hdr = header_file.get_metadata() else: self.hdr = self.get_metadata() self._image_shape = (self.hdr['product']['imageAnnotation']['imageInformation']['numberOfLines'], self.hdr['product']['imageAnnotation']['imageInformation']['numberOfSamples']) def get_metadata(self): """Convert the xml metadata to dict.""" return dictify(self.root.getroot()) @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time class SAFEXMLAnnotation(SAFEXML): """XML file reader for the SAFE format, Annotation file.""" def get_dataset(self, key, info, chunks=None): """Load a dataset.""" if self._polarization != key["polarization"]: return if key["name"] == "incidence_angle": return self.get_incidence_angle(chunks=chunks or CHUNK_SIZE) @lru_cache(maxsize=10) def 
get_incidence_angle(self, chunks): """Get the incidence angle array.""" incidence_angle = XMLArray(self.root, ".//geolocationGridPoint", "incidenceAngle") return incidence_angle.expand(self._image_shape, chunks=chunks) class SAFEXMLCalibration(SAFEXML): """XML file reader for the SAFE format, Calibration file.""" def get_dataset(self, key, info, chunks=None): """Load a dataset.""" if self._polarization != key["polarization"]: return if key["name"] == "calibration_constant": return self.get_calibration_constant() return self.get_calibration(key["name"], chunks=chunks or CHUNK_SIZE) def get_calibration_constant(self): """Load the calibration constant.""" return float(self.root.find('.//absoluteCalibrationConstant').text) @lru_cache(maxsize=10) def get_calibration(self, calibration, chunks=None): """Get the calibration array.""" calibration_name = _get_calibration_name(calibration) calibration_vector = self._get_calibration_vector(calibration_name, chunks) return calibration_vector def _get_calibration_vector(self, calibration_name, chunks): """Get the calibration vector.""" calibration_vector = XMLArray(self.root, ".//calibrationVector", calibration_name) return calibration_vector.expand(self._image_shape, chunks=chunks) class SAFEXMLNoise(SAFEXML): """XML file reader for the SAFE format, Noise file.""" def __init__(self, filename, filename_info, filetype_info, header_file=None): """Init the xml filehandler.""" super().__init__(filename, filename_info, filetype_info, header_file) self.azimuth_noise_reader = AzimuthNoiseReader(self.root, self._image_shape) def get_dataset(self, key, info, chunks=None): """Load a dataset.""" if self._polarization != key["polarization"]: return if key["name"] == "noise": return self.get_noise_correction(chunks=chunks or CHUNK_SIZE) @lru_cache(maxsize=10) def get_noise_correction(self, chunks=None): """Get the noise correction array.""" try: noise = self.read_legacy_noise(chunks) except KeyError: range_noise = self.read_range_noise_array(chunks) azimuth_noise = self.azimuth_noise_reader.read_azimuth_noise_array(chunks) noise = range_noise * azimuth_noise return noise def read_legacy_noise(self, chunks): """Read noise for legacy GRD data.""" noise = XMLArray(self.root, ".//noiseVector", "noiseLut") return noise.expand(self._image_shape, chunks) def read_range_noise_array(self, chunks): """Read the range-noise array.""" range_noise = XMLArray(self.root, ".//noiseRangeVector", "noiseRangeLut") return range_noise.expand(self._image_shape, chunks) class AzimuthNoiseReader: """Class to parse and read azimuth-noise data. The azimuth noise vector is provided as a series of blocks, each comprised of a column of data to fill the block and a start and finish column number, and a start and finish line. For example, we can see here a (fake) azimuth noise array:: [[ 1. 1. 1. nan nan nan nan nan nan nan] [ 1. 1. 1. nan nan nan nan nan nan nan] [ 2. 2. 3. 3. 3. 4. 4. 4. 4. nan] [ 2. 2. 3. 3. 3. 4. 4. 4. 4. nan] [ 2. 2. 3. 3. 3. 4. 4. 4. 4. nan] [ 2. 2. 5. 5. 5. 5. 6. 6. 6. 6.] [ 2. 2. 5. 5. 5. 5. 6. 6. 6. 6.] [ 2. 2. 5. 5. 5. 5. 6. 6. 6. 6.] [ 2. 2. 7. 7. 7. 7. 7. 8. 8. 8.] [ 2. 2. 7. 7. 7. 7. 7. 8. 8. 8.]] As is shown here, the blocks may not cover the full array, and hence it has to be gap-filled with NaNs. 
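Each block is turned into such a rectangle by interpolating its noise LUT
along the azimuth (line) direction and broadcasting the result over the
block's column range. A simplified numpy sketch of that expansion (the block
bounds and LUT values below are made up, and the real implementation works on
dask/xarray objects)::

    import numpy as np

    first_line, last_line = 0, 9      # hypothetical block bounds
    first_pixel, last_pixel = 2, 5
    lines = np.array([0, 4, 9])       # azimuth lines carrying noise samples
    lut = np.array([1.0, 2.0, 3.0])   # noise values at those lines

    y = np.arange(first_line, last_line + 1)
    x = np.arange(first_pixel, last_pixel + 1)
    # interpolate along y, then repeat the column across the block's width
    block = np.ones((y.size, x.size)) * np.interp(y, lines, lut)[:, np.newaxis]
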
""" def __init__(self, root, shape): """Set up the azimuth noise reader.""" self.root = root self.elements = self.root.findall(".//noiseAzimuthVector") self._image_shape = shape self.blocks = [] def read_azimuth_noise_array(self, chunks=CHUNK_SIZE): """Read the azimuth noise vectors.""" self._read_azimuth_noise_blocks(chunks) populated_array = self._assemble_azimuth_noise_blocks(chunks) return populated_array def _read_azimuth_noise_blocks(self, chunks): """Read the azimuth noise blocks.""" self.blocks = [] for elt in self.elements: block = _AzimuthBlock(elt) new_arr = block.expand(chunks) self.blocks.append(new_arr) def _assemble_azimuth_noise_blocks(self, chunks): """Assemble the azimuth noise blocks into one single array.""" # The strategy here is a bit convoluted. The job would be trivial if # performed on regular numpy arrays, but here we want to keep the data # as xarray/dask array as much as possible. # Using a pure xarray approach was tested (with `combine_first`, # `interpolate_na`, etc), but was found to be memory-hungry at the time # of implementation (March 2021). Hence the usage of a custom algorithm, # relying mostly on dask arrays. slices = self._create_dask_slices_from_blocks(chunks) populated_array = da.vstack(slices).rechunk(chunks) populated_array = xr.DataArray(populated_array, dims=['y', 'x'], coords={'x': np.arange(self._image_shape[1]), 'y': np.arange(self._image_shape[0])}) return populated_array def _create_dask_slices_from_blocks(self, chunks): """Create full-width slices from azimuth noise blocks.""" current_line = 0 slices = [] while current_line < self._image_shape[0]: new_slice = self._create_dask_slice_from_block_line(current_line, chunks) slices.append(new_slice) current_line += new_slice.shape[0] return slices def _create_dask_slice_from_block_line(self, current_line, chunks): """Create a dask slice from the blocks at the current line.""" pieces = self._get_array_pieces_for_current_line(current_line) dask_pieces = self._get_padded_dask_pieces(pieces, chunks) new_slice = da.hstack(dask_pieces) return new_slice def _get_array_pieces_for_current_line(self, current_line): """Get the array pieces that cover the current line.""" current_blocks = self._find_blocks_covering_line(current_line) current_blocks.sort(key=(lambda x: x.coords['x'][0])) next_line = self._get_next_start_line(current_blocks, current_line) current_y = np.arange(current_line, next_line) pieces = [arr.sel(y=current_y) for arr in current_blocks] return pieces def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" current_blocks = [] for block in self.blocks: if block.coords['y'][0] <= current_line <= block.coords['y'][-1]: current_blocks.append(block) return current_blocks def _get_next_start_line(self, current_blocks, current_line): next_line = min((arr.coords['y'][-1] for arr in current_blocks)) + 1 blocks_starting_soon = [block for block in self.blocks if current_line < block.coords["y"][0] < next_line] if blocks_starting_soon: next_start_line = min((arr.coords["y"][0] for arr in blocks_starting_soon)) next_line = min(next_line, next_start_line) return next_line def _get_padded_dask_pieces(self, pieces, chunks): """Get the padded pieces of a slice.""" pieces = sorted(pieces, key=(lambda x: x.coords['x'][0])) dask_pieces = [] previous_x_end = -1 piece = pieces[0] next_x_start = piece.coords['x'][0].item() y_shape = len(piece.coords['y']) x_shape = (next_x_start - previous_x_end - 1) self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks) for i, 
piece in enumerate(pieces): dask_pieces.append(piece.data) previous_x_end = piece.coords['x'][-1].item() try: next_x_start = pieces[i + 1].coords['x'][0].item() except IndexError: next_x_start = self._image_shape[1] x_shape = (next_x_start - previous_x_end - 1) self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks) return dask_pieces @staticmethod def _fill_dask_pieces(dask_pieces, shape, chunks): if shape[1] > 0: new_piece = da.full(shape, np.nan, chunks=chunks) dask_pieces.append(new_piece) def interpolate_slice(slice_rows, slice_cols, interpolator): """Interpolate the given slice of the larger array.""" fine_rows = np.arange(slice_rows.start, slice_rows.stop, slice_rows.step) fine_cols = np.arange(slice_cols.start, slice_cols.stop, slice_cols.step) return interpolator(fine_cols, fine_rows) class _AzimuthBlock: """Implementation of an single azimuth-noise block.""" def __init__(self, xml_element): """Set up the block from an XML element.""" self.element = xml_element def expand(self, chunks): """Build an azimuth block from xml data.""" corr = 1 # This isn't needed with newer data (> 2020). When was the change operated? # # The azimuth noise is normalized per swath to account for gain # differences between the swaths in EW mode. # # This is based on the this reference: # J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won, # "Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel," # in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3, # pp. 1555-1565, March 2018. # doi: 10.1109/TGRS.2017.2765248 # # For old data. < 2020 # swath = elt.find('swath').text # if swath == 'EW1': # corr = 1.5 # if swath in ['EW4', 'IW3']: # corr = 1.2 # if swath == 'EW5': # corr = 1.5 data = self.lut * corr x_coord = np.arange(self.first_pixel, self.last_pixel + 1) y_coord = np.arange(self.first_line, self.last_line + 1) new_arr = (da.ones((len(y_coord), len(x_coord)), chunks=chunks) * np.interp(y_coord, self.lines, data)[:, np.newaxis]) new_arr = xr.DataArray(new_arr, dims=['y', 'x'], coords={'x': x_coord, 'y': y_coord}) return new_arr @property def first_pixel(self): return int(self.element.find('firstRangeSample').text) @property def last_pixel(self): return int(self.element.find('lastRangeSample').text) @property def first_line(self): return int(self.element.find('firstAzimuthLine').text) @property def last_line(self): return int(self.element.find('lastAzimuthLine').text) @property def lines(self): lines = self.element.find('line').text.split() return np.array(lines).astype(int) @property def lut(self): lut = self.element.find('noiseAzimuthLut').text.split() return np.array(lut).astype(float) class XMLArray: """A proxy for getting xml data as an array.""" def __init__(self, root, list_tag, element_tag): """Set up the XML array.""" self.root = root self.list_tag = list_tag self.element_tag = element_tag self.data, self.low_res_coords = self._read_xml_array() def expand(self, shape, chunks=None): """Generate the full-blown array.""" return self.interpolate_xml_array(shape, chunks=chunks) def _read_xml_array(self): """Read an array from xml.""" elements = self.get_data_items() y = [] x = [] data = [] for elt in elements: new_x = elt.find('pixel').text.split() y += [int(elt.find('line').text)] * len(new_x) x += [int(val) for val in new_x] data += [float(val) for val in elt.find(self.element_tag).text.split()] return np.asarray(data), (x, y) def get_data_items(self): """Get the data items for this array.""" data_items = self.root.findall(self.list_tag) 
if not data_items: raise KeyError("Can't find data items for xml tag " + self.list_tag) return data_items def interpolate_xml_array(self, shape, chunks): """Interpolate arbitrary size dataset to a full sized grid.""" xpoints, ypoints = self.low_res_coords return interpolate_xarray_linear(xpoints, ypoints, self.data, shape, chunks=chunks) def interpolate_xarray(xpoints, ypoints, values, shape, kind='cubic', blocksize=CHUNK_SIZE): """Interpolate, generating a dask array.""" vchunks = range(0, shape[0], blocksize) hchunks = range(0, shape[1], blocksize) token = tokenize(blocksize, xpoints, ypoints, values, kind, shape) name = 'interpolate-' + token from scipy.interpolate import interp2d interpolator = interp2d(xpoints, ypoints, values, kind=kind) dskx = {(name, i, j): (interpolate_slice, slice(vcs, min(vcs + blocksize, shape[0])), slice(hcs, min(hcs + blocksize, shape[1])), interpolator) for i, vcs in enumerate(vchunks) for j, hcs in enumerate(hchunks) } res = da.Array(dskx, name, shape=list(shape), chunks=(blocksize, blocksize), dtype=values.dtype) return DataArray(res, dims=('y', 'x')) def intp(grid_x, grid_y, interpolator): """Interpolate.""" return interpolator((grid_y, grid_x)) def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE): """Interpolate linearly, generating a dask array.""" from scipy.interpolate.interpnd import LinearNDInterpolator, _ndim_coords_from_arrays if isinstance(chunks, (list, tuple)): vchunks, hchunks = chunks else: vchunks, hchunks = chunks, chunks points = _ndim_coords_from_arrays(np.vstack((np.asarray(ypoints), np.asarray(xpoints))).T) interpolator = LinearNDInterpolator(points, values) grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks), da.arange(shape[0], chunks=vchunks)) # workaround for non-thread-safe first call of the interpolator: interpolator((0, 0)) res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator) return DataArray(res, dims=('y', 'x')) class SAFEGRD(BaseFileHandler): """Measurement file reader. The measurement files are in geotiff format and read using rasterio. For performance reasons, the reading adapts the chunk size to match the file's block size. 
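A rough sketch of how the pieces fit together (normally the YAML reader
configuration wires this up automatically; the file names and info dicts
below are placeholders)::

    cal = SAFEXMLCalibration(calibration_xml, filename_info, filetype_info)
    noise = SAFEXMLNoise(noise_xml, filename_info, filetype_info)
    annot = SAFEXMLAnnotation(annotation_xml, filename_info, filetype_info)
    grd = SAFEGRD(measurement_tiff, filename_info, filetype_info,
                  cal, noise, annot)
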
""" def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annotationfh): """Init the grd filehandler.""" super(SAFEGRD, self).__init__(filename, filename_info, filetype_info) self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] self._polarization = filename_info['polarization'] self._mission_id = filename_info['mission_id'] self.calibration = calfh self.noise = noisefh self.annotation = annotationfh self.read_lock = Lock() self.filehandle = rasterio.open(self.filename, 'r', sharing=False) def get_dataset(self, key, info): """Load a dataset.""" if self._polarization != key["polarization"]: return logger.debug('Reading %s.', key['name']) if key['name'] in ['longitude', 'latitude', 'altitude']: logger.debug('Constructing coordinate arrays.') arrays = dict() arrays['longitude'], arrays['latitude'], arrays['altitude'] = self.get_lonlatalts() data = arrays[key['name']] data.attrs.update(info) else: data = rioxarray.open_rasterio(self.filename, lock=False, chunks=(1, CHUNK_SIZE, CHUNK_SIZE)).squeeze() data = data.assign_coords(x=np.arange(len(data.coords['x'])), y=np.arange(len(data.coords['y']))) data = self._calibrate_and_denoise(data, key) data.attrs.update(info) data.attrs.update({'platform_name': self._mission_id}) data = self._change_quantity(data, key['quantity']) return data @staticmethod def _change_quantity(data, quantity): """Change quantity to dB if needed.""" if quantity == 'dB': data.data = 10 * np.log10(data.data) data.attrs['units'] = 'dB' else: data.attrs['units'] = '1' return data def _calibrate_and_denoise(self, data, key): """Calibrate and denoise the data.""" chunks = CHUNK_SIZE dn = self._get_digital_number(data) dn = self._denoise(dn, chunks) data = self._calibrate(dn, chunks, key) return data def _get_digital_number(self, data): """Get the digital numbers (uncalibrated data).""" data = data.where(data > 0) data = data.astype(np.float64) dn = data * data return dn def _denoise(self, dn, chunks): """Denoise the data.""" logger.debug('Reading noise data.') noise = self.noise.get_noise_correction(chunks=chunks).fillna(0) dn = dn - noise return dn def _calibrate(self, dn, chunks, key): """Calibrate the data.""" logger.debug('Reading calibration data.') cal = self.calibration.get_calibration(key['calibration'], chunks=chunks) cal_constant = self.calibration.get_calibration_constant() logger.debug('Calibrating.') data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) return data @lru_cache(maxsize=2) def get_lonlatalts(self): """Obtain GCPs and construct latitude and longitude arrays. Args: band (gdal band): Measurement band which comes with GCP's array_shape (tuple) : The size of the data array Returns: coordinates (tuple): A tuple with longitude and latitude arrays """ band = self.filehandle (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (gcps, crs) = self.get_gcps() # FIXME: do interpolation on cartesian coordinates if the area is # problematic. longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, band.shape) latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape) altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) longitudes.attrs['gcps'] = gcps longitudes.attrs['crs'] = crs latitudes.attrs['gcps'] = gcps latitudes.attrs['crs'] = crs altitudes.attrs['gcps'] = gcps altitudes.attrs['crs'] = crs return longitudes, latitudes, altitudes def get_gcps(self): """Read GCP from the GDAL band. 
Args: band (gdal band): Measurement band which comes with GCP's coordinates (tuple): A tuple with longitude and latitude arrays Returns: points (tuple): Pixel and Line indices 1d arrays gcp_coords (tuple): longitude and latitude 1d arrays """ gcps = self.filehandle.gcps gcp_array = np.array([(p.row, p.col, p.x, p.y, p.z) for p in gcps[0]]) ypoints = np.unique(gcp_array[:, 0]) xpoints = np.unique(gcp_array[:, 1]) gcp_lons = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_lats = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_alts = gcp_array[:, 4].reshape(ypoints.shape[0], xpoints.shape[0]) return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), gcps @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time satpy-0.34.0/satpy/readers/satpy_cf_nc.py000066400000000000000000000306231420401153000203570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""Reader for files produced with the cf netcdf writer in satpy. Introduction ------------ The ``satpy_cf_nc`` reader reads data written by the satpy cf_writer. Filenames for cf_writer are optional. There are several readers using the same satpy_cf_nc.py reader. * Generic reader ``satpy_cf_nc`` * EUMETSAT GAC FDR reader ``avhrr_l1c_eum_gac_fdr_nc`` Generic reader -------------- The generic ``satpy_cf_nc`` reader reads files of type: .. code-block:: none '{platform_name}-{sensor}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc' Example ------- Here is an example how to read the data in satpy: .. code-block:: python from satpy import Scene filenames = ['data/npp-viirs-mband-20201007075915-20201007080744.nc'] scn = Scene(reader='satpy_cf_nc', filenames=filenames) scn.load(['M05']) scn['M05'] Output: .. code-block:: none dask.array Coordinates: longitude (y, x) float32 dask.array latitude (y, x) float32 dask.array Dimensions without coordinates: y, x Attributes: start_time: 2020-10-07 07:59:15 start_orbit: 46350 end_time: 2020-10-07 08:07:44 end_orbit: 46350 calibration: reflectance long_name: M05 modifiers: ('sunz_corrected',) platform_name: Suomi-NPP resolution: 742 sensor: viirs standard_name: toa_bidirectional_reflectance units: % wavelength: 0.672 µm (0.662-0.682 µm) date_created: 2020-10-07T08:20:02Z instrument: VIIRS Notes: Available datasets and attributes will depend on the data saved with the cf_writer. EUMETSAT AVHRR GAC FDR L1C reader --------------------------------- The ``avhrr_l1c_eum_gac_fdr_nc`` reader reads files of type: .. code-block:: none ''AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc' Example ------- Here is an example how to read the data in satpy: .. 
code-block:: python from satpy import Scene filenames = ['data/AVHRR-GAC_FDR_1C_N06_19810330T042358Z_19810330T060903Z_R_O_20200101T000000Z_0100.nc'] scn = Scene(reader='avhrr_l1c_eum_gac_fdr_nc', filenames=filenames) scn.load(['brightness_temperature_channel_4']) scn['brightness_temperature_channel_4'] Output: .. code-block:: none dask.array Coordinates: * x (x) int16 0 1 2 3 4 5 6 7 8 ... 401 402 403 404 405 406 407 408 * y (y) int64 0 1 2 3 4 5 6 7 8 9 10 acq_time (y) datetime64[ns] dask.array longitude (y, x) float64 dask.array latitude (y, x) float64 dask.array Attributes: start_time: 1981-03-30 04:23:58 end_time: 1981-03-30 06:09:03 calibration: brightness_temperature modifiers: () resolution: 1050 standard_name: toa_brightness_temperature units: K wavelength: 10.8 µm (10.3-11.3 µm) Conventions: CF-1.8 ACDD-1.3 comment: Developed in cooperation with EUME... creator_email: ops@eumetsat.int creator_name: EUMETSAT creator_url: https://www.eumetsat.int/ date_created: 2020-09-14T10:50:51.073707 disposition_mode: O gac_filename: NSS.GHRR.NA.D81089.S0423.E0609.B09... geospatial_lat_max: 89.95386902434623 geospatial_lat_min: -89.97581969005503 geospatial_lat_resolution: 1050 meters geospatial_lat_units: degrees_north geospatial_lon_max: 179.99952992568998 geospatial_lon_min: -180.0 geospatial_lon_resolution: 1050 meters geospatial_lon_units: degrees_east ground_station: GC id: DOI:10.5676/EUM/AVHRR_GAC_L1C_FDR/... institution: EUMETSAT instrument: Earth Remote Sensing Instruments >... keywords: ATMOSPHERE > ATMOSPHERIC RADIATION... keywords_vocabulary: GCMD Science Keywords, Version 9.1 licence: EUMETSAT data policy https://www.e... naming_authority: int.eumetsat orbit_number_end: 9123 orbit_number_start: 9122 orbital_parameters_tle: ['1 11416U 79057A 81090.16350942... platform: Earth Observation Satellites > NOA... processing_level: 1C processing_mode: R product_version: 1.0.0 references: Devasthale, A., M. Raspaud, C. Sch... source: AVHRR GAC Level 1 Data standard_name_vocabulary: CF Standard Name Table v73 summary: Fundamental Data Record (FDR) of m... sun_earth_distance_correction_factor: 0.9975244779999585 time_coverage_end: 19820803T003900Z time_coverage_start: 19800101T000000Z title: AVHRR GAC L1C FDR version_calib_coeffs: PATMOS-x, v2017r1 version_pygac: 1.4.0 version_pygac_fdr: 0.1.dev107+gceb7b26.d20200910 version_satpy: 0.21.1.dev894+g5cf76e6 history: Created by pytroll/satpy on 2020-0... name: brightness_temperature_channel_4 _satpy_id: DataID(name='brightness_temperatur... 
ancillary_variables: [] """ import itertools import json import logging import xarray as xr from satpy import CHUNK_SIZE from satpy.dataset.dataid import WavelengthRange from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class SatpyCFFileHandler(BaseFileHandler): """File handler for Satpy's CF netCDF files.""" def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix='CHANNEL_'): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info) self.engine = None self._numeric_name_prefix = numeric_name_prefix @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def sensor(self): """Get sensor.""" nc = xr.open_dataset(self.filename, engine=self.engine) return nc.attrs['instrument'].replace('/', '-').lower() @property def sensor_names(self): """Get sensor set.""" return {self.sensor} def available_datasets(self, configured_datasets=None): """Add information of available datasets.""" existing = self._existing_datasets(configured_datasets=configured_datasets) dynamic = self._dynamic_datasets() coordinates = self._coordinate_datasets() for dataset_available, dataset_info in itertools.chain(existing, dynamic, coordinates): yield dataset_available, dataset_info def _existing_datasets(self, configured_datasets=None): """Add information of existing datasets.""" for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info def fix_modifier_attr(self, ds_info): """Fix modifiers attribute.""" # Empty modifiers are read as [], which causes problems later if 'modifiers' in ds_info and not ds_info['modifiers']: ds_info['modifiers'] = () try: try: ds_info['modifiers'] = tuple(ds_info['modifiers'].split(' ')) except AttributeError: pass except KeyError: pass def _assign_ds_info(self, var_name, val): """Assign ds_info.""" ds_info = dict(val.attrs) ds_info['file_type'] = self.filetype_info['file_type'] ds_info['name'] = ds_info['nc_store_name'] = var_name if 'original_name' in ds_info: ds_info['name'] = ds_info['original_name'] elif self._numeric_name_prefix and var_name.startswith(self._numeric_name_prefix): ds_info['name'] = var_name.replace(self._numeric_name_prefix, '') try: ds_info['wavelength'] = WavelengthRange.from_cf(ds_info['wavelength']) except KeyError: pass return ds_info def _dynamic_datasets(self): """Add information of dynamic datasets.""" nc = xr.open_dataset(self.filename, engine=self.engine) # get dynamic variables known to this file (that we created) for var_name, val in nc.data_vars.items(): ds_info = self._assign_ds_info(var_name, val) self.fix_modifier_attr(ds_info) yield True, ds_info def _coordinate_datasets(self, configured_datasets=None): """Add information of coordinate datasets.""" nc = xr.open_dataset(self.filename, engine=self.engine) for var_name, val in nc.coords.items(): ds_info = dict(val.attrs) ds_info['file_type'] = self.filetype_info['file_type'] ds_info['name'] = var_name self.fix_modifier_attr(ds_info) yield True, ds_info def get_dataset(self, ds_id, ds_info): """Get dataset.""" logger.debug("Getting data for: %s", ds_id['name']) nc = xr.open_dataset(self.filename, engine=self.engine, chunks={'y': CHUNK_SIZE, 'x': CHUNK_SIZE}) name = ds_info.get('nc_store_name', ds_id['name']) file_key = ds_info.get('file_key', name) data = nc[file_key] if name != ds_id['name']: data = data.rename(ds_id['name']) 
data.attrs.update(nc.attrs) # For now add global attributes to all datasets if "orbital_parameters" in data.attrs: data.attrs["orbital_parameters"] = _str2dict(data.attrs["orbital_parameters"]) return data def _str2dict(val): """Convert string to dictionary.""" if isinstance(val, str): val = json.loads(val) return val satpy-0.34.0/satpy/readers/scatsat1_l2b.py000066400000000000000000000053101420401153000203440ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . # type: ignore """ScatSat-1 L2B Reader, distributed by Eumetsat in HDF5 format.""" from datetime import datetime import h5py from satpy.dataset import Dataset from satpy.readers.file_handlers import BaseFileHandler class SCATSAT1L2BFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") h5data = self.h5f['science_data'] self.filename_info['start_time'] = datetime.strptime(h5data.attrs['Range Beginning Date'], '%Y-%jT%H:%M:%S.%f') self.filename_info['end_time'] = datetime.strptime(h5data.attrs['Range Ending Date'], '%Y-%jT%H:%M:%S.%f') self.lons = None self.lats = None self.wind_speed_scale = float(h5data.attrs['Wind Speed Selection Scale']) self.wind_direction_scale = float(h5data.attrs['Wind Direction Selection Scale']) self.latitude_scale = float(h5data.attrs['Latitude Scale']) self.longitude_scale = float(h5data.attrs['Longitude Scale']) def get_dataset(self, key, info): h5data = self.h5f['science_data'] stdname = info.get('standard_name') if stdname in ['latitude', 'longitude']: if self.lons is None or self.lats is None: self.lons = h5data['Longitude'][:]*self.longitude_scale self.lats = h5data['Latitude'][:]*self.latitude_scale if info['standard_name'] == 'longitude': return Dataset(self.lons, id=key, **info) else: return Dataset(self.lats, id=key, **info) if stdname in ['wind_speed']: windspeed = h5data['Wind_speed_selection'][:, :] * self.wind_speed_scale return Dataset(windspeed, id=key, **info) if stdname in ['wind_direction']: wind_direction = h5data['Wind_direction_selection'][:, :] * self.wind_direction_scale return Dataset(wind_direction, id=key, **info) satpy-0.34.0/satpy/readers/scmi.py000066400000000000000000000270471420401153000170300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SCMI NetCDF4 Reader. SCMI files are typically used for data for the ABI instrument onboard the GOES-16/17 satellites. It is the primary format used for providing ABI data to the AWIPS visualization clients used by the US National Weather Service forecasters. The python code for this reader may be reused by other readers as NetCDF schemes/metadata change for different products. The initial reader using this code is the "scmi_abi" reader (see `abi_l1b_scmi.yaml` for more information). There are two forms of these files that this reader supports: 1. Official SCMI format: NetCDF4 files where the main data variable is stored in a variable called "Sectorized_CMI". This variable name can be configured in the YAML configuration file. 2. Satpy/Polar2Grid SCMI format: NetCDF4 files based on the official SCMI format created for the Polar2Grid project. This format was migrated to Satpy as part of Polar2Grid's adoption of Satpy for the majority of its features. This format is what is produced by Satpy's `scmi` writer. This format can be identified by a single variable named "data" and a global attribute named ``"awips_id"`` that is set to a string starting with ``"AWIPS_"``. """ import logging import os from datetime import datetime import numpy as np import xarray as xr from pyresample import geometry from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations LOAD_CHUNK_SIZE = int(os.getenv('PYTROLL_LOAD_CHUNK_SIZE', -1)) logger = logging.getLogger(__name__) class SCMIFileHandler(BaseFileHandler): """Handle a single SCMI NetCDF4 file.""" def __init__(self, filename, filename_info, filetype_info): """Set up the SCMI file handler.""" super(SCMIFileHandler, self).__init__(filename, filename_info, filetype_info) # xarray's default netcdf4 engine self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'x': LOAD_CHUNK_SIZE, 'y': LOAD_CHUNK_SIZE}) self.platform_name = self.nc.attrs['satellite_id'] self.sensor = self._get_sensor() self.nlines = self.nc.dims['y'] self.ncols = self.nc.dims['x'] self.coords = {} def _get_sensor(self): """Determine the sensor for this file.""" # sometimes Himawari-8 (or 9) data is stored in SCMI format is_h8 = 'H8' in self.platform_name is_h9 = 'H9' in self.platform_name is_ahi = is_h8 or is_h9 return 'ahi' if is_ahi else 'abi' @property def sensor_names(self): """Get the sensor names.""" return [self.sensor] def __getitem__(self, item): """Wrap around `self.nc[item]`. Some datasets use a 32-bit float scaling factor like the 'x' and 'y' variables which causes inaccurate unscaled data values. This method forces the scale factor to a 64-bit float first. 
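        A minimal sketch of the precision problem (the factor below is a
        made-up illustrative value, not taken from any real SCMI file)::

            import numpy as np

            factor = np.float32(5.5888e-05)   # hypothetical 32-bit scale factor
            counts = np.uint16(20000)
            counts * factor                   # product computed in float32, rounded
            counts * np.float64(factor)       # same factor promoted to 64-bit, more precise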
""" data = self.nc[item] attrs = data.attrs factor = data.attrs.get('scale_factor') offset = data.attrs.get('add_offset') fill = data.attrs.get('_FillValue') if fill is not None: data = data.where(data != fill) if factor is not None: # make sure the factor is a 64-bit float # can't do this in place since data is most likely uint16 # and we are making it a 64-bit float data = data * float(factor) + offset data.attrs = attrs # handle coordinates (and recursive fun) new_coords = {} # 'time' dimension causes issues in other processing if 'time' in data.coords: data = data.drop_vars('time') if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): if coord_name not in self.coords: self.coords[coord_name] = self[coord_name] new_coords[coord_name] = self.coords[coord_name] data.coords.update(new_coords) return data def get_shape(self, key, info): """Get the shape of the data.""" return self.nlines, self.ncols def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading in get_dataset %s.', key['name']) var_name = info.get('file_key', self.filetype_info.get('file_key')) if var_name: data = self[var_name] elif 'Sectorized_CMI' in self.nc: data = self['Sectorized_CMI'] elif 'data' in self.nc: data = self['data'] # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations data = data.chunk({'x': CHUNK_SIZE, 'y': CHUNK_SIZE}) # convert to satpy standard units factor = data.attrs.pop('scale_factor', 1) offset = data.attrs.pop('add_offset', 0) units = data.attrs.get('units', 1) # the '*1' unit is some weird convention added/needed by AWIPS if units in ['1', '*1'] and key['calibration'] == 'reflectance': data *= 100 factor *= 100 # used for valid_min/max data.attrs['units'] = '%' # set up all the attributes that might be useful to the user/satpy data.attrs.update({'platform_name': self.platform_name, 'sensor': data.attrs.get('sensor', self.sensor), }) if 'satellite_longitude' in self.nc.attrs: data.attrs['satellite_longitude'] = self.nc.attrs['satellite_longitude'] data.attrs['satellite_latitude'] = self.nc.attrs['satellite_latitude'] data.attrs['satellite_altitude'] = self.nc.attrs['satellite_altitude'] scene_id = self.nc.attrs.get('scene_id') if scene_id is not None: data.attrs['scene_id'] = scene_id data.attrs.update(key.to_dict()) data.attrs.pop('_FillValue', None) if 'valid_min' in data.attrs: vmin = data.attrs.pop('valid_min') vmax = data.attrs.pop('valid_max') vmin = vmin * factor + offset vmax = vmax * factor + offset data.attrs['valid_min'] = vmin data.attrs['valid_max'] = vmax return data def _get_cf_grid_mapping_var(self): """Figure out which grid mapping should be used.""" gmaps = ['fixedgrid_projection', 'goes_imager_projection', 'lambert_projection', 'polar_projection', 'mercator_projection'] if 'grid_mapping' in self.filename_info: gmaps = [self.filename_info.get('grid_mapping')] + gmaps for grid_mapping in gmaps: if grid_mapping in self.nc: return self.nc[grid_mapping] raise KeyError("Can't find grid mapping variable in SCMI file") def _get_proj4_name(self, projection): """Map CF projection name to PROJ.4 name.""" gmap_name = projection.attrs['grid_mapping_name'] proj = { 'geostationary': 'geos', 'lambert_conformal_conic': 'lcc', 'polar_stereographic': 'stere', 'mercator': 'merc', }.get(gmap_name, gmap_name) return proj def _get_proj_specific_params(self, projection): """Convert CF projection parameters to PROJ.4 dict.""" proj = self._get_proj4_name(projection) 
proj_dict = { 'proj': proj, 'a': float(projection.attrs['semi_major_axis']), 'b': float(projection.attrs['semi_minor_axis']), 'units': 'm', } if proj == 'geos': proj_dict['h'] = float(projection.attrs['perspective_point_height']) proj_dict['sweep'] = projection.attrs.get('sweep_angle_axis', 'y') proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) proj_dict['lat_0'] = float(projection.attrs.get('latitude_of_projection_origin', 0.0)) elif proj == 'lcc': proj_dict['lat_0'] = float(projection.attrs['standard_parallel']) proj_dict['lon_0'] = float(projection.attrs['longitude_of_central_meridian']) proj_dict['lat_1'] = float(projection.attrs['latitude_of_projection_origin']) elif proj == 'stere': proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) proj_dict['lon_0'] = float(projection.attrs['straight_vertical_longitude_from_pole']) proj_dict['lat_0'] = float(projection.attrs['latitude_of_projection_origin']) elif proj == 'merc': proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) proj_dict['lat_0'] = proj_dict['lat_ts'] proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) else: raise ValueError("Can't handle projection '{}'".format(proj)) return proj_dict def _calc_extents(self, proj_dict): """Calculate area extents from x/y variables.""" h = float(proj_dict.get('h', 1.)) # force to 64-bit float x = self['x'] y = self['y'] x_units = x.attrs.get('units', 'rad') if x_units == 'meters': h_factor = 1. factor = 1. elif x_units == 'microradian': h_factor = h factor = 1e6 else: # radians h_factor = h factor = 1. x_l = h_factor * x[0] / factor x_r = h_factor * x[-1] / factor y_l = h_factor * y[-1] / factor y_u = h_factor * y[0] / factor x_half = (x_r - x_l) / (self.ncols - 1) / 2. y_half = (y_u - y_l) / (self.nlines - 1) / 2. return x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half def get_area_def(self, key): """Get the area definition of the data at hand.""" # FIXME: Can't we pass dataset info to the get_area_def? projection = self._get_cf_grid_mapping_var() proj_dict = self._get_proj_specific_params(projection) area_extent = self._calc_extents(proj_dict) area_name = '{}_{}'.format(self.sensor, proj_dict['proj']) return geometry.AreaDefinition( area_name, "SCMI file area", area_name, proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_date_time'], '%Y%j%H%M%S') @property def end_time(self): """Get the end time.""" return self.start_time def __del__(self): """Delete the instance.""" try: self.nc.close() except OSError: pass satpy-0.34.0/satpy/readers/seadas_l2.py000066400000000000000000000060711420401153000177240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for SEADAS L2 products. 
This reader currently only supports MODIS and VIIRS Chlorophyll A from SEADAS. """ from datetime import datetime from .hdf4_utils import HDF4FileHandler TIME_FORMAT = "%Y%j%H%M%S" class SEADASL2HDFFileHandler(HDF4FileHandler): """Simple handler of SEADAS L2 files.""" def _add_satpy_metadata(self, data): data.attrs["sensor"] = self.sensor_names data.attrs["platform_name"] = self._platform_name() data.attrs["rows_per_scan"] = self._rows_per_scan() return data def _rows_per_scan(self): if "modis" in self.sensor_names: return 10 if "viirs" in self.sensor_names: return 16 raise ValueError(f"Don't know how to read data for sensors: {self.sensor_names}") def _platform_name(self): platform = self["/attr/Mission"] platform_dict = {'NPP': 'Suomi-NPP', 'JPSS-1': 'NOAA-20', 'JPSS-2': 'NOAA-21'} return platform_dict.get(platform, platform) @property def start_time(self): """Get the starting observation time of this file's data.""" start_time = self["/attr/Start Time"] return datetime.strptime(start_time[:-3], TIME_FORMAT) @property def end_time(self): """Get the ending observation time of this file's data.""" end_time = self["/attr/End Time"] return datetime.strptime(end_time[:-3], TIME_FORMAT) @property def sensor_names(self): """Get sensor for the current file's data.""" # Example: MODISA or VIIRSN or VIIRSJ1 sensor_name = self["/attr/Sensor Name"].lower() if sensor_name.startswith("modis"): return {"modis"} return {"viirs"} def get_dataset(self, data_id, dataset_info): """Get DataArray for the specified DataID.""" file_key = dataset_info.get("file_key", data_id["name"]) data = self[file_key] valid_range = data.attrs["valid_range"] data = data.where(valid_range[0] <= data) data = data.where(data <= valid_range[1]) for attr_name in ("standard_name", "long_name", "units"): val = data.attrs[attr_name] if val[-1] == "\x00": data.attrs[attr_name] = data.attrs[attr_name][:-1] data = self._add_satpy_metadata(data) return data satpy-0.34.0/satpy/readers/seviri_base.py000066400000000000000000001021321420401153000203550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Common functionality for SEVIRI L1.5 data readers. 
Introduction ------------ *The Spinning Enhanced Visible and InfraRed Imager (SEVIRI) is the primary instrument on Meteosat Second Generation (MSG) and has the capacity to observe the Earth in 12 spectral channels.* *Level 1.5 corresponds to image data that has been corrected for all unwanted radiometric and geometric effects, has been geolocated using a standardised projection, and has been calibrated and radiance-linearised.* (From the EUMETSAT documentation) Satpy provides the following readers for SEVIRI L1.5 data in different formats: - Native: :mod:`satpy.readers.seviri_l1b_native` - HRIT: :mod:`satpy.readers.seviri_l1b_hrit` - netCDF: :mod:`satpy.readers.seviri_l1b_nc` Calibration ----------- This section describes how to control the calibration of SEVIRI L1.5 data. Calibration to radiance ^^^^^^^^^^^^^^^^^^^^^^^ The SEVIRI L1.5 data readers allow for choosing between two file-internal calibration coefficients to convert counts to radiances: - Nominal for all channels (default) - GSICS where available (IR currently) and nominal for the remaining channels (VIS & HRV currently) In order to change the default behaviour, use the ``reader_kwargs`` keyword argument upon Scene creation:: import satpy scene = satpy.Scene(filenames, reader='seviri_l1b_...', reader_kwargs={'calib_mode': 'GSICS'}) scene.load(['VIS006', 'IR_108']) Furthermore, it is possible to specify external calibration coefficients for the conversion from counts to radiances. External coefficients take precedence over internal coefficients, but you can also mix internal and external coefficients: If external calibration coefficients are specified for only a subset of channels, the remaining channels will be calibrated using the chosen file-internal coefficients (nominal or GSICS). Calibration coefficients must be specified in [mW m-2 sr-1 (cm-1)-1]. In the following example we use external calibration coefficients for the ``VIS006`` & ``IR_108`` channels, and nominal coefficients for the remaining channels:: coefs = {'VIS006': {'gain': 0.0236, 'offset': -1.20}, 'IR_108': {'gain': 0.2156, 'offset': -10.4}} scene = satpy.Scene(filenames, reader='seviri_l1b_...', reader_kwargs={'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) In the next example we use external calibration coefficients for the ``VIS006`` & ``IR_108`` channels, GSICS coefficients where available (other IR channels) and nominal coefficients for the rest:: coefs = {'VIS006': {'gain': 0.0236, 'offset': -1.20}, 'IR_108': {'gain': 0.2156, 'offset': -10.4}} scene = satpy.Scene(filenames, reader='seviri_l1b_...', reader_kwargs={'calib_mode': 'GSICS', 'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) Calibration to reflectance ^^^^^^^^^^^^^^^^^^^^^^^^^^ When loading solar channels, the SEVIRI L1.5 data readers apply a correction for the Sun-Earth distance variation throughout the year - as recommended by the EUMETSAT document `Conversion from radiances to reflectances for SEVIRI warm channels`_. In the unlikely situation that this correction is not required, it can be removed on a per-channel basis using :func:`satpy.readers.utils.remove_earthsun_distance_correction`. Metadata ^^^^^^^^ The SEVIRI L1.5 readers provide the following metadata: * The ``orbital_parameters`` attribute provides the nominal and actual satellite position, as well as the projection centre. See the `Metadata` section in the :doc:`../readers` chapter for more information. 
* The ``acq_time`` coordinate provides the mean acquisition time for each scanline. Use a ``MultiIndex`` to enable selection by acquisition time: .. code-block:: python import pandas as pd mi = pd.MultiIndex.from_arrays([scn['IR_108']['y'].data, scn['IR_108']['acq_time'].data], names=('y_coord', 'time')) scn['IR_108']['y'] = mi scn['IR_108'].sel(time=np.datetime64('2019-03-01T12:06:13.052000000')) * Raw metadata from the file header can be included by setting the reader argument ``include_raw_metadata=True`` (HRIT and Native format only). Note that this comes with a performance penalty of up to 10% if raw metadata from multiple segments or scans need to be combined. By default arrays with more than 100 elements are excluded to limit the performance penalty. This threshold can be adjusted using the ``mda_max_array_size`` reader keyword argument: .. code-block:: python scene = satpy.Scene(filenames, reader='seviri_l1b_hrit/native', reader_kwargs={'include_raw_metadata': True, 'mda_max_array_size': 1000}) References: - `MSG Level 1.5 Image Data Format Description`_ - `Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance`_ .. _Conversion from radiances to reflectances for SEVIRI warm channels: https://www-cdn.eumetsat.int/files/2020-04/pdf_msg_seviri_rad2refl.pdf .. _MSG Level 1.5 Image Data Format Description: https://www-cdn.eumetsat.int/files/2020-05/pdf_ten_05105_msg_img_data.pdf .. _Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance: https://www-cdn.eumetsat.int/files/2020-04/pdf_ten_msg_seviri_rad_calib.pdf """ import warnings import dask.array as da import numpy as np import pyproj from numpy.polynomial.chebyshev import Chebyshev from satpy import CHUNK_SIZE from satpy.readers.eum_base import issue_revision, time_cds_short from satpy.readers.utils import apply_earthsun_distance_correction PLATFORM_DICT = { 'MET08': 'Meteosat-8', 'MET09': 'Meteosat-9', 'MET10': 'Meteosat-10', 'MET11': 'Meteosat-11', 'MSG1': 'Meteosat-8', 'MSG2': 'Meteosat-9', 'MSG3': 'Meteosat-10', 'MSG4': 'Meteosat-11', } REPEAT_CYCLE_DURATION = 15 C1 = 1.19104273e-5 C2 = 1.43877523 VISIR_NUM_COLUMNS = 3712 VISIR_NUM_LINES = 3712 HRV_NUM_COLUMNS = 11136 HRV_NUM_LINES = 11136 CHANNEL_NAMES = {1: "VIS006", 2: "VIS008", 3: "IR_016", 4: "IR_039", 5: "WV_062", 6: "WV_073", 7: "IR_087", 8: "IR_097", 9: "IR_108", 10: "IR_120", 11: "IR_134", 12: "HRV"} VIS_CHANNELS = ['HRV', 'VIS006', 'VIS008', 'IR_016'] # Polynomial coefficients for spectral-effective BT fits BTFIT = {} # [A, B, C] BTFIT['IR_039'] = [0.0, 1.011751900, -3.550400] BTFIT['WV_062'] = [0.00001805700, 1.000255533, -1.790930] BTFIT['WV_073'] = [0.00000231818, 1.000668281, -0.456166] BTFIT['IR_087'] = [-0.00002332000, 1.011803400, -1.507390] BTFIT['IR_097'] = [-0.00002055330, 1.009370670, -1.030600] BTFIT['IR_108'] = [-0.00007392770, 1.032889800, -3.296740] BTFIT['IR_120'] = [-0.00007009840, 1.031314600, -3.181090] BTFIT['IR_134'] = [-0.00007293450, 1.030424800, -2.645950] SATNUM = {321: "8", 322: "9", 323: "10", 324: "11"} CALIB = {} # Meteosat 8 CALIB[321] = {'HRV': {'F': 78.7599}, 'VIS006': {'F': 65.2296}, 'VIS008': {'F': 73.0127}, 'IR_016': {'F': 62.3715}, 'IR_039': {'VC': 2567.33, 'ALPHA': 0.9956, 'BETA': 3.41}, 'WV_062': {'VC': 1598.103, 'ALPHA': 0.9962, 'BETA': 2.218}, 'WV_073': {'VC': 1362.081, 'ALPHA': 0.9991, 'BETA': 0.478}, 'IR_087': {'VC': 1149.069, 'ALPHA': 0.9996, 'BETA': 0.179}, 'IR_097': {'VC': 1034.343, 'ALPHA': 0.9999, 'BETA': 0.06}, 'IR_108': {'VC': 
930.647, 'ALPHA': 0.9983, 'BETA': 0.625}, 'IR_120': {'VC': 839.66, 'ALPHA': 0.9988, 'BETA': 0.397}, 'IR_134': {'VC': 752.387, 'ALPHA': 0.9981, 'BETA': 0.578}} # Meteosat 9 CALIB[322] = {'HRV': {'F': 79.0113}, 'VIS006': {'F': 65.2065}, 'VIS008': {'F': 73.1869}, 'IR_016': {'F': 61.9923}, 'IR_039': {'VC': 2568.832, 'ALPHA': 0.9954, 'BETA': 3.438}, 'WV_062': {'VC': 1600.548, 'ALPHA': 0.9963, 'BETA': 2.185}, 'WV_073': {'VC': 1360.330, 'ALPHA': 0.9991, 'BETA': 0.47}, 'IR_087': {'VC': 1148.620, 'ALPHA': 0.9996, 'BETA': 0.179}, 'IR_097': {'VC': 1035.289, 'ALPHA': 0.9999, 'BETA': 0.056}, 'IR_108': {'VC': 931.7, 'ALPHA': 0.9983, 'BETA': 0.64}, 'IR_120': {'VC': 836.445, 'ALPHA': 0.9988, 'BETA': 0.408}, 'IR_134': {'VC': 751.792, 'ALPHA': 0.9981, 'BETA': 0.561}} # Meteosat 10 CALIB[323] = {'HRV': {'F': 78.9416}, 'VIS006': {'F': 65.5148}, 'VIS008': {'F': 73.1807}, 'IR_016': {'F': 62.0208}, 'IR_039': {'VC': 2547.771, 'ALPHA': 0.9915, 'BETA': 2.9002}, 'WV_062': {'VC': 1595.621, 'ALPHA': 0.9960, 'BETA': 2.0337}, 'WV_073': {'VC': 1360.337, 'ALPHA': 0.9991, 'BETA': 0.4340}, 'IR_087': {'VC': 1148.130, 'ALPHA': 0.9996, 'BETA': 0.1714}, 'IR_097': {'VC': 1034.715, 'ALPHA': 0.9999, 'BETA': 0.0527}, 'IR_108': {'VC': 929.842, 'ALPHA': 0.9983, 'BETA': 0.6084}, 'IR_120': {'VC': 838.659, 'ALPHA': 0.9988, 'BETA': 0.3882}, 'IR_134': {'VC': 750.653, 'ALPHA': 0.9982, 'BETA': 0.5390}} # Meteosat 11 CALIB[324] = {'HRV': {'F': 79.0035}, 'VIS006': {'F': 65.2656}, 'VIS008': {'F': 73.1692}, 'IR_016': {'F': 61.9416}, 'IR_039': {'VC': 2555.280, 'ALPHA': 0.9916, 'BETA': 2.9438}, 'WV_062': {'VC': 1596.080, 'ALPHA': 0.9959, 'BETA': 2.0780}, 'WV_073': {'VC': 1361.748, 'ALPHA': 0.9990, 'BETA': 0.4929}, 'IR_087': {'VC': 1147.433, 'ALPHA': 0.9996, 'BETA': 0.1731}, 'IR_097': {'VC': 1034.851, 'ALPHA': 0.9998, 'BETA': 0.0597}, 'IR_108': {'VC': 931.122, 'ALPHA': 0.9983, 'BETA': 0.6256}, 'IR_120': {'VC': 839.113, 'ALPHA': 0.9988, 'BETA': 0.4002}, 'IR_134': {'VC': 748.585, 'ALPHA': 0.9981, 'BETA': 0.5635}} def get_cds_time(days, msecs): """Compute timestamp given the days since epoch and milliseconds of the day. 1958-01-01 00:00 is interpreted as fill value and will be replaced by NaT (Not a Time). Args: days (int, either scalar or numpy.ndarray): Days since 1958-01-01 msecs (int, either scalar or numpy.ndarray): Milliseconds of the day Returns: numpy.datetime64: Timestamp(s) """ if np.isscalar(days): days = np.array([days], dtype='int64') msecs = np.array([msecs], dtype='int64') time = np.datetime64('1958-01-01').astype('datetime64[ms]') + \ days.astype('timedelta64[D]') + msecs.astype('timedelta64[ms]') time[time == np.datetime64('1958-01-01 00:00')] = np.datetime64("NaT") if len(time) == 1: return time[0] return time def add_scanline_acq_time(dataset, acq_time): """Add scanline acquisition time to the given dataset.""" dataset.coords['acq_time'] = ('y', acq_time) dataset.coords['acq_time'].attrs[ 'long_name'] = 'Mean scanline acquisition time' def dec10216(inbuf): """Decode 10 bits data into 16 bits words. 
:: /* * pack 4 10-bit words in 5 bytes into 4 16-bit words * * 0 1 2 3 4 5 * 01234567890123456789012345678901234567890 * 0 1 2 3 4 */ ip = &in_buffer[i]; op = &out_buffer[j]; op[0] = ip[0]*4 + ip[1]/64; op[1] = (ip[1] & 0x3F)*16 + ip[2]/16; op[2] = (ip[2] & 0x0F)*64 + ip[3]/4; op[3] = (ip[3] & 0x03)*256 +ip[4]; """ arr10 = inbuf.astype(np.uint16) arr16_len = int(len(arr10) * 4 / 5) arr10_len = int((arr16_len * 5) / 4) arr10 = arr10[:arr10_len] # adjust size # dask is slow with indexing arr10_0 = arr10[::5] arr10_1 = arr10[1::5] arr10_2 = arr10[2::5] arr10_3 = arr10[3::5] arr10_4 = arr10[4::5] arr16_0 = (arr10_0 << 2) + (arr10_1 >> 6) arr16_1 = ((arr10_1 & 63) << 4) + (arr10_2 >> 4) arr16_2 = ((arr10_2 & 15) << 6) + (arr10_3 >> 2) arr16_3 = ((arr10_3 & 3) << 8) + arr10_4 arr16 = da.stack([arr16_0, arr16_1, arr16_2, arr16_3], axis=-1).ravel() arr16 = da.rechunk(arr16, arr16.shape[0]) return arr16 class MpefProductHeader(object): """MPEF product header class.""" def get(self): """Return numpy record_array for MPEF product header.""" record = [ ('MPEF_File_Id', np.int16), ('MPEF_Header_Version', np.uint8), ('ManualDissAuthRequest', bool), ('ManualDisseminationAuth', bool), ('DisseminationAuth', bool), ('NominalTime', time_cds_short), ('ProductQuality', np.uint8), ('ProductCompleteness', np.uint8), ('ProductTimeliness', np.uint8), ('ProcessingInstanceId', np.int8), ('ImagesUsed', self.images_used, (4,)), ('BaseAlgorithmVersion', issue_revision), ('ProductAlgorithmVersion', issue_revision), ('InstanceServerName', 'S2'), ('SpacecraftName', 'S2'), ('Mission', 'S3'), ('RectificationLongitude', 'S5'), ('Encoding', 'S1'), ('TerminationSpace', 'S1'), ('EncodingVersion', np.uint16), ('Channel', np.uint8), ('ImageLocation', 'S3'), ('GsicsCalMode', np.bool), ('GsicsCalValidity', np.bool), ('Padding', 'S2'), ('OffsetToData', np.uint32), ('Padding2', 'S9'), ('RepeatCycle', 'S15'), ] return np.dtype(record).newbyteorder('>') @property def images_used(self): """Return structure for images_used.""" record = [ ('Padding1', 'S2'), ('ExpectedImage', time_cds_short), ('ImageReceived', bool), ('Padding2', 'S1'), ('UsedImageStart_Day', np.uint16), ('UsedImageStart_Millsec', np.uint32), ('Padding3', 'S2'), ('UsedImageEnd_Day', np.uint16), ('UsedImageEndt_Millsec', np.uint32), ] return record mpef_product_header = MpefProductHeader().get() class SEVIRICalibrationAlgorithm: """SEVIRI calibration algorithms.""" def __init__(self, platform_id, scan_time): """Initialize the calibration algorithm.""" self._platform_id = platform_id self._scan_time = scan_time def convert_to_radiance(self, data, gain, offset): """Calibrate to radiance.""" data = data.where(data > 0) return (data * gain + offset).clip(0.0, None) def _erads2bt(self, data, channel_name): """Convert effective radiance to brightness temperature.""" cal_info = CALIB[self._platform_id][channel_name] alpha = cal_info["ALPHA"] beta = cal_info["BETA"] wavenumber = CALIB[self._platform_id][channel_name]["VC"] return (self._tl15(data, wavenumber) - beta) / alpha def ir_calibrate(self, data, channel_name, cal_type): """Calibrate to brightness temperature.""" if cal_type == 1: # spectral radiances return self._srads2bt(data, channel_name) elif cal_type == 2: # effective radiances return self._erads2bt(data, channel_name) else: raise NotImplementedError('Unknown calibration type') def _srads2bt(self, data, channel_name): """Convert spectral radiance to brightness temperature.""" a__, b__, c__ = BTFIT[channel_name] wavenumber = CALIB[self._platform_id][channel_name]["VC"] 
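        # Convert the spectral radiance to an equivalent blackbody temperature
        # first, then apply the quadratic spectral-to-effective fit
        # a*T^2 + b*T + c with the channel-specific BTFIT coefficients.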
temp = self._tl15(data, wavenumber) return a__ * temp * temp + b__ * temp + c__ def _tl15(self, data, wavenumber): """Compute the L15 temperature.""" return ((C2 * wavenumber) / np.log((1.0 / data) * C1 * wavenumber ** 3 + 1.0)) def vis_calibrate(self, data, solar_irradiance): """Calibrate to reflectance. This uses the method described in Conversion from radiances to reflectances for SEVIRI warm channels: https://tinyurl.com/y67zhphm """ reflectance = np.pi * data * 100.0 / solar_irradiance return apply_earthsun_distance_correction(reflectance, self._scan_time) class SEVIRICalibrationHandler: """Calibration handler for SEVIRI HRIT-, native- and netCDF-formats. Handles selection of calibration coefficients and calls the appropriate calibration algorithm. """ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): """Initialize the calibration handler.""" self._platform_id = platform_id self._channel_name = channel_name self._coefs = coefs self._calib_mode = calib_mode.upper() self._scan_time = scan_time self._algo = SEVIRICalibrationAlgorithm( platform_id=self._platform_id, scan_time=self._scan_time ) valid_modes = ('NOMINAL', 'GSICS') if self._calib_mode not in valid_modes: raise ValueError( 'Invalid calibration mode: {}. Choose one of {}'.format( self._calib_mode, valid_modes) ) def calibrate(self, data, calibration): """Calibrate the given data.""" if calibration == 'counts': res = data elif calibration in ['radiance', 'reflectance', 'brightness_temperature']: gain, offset = self.get_gain_offset() res = self._algo.convert_to_radiance( data.astype(np.float32), gain, offset ) else: raise ValueError( 'Invalid calibration {} for channel {}'.format( calibration, self._channel_name ) ) if calibration == 'reflectance': solar_irradiance = CALIB[self._platform_id][self._channel_name]["F"] res = self._algo.vis_calibrate(res, solar_irradiance) elif calibration == 'brightness_temperature': res = self._algo.ir_calibrate( res, self._channel_name, self._coefs['radiance_type'] ) return res def get_gain_offset(self): """Get gain & offset for calibration from counts to radiance. Choices for internal coefficients are nominal or GSICS. If no GSICS coefficients are available for a certain channel, fall back to nominal coefficients. External coefficients take precedence over internal coefficients. """ coefs = self._coefs['coefs'] # Select internal coefficients for the given calibration mode internal_gain = coefs['NOMINAL']['gain'] internal_offset = coefs['NOMINAL']['offset'] if self._calib_mode == 'GSICS': gsics_gain = coefs['GSICS']['gain'] gsics_offset = coefs['GSICS']['offset'] * gsics_gain if gsics_gain != 0 and gsics_offset != 0: # If no GSICS coefficients are available for a certain channel, # they are set to zero in the file. internal_gain = gsics_gain internal_offset = gsics_offset # Override with external coefficients, if any. gain = coefs['EXTERNAL'].get('gain', internal_gain) offset = coefs['EXTERNAL'].get('offset', internal_offset) return gain, offset def chebyshev(coefs, time, domain): """Evaluate a Chebyshev Polynomial. Args: coefs (list, np.array): Coefficients defining the polynomial time (int, float): Time where to evaluate the polynomial domain (list, tuple): Domain (or time interval) for which the polynomial is defined: [left, right] Reference: Appendix A in the MSG Level 1.5 Image Data Format Description. 
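    A minimal usage sketch (the coefficients and domain are made up for
    illustration and do not come from any real orbit record)::

        coefs = [42164.0, 0.1, -0.02]   # hypothetical Chebyshev coefficients
        domain = [0, 900]               # hypothetical validity interval
        value = chebyshev(coefs, time=450, domain=domain)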
""" return Chebyshev(coefs, domain=domain)(time) - 0.5 * coefs[0] def chebyshev_3d(coefs, time, domain): """Evaluate Chebyshev Polynomials for three dimensions (x, y, z). Expects the three coefficient sets to be defined in the same domain. Args: coefs: (x, y, z) coefficient sets. time: See :func:`chebyshev` domain: See :func:`chebyshev` Returns: Polynomials evaluated in (x, y, z) dimension. """ x_coefs, y_coefs, z_coefs = coefs x = chebyshev(x_coefs, time, domain) y = chebyshev(y_coefs, time, domain) z = chebyshev(z_coefs, time, domain) return x, y, z class NoValidOrbitParams(Exception): """Exception when validOrbitParameters are missing.""" pass class OrbitPolynomial: """Polynomial encoding the satellite position. Satellite position as a function of time is encoded in the coefficients of an 8th-order Chebyshev polynomial. """ def __init__(self, coefs, start_time, end_time): """Initialize the polynomial.""" self.coefs = coefs self.start_time = start_time self.end_time = end_time def evaluate(self, time): """Get satellite position in earth-centered cartesion coordinates. Args: time: Timestamp where to evaluate the polynomial Returns: Earth-centered cartesion coordinates (x, y, z) in meters """ domain = [np.datetime64(self.start_time).astype('int64'), np.datetime64(self.end_time).astype('int64')] time = np.datetime64(time).astype('int64') x, y, z = chebyshev_3d(self.coefs, time, domain) return x * 1000, y * 1000, z * 1000 # km -> m def __eq__(self, other): """Test equality of two orbit polynomials.""" return ( np.array_equal(self.coefs, np.array(other.coefs)) and self.start_time == other.start_time and self.end_time == other.end_time ) def get_satpos(orbit_polynomial, time, semi_major_axis, semi_minor_axis): """Get satellite position in geodetic coordinates. Args: orbit_polynomial: OrbitPolynomial instance time: Timestamp where to evaluate the polynomial semi_major_axis: Semi-major axis of the ellipsoid semi_minor_axis: Semi-minor axis of the ellipsoid Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ x, y, z = orbit_polynomial.evaluate(time) geocent = pyproj.CRS( proj='geocent', a=semi_major_axis, b=semi_minor_axis, units='m' ) latlong = pyproj.CRS( proj='latlong', a=semi_major_axis, b=semi_minor_axis, units='m' ) transformer = pyproj.Transformer.from_crs(geocent, latlong) lon, lat, alt = transformer.transform(x, y, z) return lon, lat, alt class OrbitPolynomialFinder: """Find orbit polynomial for a given timestamp.""" def __init__(self, orbit_polynomials): """Initialize with the given candidates. Args: orbit_polynomials: Dictionary of orbit polynomials as found in SEVIRI L1B files: .. code-block:: python {'X': x_polynomials, 'Y': y_polynomials, 'Z': z_polynomials, 'StartTime': polynomials_valid_from, 'EndTime': polynomials_valid_to} """ self.orbit_polynomials = orbit_polynomials # Left/right boundaries of time intervals for which the polynomials are # valid. self.valid_from = orbit_polynomials['StartTime'][0, :].astype( 'datetime64[us]') self.valid_to = orbit_polynomials['EndTime'][0, :].astype( 'datetime64[us]') def get_orbit_polynomial(self, time, max_delta=6): """Get orbit polynomial valid for the given time. Orbit polynomials are only valid for certain time intervals. Find the polynomial, whose corresponding interval encloses the given timestamp. If there are multiple enclosing intervals, use the most recent one. If there is no enclosing interval, find the interval whose centre is closest to the given timestamp (but not more than ``max_delta`` hours apart). 
Why are there gaps between those intervals? Response from EUM: A manoeuvre is a discontinuity in the orbit parameters. The flight dynamic algorithms are not made to interpolate over the time-span of the manoeuvre; hence we have elements describing the orbit before a manoeuvre and a new set of elements describing the orbit after the manoeuvre. The flight dynamic products are created so that there is an intentional gap at the time of the manoeuvre. Also the two pre-manoeuvre elements may overlap. But the overlap is not of an issue as both sets of elements describe the same pre-manoeuvre orbit (with negligible variations). """ time = np.datetime64(time) try: match = self._get_enclosing_interval(time) except ValueError: warnings.warn( 'No orbit polynomial valid for {}. Using closest ' 'match.'.format(time) ) match = self._get_closest_interval_within(time, max_delta) return OrbitPolynomial( coefs=( self.orbit_polynomials['X'][match], self.orbit_polynomials['Y'][match], self.orbit_polynomials['Z'][match] ), start_time=self.valid_from[match], end_time=self.valid_to[match] ) def _get_enclosing_interval(self, time): """Find interval enclosing the given timestamp.""" enclosing = np.where( np.logical_and( time >= self.valid_from, time < self.valid_to ) )[0] most_recent = np.argmax(self.valid_from[enclosing]) return enclosing[most_recent] def _get_closest_interval_within(self, time, threshold): """Find interval closest to the given timestamp within a given distance. Args: time: Timestamp of interest threshold: Maximum distance between timestamp and interval center Returns: Index of closest interval """ closest_match, distance = self._get_closest_interval(time) threshold_diff = np.timedelta64(threshold, 'h') if distance < threshold_diff: return closest_match raise NoValidOrbitParams( 'Unable to find orbit coefficients valid for {} +/- {}' 'hours'.format(time, threshold) ) def _get_closest_interval(self, time): """Find interval closest to the given timestamp. Returns: Index of closest interval, distance from its center """ intervals_centre = self.valid_from + 0.5 * ( self.valid_to - self.valid_from ) diffs_us = (time - intervals_centre).astype('i8') closest_match = np.argmin(np.fabs(diffs_us)) distance = abs(intervals_centre[closest_match] - time) return closest_match, distance # def calculate_area_extent(center_point, north, east, south, west, we_offset, ns_offset, column_step, line_step): def calculate_area_extent(area_dict): """Calculate the area extent seen by a geostationary satellite. Args: area_dict: A dictionary containing the required parameters center_point: Center point for the projection north: Northmost row number east: Eastmost column number west: Westmost column number south: Southmost row number column_step: Pixel resulution in meters in east-west direction line_step: Pixel resulution in meters in soutth-north direction [column_offset: Column offset, defaults to 0 if not given] [line_offset: Line offset, defaults to 0 if not given] Returns: tuple: An area extent for the scene defined by the lower left and upper right corners # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be -1856.5 . # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels. 
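    A minimal sketch of a call with hypothetical full-disk VISIR values
    (illustrative only, not authoritative grid parameters)::

        area_dict = {'center_point': 1856,
                     'north': 3712, 'east': 1, 'south': 1, 'west': 3712,
                     'column_step': 3000.4, 'line_step': 3000.4}
        area_extent = calculate_area_extent(area_dict)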
""" center_point = area_dict['center_point'] east = area_dict['east'] west = area_dict['west'] south = area_dict['south'] north = area_dict['north'] column_step = area_dict['column_step'] line_step = area_dict['line_step'] column_offset = area_dict.get('column_offset', 0) line_offset = area_dict.get('line_offset', 0) ll_c = (center_point - east + 0.5 + column_offset) * column_step ll_l = (north - center_point + 0.5 + line_offset) * line_step ur_c = (center_point - west - 0.5 + column_offset) * column_step ur_l = (south - center_point - 0.5 + line_offset) * line_step return (ll_c, ll_l, ur_c, ur_l) def create_coef_dict(coefs_nominal, coefs_gsics, radiance_type, ext_coefs): """Create coefficient dictionary expected by calibration class.""" return { 'coefs': { 'NOMINAL': { 'gain': coefs_nominal[0], 'offset': coefs_nominal[1], }, 'GSICS': { 'gain': coefs_gsics[0], 'offset': coefs_gsics[1] }, 'EXTERNAL': ext_coefs }, 'radiance_type': radiance_type } def get_padding_area(shape, dtype): """Create a padding area filled with no data.""" if np.issubdtype(dtype, np.floating): init_value = np.nan else: init_value = 0 padding_area = da.full(shape, init_value, dtype=dtype, chunks=CHUNK_SIZE) return padding_area def pad_data_horizontally(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: raise IndexError('East and west bounds do not match data shape') padding_east = get_padding_area((nlines, east_bound - 1), data.dtype) padding_west = get_padding_area((nlines, (final_size[1] - west_bound)), data.dtype) return np.hstack((padding_east, data, padding_west)) def pad_data_vertically(data, final_size, south_bound, north_bound): """Pad the data given south and north bounds and the desired size.""" ncols = final_size[1] if north_bound - south_bound != data.shape[0] - 1: raise IndexError('South and north bounds do not match data shape') padding_south = get_padding_area((south_bound - 1, ncols), data.dtype) padding_north = get_padding_area(((final_size[0] - north_bound), ncols), data.dtype) return np.vstack((padding_south, data, padding_north)) satpy-0.34.0/satpy/readers/seviri_l1b_hrit.py000066400000000000000000000706141420401153000211600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""SEVIRI HRIT format reader. Introduction ------------ The ``seviri_l1b_hrit`` reader reads and calibrates MSG-SEVIRI L1.5 image data in HRIT format. The format is explained in the `MSG Level 1.5 Image Data Format Description`_. The files are usually named as follows: .. 
code-block:: none H-000-MSG4__-MSG4________-_________-PRO______-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000001___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000002___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000003___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000004___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000005___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000006___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000007___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000008___-201903011200-__ H-000-MSG4__-MSG4________-_________-EPI______-201903011200-__ Each image is decomposed into 24 segments (files) for the high-resolution-visible (HRV) channel and 8 segments for other visible (VIS) and infrared (IR) channels. Additionally there is one prologue and one epilogue file for the entire scan which contain global metadata valid for all channels. Reader Arguments ---------------- Some arguments can be provided to the reader to change it's behaviour. These are provided through the `Scene` instantiation, eg:: Scene(reader="seviri_l1b_hrit", filenames=fnames, reader_kwargs={'fill_hrv': False}) To see the full list of arguments that can be provided, look into the documentation of :class:`HRITMSGFileHandler`. Example ------- Here is an example how to read the data in satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob('data/H-000-MSG4__-MSG4________-*201903011200*') scn = Scene(filenames=filenames, reader='seviri_l1b_hrit') scn.load(['VIS006', 'IR_108']) print(scn['IR_108']) Output: .. code-block:: none dask.array Coordinates: acq_time (y) datetime64[ns] NaT NaT NaT NaT NaT NaT ... NaT NaT NaT NaT NaT * x (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 * y (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06 Attributes: satellite_longitude: 0.0 satellite_latitude: 0.0 satellite_altitude: 35785831.0 orbital_parameters: {'projection_longitude': 0.0, 'projection_latit... platform_name: Meteosat-11 georef_offset_corrected: True standard_name: brightness_temperature raw_metadata: {'file_type': 0, 'total_header_length': 6198, '... wavelength: (9.8, 10.8, 11.8) units: K sensor: seviri platform_name: Meteosat-11 start_time: 2019-03-01 12:00:09.716000 end_time: 2019-03-01 12:12:42.946000 area: Area ID: some_area_name\\nDescription: On-the-fl... name: IR_108 resolution: 3000.403165817 calibration: brightness_temperature polarization: None level: None modifiers: () ancillary_variables: [] .. 
_MSG Level 1.5 Image Data Format Description: https://www-cdn.eumetsat.int/files/2020-05/pdf_ten_05105_msg_img_data.pdf """ from __future__ import division import copy import logging from datetime import datetime import dask.array as da import numpy as np import xarray as xr from pyresample import geometry import satpy.readers.utils as utils from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_area_extent, get_geos_area_naming from satpy.readers.eum_base import get_service_mode, recarray2dict, time_cds_short from satpy.readers.hrit_base import ( HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function, ) from satpy.readers.seviri_base import ( CHANNEL_NAMES, HRV_NUM_COLUMNS, SATNUM, NoValidOrbitParams, OrbitPolynomialFinder, SEVIRICalibrationHandler, add_scanline_acq_time, create_coef_dict, get_cds_time, get_satpos, pad_data_horizontally, ) from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration logger = logging.getLogger('hrit_msg') # MSG implementation: key_header = np.dtype([('key_number', 'u1'), ('seed', '>f8')]) segment_identification = np.dtype([('GP_SC_ID', '>i2'), ('spectral_channel_id', '>i1'), ('segment_sequence_number', '>u2'), ('planned_start_segment_number', '>u2'), ('planned_end_segment_number', '>u2'), ('data_field_representation', '>i1')]) image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), ('line_mean_acquisition', [('days', '>u2'), ('milliseconds', '>u4')]), ('line_validity', 'u1'), ('line_radiometric_quality', 'u1'), ('line_geometric_quality', 'u1')]) msg_variable_length_headers = { image_segment_line_quality: 'image_segment_line_quality'} msg_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text'} msg_hdr_map = base_hdr_map.copy() msg_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('X', '>f8', (8, )), ('Y', '>f8', (8, )), ('Z', '>f8', (8, )), ('VX', '>f8', (8, )), ('VY', '>f8', (8, )), ('VZ', '>f8', (8, ))]) attitude_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('XofSpinAxis', '>f8', (8, )), ('YofSpinAxis', '>f8', (8, )), ('ZofSpinAxis', '>f8', (8, ))]) cuc_time = np.dtype([('coarse', 'u1', (4, )), ('fine', 'u1', (3, ))]) class HRITMSGPrologueEpilogueBase(HRITFileHandler): """Base reader for prologue and epilogue files.""" def __init__(self, filename, filename_info, filetype_info, hdr_info): """Initialize the file handler for prologue and epilogue files.""" super(HRITMSGPrologueEpilogueBase, self).__init__(filename, filename_info, filetype_info, hdr_info) self._reduced = None def _reduce(self, mda, max_size): """Reduce the metadata.""" if self._reduced is None: self._reduced = utils.reduce_mda(mda, max_size=max_size) return self._reduced def reduce(self, max_size): """Reduce the metadata (placeholder).""" raise NotImplementedError class HRITMSGPrologueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT prologue reader.""" def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None): """Initialize the reader.""" with utils.unzip_context(filename) as fn: if fn is not None: self.filename = fn super(HRITMSGPrologueFileHandler, self).__init__(self.filename, 
filename_info, filetype_info, (msg_hdr_map, msg_variable_length_headers, msg_text_headers)) self.prologue = {} self.read_prologue() service = filename_info['service'] if service == '': self.mda['service'] = '0DEG' else: self.mda['service'] = service def read_prologue(self): """Read the prologue metadata.""" with open(self.filename, "rb") as fp_: fp_.seek(self.mda['total_header_length']) data = np.fromfile(fp_, dtype=hrit_prologue, count=1) self.prologue.update(recarray2dict(data)) try: impf = np.fromfile(fp_, dtype=impf_configuration, count=1)[0] except IndexError: logger.info('No IMPF configuration field found in prologue.') else: self.prologue.update(recarray2dict(impf)) @cached_property def satpos(self): """Get actual satellite position in geodetic coordinates (WGS-84). Evaluate orbit polynomials at the start time of the scan. Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ a, b = self.get_earth_radii() start_time = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] poly_finder = OrbitPolynomialFinder(self.prologue['SatelliteStatus'][ 'Orbit']['OrbitPolynomial']) orbit_polynomial = poly_finder.get_orbit_polynomial(start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=start_time, semi_major_axis=a, semi_minor_axis=b, ) def get_earth_radii(self): """Get earth radii from prologue. Returns: Equatorial radius, polar radius [m] """ earth_model = self.prologue['GeometricProcessing']['EarthModel'] a = earth_model['EquatorialRadius'] * 1000 b = (earth_model['NorthPolarRadius'] + earth_model['SouthPolarRadius']) / 2.0 * 1000 return a, b def reduce(self, max_size): """Reduce the prologue metadata.""" return self._reduce(self.prologue, max_size=max_size) class HRITMSGEpilogueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT epilogue reader.""" def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None): """Initialize the reader.""" with utils.unzip_context(filename) as fn: if fn is not None: self.filename = fn super(HRITMSGEpilogueFileHandler, self).__init__(self.filename, filename_info, filetype_info, (msg_hdr_map, msg_variable_length_headers, msg_text_headers)) self.epilogue = {} self.read_epilogue() service = filename_info['service'] if service == '': self.mda['service'] = '0DEG' else: self.mda['service'] = service def read_epilogue(self): """Read the epilogue metadata.""" with open(self.filename, "rb") as fp_: fp_.seek(self.mda['total_header_length']) data = np.fromfile(fp_, dtype=hrit_epilogue, count=1) self.epilogue.update(recarray2dict(data)) def reduce(self, max_size): """Reduce the epilogue metadata.""" return self._reduce(self.epilogue, max_size=max_size) class HRITMSGFileHandler(HRITFileHandler): """SEVIRI HRIT format reader. **Calibration** See :mod:`satpy.readers.seviri_base`. **Padding of the HRV channel** By default, the HRV channel is loaded padded with no-data, that is it is returned as a full-disk dataset. If you want the original, unpadded, data, just provide the `fill_hrv` as False in the `reader_kwargs`:: scene = satpy.Scene(filenames, reader='seviri_l1b_hrit', reader_kwargs={'fill_hrv': False}) **Metadata** See :mod:`satpy.readers.seviri_base`. 
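    **Raw metadata**

    The raw file header can be included in the dataset attributes via the
    reader keyword arguments documented in :mod:`satpy.readers.seviri_base`,
    for example::

        scene = satpy.Scene(filenames,
                            reader='seviri_l1b_hrit',
                            reader_kwargs={'include_raw_metadata': True,
                                           'mda_max_array_size': 1000})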
""" def __init__(self, filename, filename_info, filetype_info, prologue, epilogue, calib_mode='nominal', ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100, fill_hrv=True): """Initialize the reader.""" super(HRITMSGFileHandler, self).__init__(filename, filename_info, filetype_info, (msg_hdr_map, msg_variable_length_headers, msg_text_headers)) self.prologue_ = prologue self.epilogue_ = epilogue self.prologue = prologue.prologue self.epilogue = epilogue.epilogue self._filename_info = filename_info self.include_raw_metadata = include_raw_metadata self.mda_max_array_size = mda_max_array_size self.fill_hrv = fill_hrv self.calib_mode = calib_mode self.ext_calib_coefs = ext_calib_coefs or {} self._get_header() def _get_header(self): """Read the header info, and fill the metadata dictionary.""" earth_model = self.prologue['GeometricProcessing']['EarthModel'] self.mda['offset_corrected'] = earth_model['TypeOfEarthModel'] == 2 # Projection a, b = self.prologue_.get_earth_radii() self.mda['projection_parameters']['a'] = a self.mda['projection_parameters']['b'] = b ssp = self.prologue['ImageDescription'][ 'ProjectionDescription']['LongitudeOfSSP'] self.mda['projection_parameters']['SSP_longitude'] = ssp self.mda['projection_parameters']['SSP_latitude'] = 0.0 # Orbital parameters self.mda['orbital_parameters']['satellite_nominal_longitude'] = self.prologue['SatelliteStatus'][ 'SatelliteDefinition']['NominalLongitude'] self.mda['orbital_parameters']['satellite_nominal_latitude'] = 0.0 try: actual_lon, actual_lat, actual_alt = self.prologue_.satpos self.mda['orbital_parameters']['satellite_actual_longitude'] = actual_lon self.mda['orbital_parameters']['satellite_actual_latitude'] = actual_lat self.mda['orbital_parameters']['satellite_actual_altitude'] = actual_alt except NoValidOrbitParams as err: logger.warning(err) # Misc self.platform_id = self.prologue["SatelliteStatus"][ "SatelliteDefinition"]["SatelliteId"] self.platform_name = "Meteosat-" + SATNUM[self.platform_id] self.mda['platform_name'] = self.platform_name service = self._filename_info['service'] if service == '': self.mda['service'] = '0DEG' else: self.mda['service'] = service self.channel_name = CHANNEL_NAMES[self.mda['spectral_channel_id']] @property def start_time(self): """Get the start time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanStart'] @property def end_time(self): """Get the end time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanEnd'] def _get_area_extent(self, pdict): """Get the area extent of the file. Until December 2017, the data is shifted by 1.5km SSP North and West against the nominal GEOS projection. Since December 2017 this offset has been corrected. A flag in the data indicates if the correction has been applied. If no correction was applied, adjust the area extent to match the shifted data. For more information see Section 3.1.4.2 in the MSG Level 1.5 Image Data Format Description. The correction of the area extent is documented in a `developer's memo `_. """ aex = get_area_extent(pdict) if not self.mda['offset_corrected']: # Geo-referencing offset present. Adjust area extent to match the shifted data. Note that we have to adjust # the corners in the *opposite* direction, i.e. S-E. Think of it as if the coastlines were fixed and you # dragged the image to S-E until coastlines and data area aligned correctly. 
# # Although the image is flipped upside-down and left-right, the projection coordinates retain their # properties, i.e. positive x/y is East/North, respectively. xadj = 1500 yadj = -1500 aex = (aex[0] + xadj, aex[1] + yadj, aex[2] + xadj, aex[3] + yadj) return aex def get_area_def(self, dsid): """Get the area definition of the band.""" # Common parameters for both HRV and other channels nlines = int(self.mda['number_of_lines']) loff = np.float32(self.mda['loff']) pdict = {} pdict['cfac'] = np.int32(self.mda['cfac']) pdict['lfac'] = np.int32(self.mda['lfac']) pdict['coff'] = np.float32(self.mda['coff']) pdict['a'] = self.mda['projection_parameters']['a'] pdict['b'] = self.mda['projection_parameters']['b'] pdict['h'] = self.mda['projection_parameters']['h'] pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] pdict['nlines'] = nlines pdict['ncols'] = int(self.mda['number_of_columns']) if (self.prologue['ImageDescription']['Level15ImageProduction'] ['ImageProcDirection'] == 0): pdict['scandir'] = 'N2S' else: pdict['scandir'] = 'S2N' area_naming_input_dict = {'platform_name': 'msg', 'instrument_name': 'seviri', 'resolution': int(dsid['resolution']) } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode('seviri', pdict['ssp_lon'])}) # Compute area definition for non-HRV channels: if dsid['name'] != 'HRV': pdict['loff'] = loff - nlines aex = self._get_area_extent(pdict) pdict['a_name'] = area_naming['area_id'] pdict['a_desc'] = area_naming['description'] pdict['p_id'] = "" area = get_area_definition(pdict, aex) self.area = area return self.area segment_number = self.mda['segment_sequence_number'] current_first_line = ((segment_number - self.mda['planned_start_segment_number']) * pdict['nlines']) # Or, if we are processing HRV: pdict['a_name'] = area_naming['area_id'] pdict['p_id'] = "" bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'].copy() if self.fill_hrv: bounds['UpperEastColumnActual'] = 1 bounds['UpperWestColumnActual'] = HRV_NUM_COLUMNS bounds['LowerEastColumnActual'] = 1 bounds['LowerWestColumnActual'] = HRV_NUM_COLUMNS pdict['ncols'] = HRV_NUM_COLUMNS upper_south_line = bounds[ 'LowerNorthLineActual'] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), pdict['nlines']) lower_coff = (5566 - bounds['LowerEastColumnActual'] + 1) upper_coff = (5566 - bounds['UpperEastColumnActual'] + 1) # First we look at the lower window pdict['nlines'] = upper_south_line pdict['loff'] = loff - upper_south_line pdict['coff'] = lower_coff pdict['a_desc'] = area_naming['description'] lower_area_extent = self._get_area_extent(pdict) lower_area = get_area_definition(pdict, lower_area_extent) # Now the upper window pdict['nlines'] = nlines - upper_south_line pdict['loff'] = loff - pdict['nlines'] - upper_south_line pdict['coff'] = upper_coff pdict['a_desc'] = area_naming['description'] upper_area_extent = self._get_area_extent(pdict) upper_area = get_area_definition(pdict, upper_area_extent) area = geometry.StackedAreaDefinition(lower_area, upper_area) self.area = area.squeeze() return self.area def get_dataset(self, key, info): """Get the dataset.""" res = super(HRITMSGFileHandler, self).get_dataset(key, info) res = self.calibrate(res, key['calibration']) if key['name'] == 'HRV' and self.fill_hrv: res = self.pad_hrv_data(res) self._update_attrs(res, info) self._add_scanline_acq_time(res) return res def pad_hrv_data(self, res): """Add empty pixels around the HRV.""" logger.debug('Padding HRV data to full disk') nlines = 
int(self.mda['number_of_lines']) segment_number = self.mda['segment_sequence_number'] current_first_line = (segment_number - self.mda['planned_start_segment_number']) * nlines bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'] upper_south_line = bounds[ 'LowerNorthLineActual'] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), nlines) data_list = list() if upper_south_line > 0: # we have some of the lower window data_lower = pad_data_horizontally(res[:upper_south_line, :].data, (upper_south_line, HRV_NUM_COLUMNS), bounds['LowerEastColumnActual'], bounds['LowerWestColumnActual']) data_list.append(data_lower) if upper_south_line < nlines: # we have some of the upper window data_upper = pad_data_horizontally(res[upper_south_line:, :].data, (nlines - upper_south_line, HRV_NUM_COLUMNS), bounds['UpperEastColumnActual'], bounds['UpperWestColumnActual']) data_list.append(data_upper) return xr.DataArray(da.vstack(data_list), dims=('y', 'x'), attrs=res.attrs.copy()) def calibrate(self, data, calibration): """Calibrate the data.""" tic = datetime.now() calib = SEVIRICalibrationHandler( platform_id=self.platform_id, channel_name=self.channel_name, coefs=self._get_calib_coefs(self.channel_name), calib_mode=self.calib_mode, scan_time=self.start_time ) res = calib.calibrate(data, calibration) if calibration in ['radiance', 'reflectance', 'brightness_temperature']: res = self._mask_bad_quality(res) logger.debug("Calibration time " + str(datetime.now() - tic)) return res def _mask_bad_quality(self, data): """Mask scanlines with bad quality.""" # Based on missing (2) or corrupted (3) data line_mask = self.mda['image_segment_line_quality']['line_validity'] >= 2 line_mask &= self.mda['image_segment_line_quality']['line_validity'] <= 3 # Do not use (4) line_mask &= self.mda['image_segment_line_quality']['line_radiometric_quality'] == 4 line_mask &= self.mda['image_segment_line_quality']['line_geometric_quality'] == 4 data *= np.choose(line_mask, [1, np.nan])[:, np.newaxis].astype(np.float32) return data def _get_raw_mda(self): """Compile raw metadata to be included in the dataset attributes.""" # Metadata from segment header (excluding items which vary among the different segments) raw_mda = copy.deepcopy(self.mda) for key in ('image_segment_line_quality', 'segment_sequence_number', 'annotation_header', 'loff'): raw_mda.pop(key, None) # Metadata from prologue and epilogue (large arrays removed) raw_mda.update(self.prologue_.reduce(self.mda_max_array_size)) raw_mda.update(self.epilogue_.reduce(self.mda_max_array_size)) return raw_mda def _add_scanline_acq_time(self, dataset): """Add scanline acquisition time to the given dataset.""" tline = self.mda['image_segment_line_quality']['line_mean_acquisition'] acq_time = get_cds_time(days=tline['days'], msecs=tline['milliseconds']) add_scanline_acq_time(dataset, acq_time) def _update_attrs(self, res, info): """Update dataset attributes.""" res.attrs['units'] = info['units'] res.attrs['wavelength'] = info['wavelength'] res.attrs['standard_name'] = info['standard_name'] res.attrs['platform_name'] = self.platform_name res.attrs['sensor'] = 'seviri' res.attrs['satellite_longitude'] = self.mda[ 'projection_parameters']['SSP_longitude'] res.attrs['satellite_latitude'] = self.mda[ 'projection_parameters']['SSP_latitude'] res.attrs['satellite_altitude'] = self.mda['projection_parameters']['h'] res.attrs['orbital_parameters'] = { 'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], 'projection_latitude': 
self.mda['projection_parameters']['SSP_latitude'], 'projection_altitude': self.mda['projection_parameters']['h']} res.attrs['orbital_parameters'].update(self.mda['orbital_parameters']) res.attrs['georef_offset_corrected'] = self.mda['offset_corrected'] if self.include_raw_metadata: res.attrs['raw_metadata'] = self._get_raw_mda() def _get_calib_coefs(self, channel_name): """Get coefficients for calibration from counts to radiance.""" band_idx = self.mda['spectral_channel_id'] - 1 coefs_nominal = self.prologue["RadiometricProcessing"][ "Level15ImageCalibration"] coefs_gsics = self.prologue["RadiometricProcessing"]['MPEFCalFeedback'] radiance_types = self.prologue['ImageDescription'][ 'Level15ImageProduction']['PlannedChanProcessing'] return create_coef_dict( coefs_nominal=( coefs_nominal['CalSlope'][band_idx], coefs_nominal['CalOffset'][band_idx] ), coefs_gsics=( coefs_gsics['GSICSCalCoeff'][band_idx], coefs_gsics['GSICSOffsetCount'][band_idx] ), ext_coefs=self.ext_calib_coefs.get(channel_name, {}), radiance_type=radiance_types[band_idx] ) def pad_data(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: raise IndexError('East and west bounds do not match data shape') padding_east = da.zeros((nlines, east_bound - 1), dtype=data.dtype, chunks=CHUNK_SIZE) padding_west = da.zeros((nlines, (final_size[1] - west_bound)), dtype=data.dtype, chunks=CHUNK_SIZE) if np.issubdtype(data.dtype, np.floating): padding_east = padding_east * np.nan padding_west = padding_west * np.nan return np.hstack((padding_east, data, padding_west)) satpy-0.34.0/satpy/readers/seviri_l1b_icare.py000066400000000000000000000246401420401153000212730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""Interface to SEVIRI L1B data from ICARE (Lille). Introduction ------------ The ``seviri_l1b_icare`` reader reads MSG-SEVIRI L1.5 image data in HDF format that has been produced by the ICARE Data and Services Center Data can be accessed via: http://www.icare.univ-lille1.fr Each SEVIRI timeslot comes as 12 HDF files, one per band. Only those bands that are of interest need to be passed to the reader. Others can be ignored. Filenames follow the format: GEO_L1B-MSG1_YYYY-MM-DDTHH-MM-SS_G_CHANN_VX-XX.hdf Where: YYYY, MM, DD, HH, MM, SS specify the timeslot starting time. CHANN is the channel (i.e: HRV, IR016, WV073, etc) VX-XX is the processing version number Example ------- Here is an example how to read the data in satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob('data/*2019-03-01T12-00-00*.hdf') scn = Scene(filenames=filenames, reader='seviri_l1b_icare') scn.load(['VIS006', 'IR_108']) print(scn['IR_108']) Output: .. 
code-block:: none dask.array Coordinates: crs object +proj=geos +a=6378169.0 +b=6356583.8 +lon_0=0.0 +h=35785831.0 +units=m +type=crs * y (y) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 * x (x) float64 -5.566e+06 -5.563e+06 -5.56e+06 ... 5.566e+06 5.569e+06 Attributes: start_time: 2004-12-29 12:15:00 end_time: 2004-12-29 12:27:44 area: Area ID: geosmsg\nDescription: MSG/SEVIRI low resol... name: IR_108 resolution: 3000.403165817 calibration: brightness_temperature polarization: None level: None modifiers: () ancillary_variables: [] """ from datetime import datetime import numpy as np from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.hdf4_utils import HDF4FileHandler class SEVIRI_ICARE(HDF4FileHandler): """SEVIRI L1B handler for HDF4 files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(SEVIRI_ICARE, self).__init__(filename, filename_info, filetype_info) # These are VIS bands self.ref_bands = ['HRV', 'VIS006', 'VIS008', 'IR_016'] # And these are IR bands self.bt_bands = ['IR_039', 'IR_062', 'IR_073', 'IR_087', 'IR_097', 'IR_108', 'IR_120', 'IR_134', 'WV_062', 'WV_073'] @property def sensor_name(self): """Get the sensor name.""" # the sensor and platform names are stored together, eg: MSG1/SEVIRI attr = self['/attr/Sensors'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() else: attr = attr.lower() plat = attr[0:4] sens = attr[5:] # icare uses non-standard platform names if plat == 'msg1': plat = 'Meteosat-08' elif plat == 'msg2': plat = 'Meteosat-09' elif plat == 'msg3': plat = 'Meteosat-10' elif plat == 'msg4': plat = 'Meteosat-11' else: raise NameError("Unsupported satellite platform:"+plat) return [plat, sens] @property def satlon(self): """Get the satellite longitude.""" attr = self['/attr/Sub_Satellite_Longitude'] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @property def projlon(self): """Get the projection longitude.""" attr = self['/attr/Projection_Longitude'] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @property def projection(self): """Get the projection.""" attr = self['/attr/Geographic_Projection'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) attr = attr.lower() if attr != 'geos': raise NotImplementedError("Only the GEOS projection is supported.\ This is:", attr) return attr @property def zone(self): """Get the zone.""" attr = self['/attr/Zone'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return attr @property def res(self): """Get the resolution.""" attr = self['/attr/Nadir_Pixel_Size'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return float(attr) @property def end_time(self): """Get the end time.""" attr = self['/attr/End_Acquisition_Date'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. try: endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return endacq @property def start_time(self): """Get the start time.""" attr = self['/attr/Beginning_Acquisition_Date'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. 
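        # Illustration only (hypothetical attribute values): a string such as
        # "2019-03-01T12:00:00Z" parses with the first pattern below, while
        # "2019-03-01T12:00:00.000000Z" needs the "%S.%fZ" variant.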
try: stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return stacq @property def alt(self): """Get the altitude.""" attr = self['/attr/Altitude'] if isinstance(attr, np.ndarray): attr = attr.astype(str) attr = float(attr) # This is stored in km, convert to m attr = attr * 1000. return attr @property def geoloc(self): """Get the geolocation.""" attr = self['/attr/Geolocation'] if isinstance(attr, np.ndarray): attr = attr.astype(str) cfac = float(attr[0]) coff = float(attr[1]) lfac = float(attr[2]) loff = float(attr[3]) return [cfac, lfac, coff, loff] def get_metadata(self, data, ds_info): """Get the metadata.""" mda = {} mda.update(data.attrs) mda.update(ds_info) geoloc = self.geoloc mda.update({ 'start_time': self.start_time, 'end_time': self.end_time, 'platform_name': self.sensor_name[0], 'sensor': self.sensor_name[1], 'zone': self.zone, 'projection_altitude': self.alt, 'cfac': geoloc[0], 'lfac': geoloc[1], 'coff': geoloc[2], 'loff': geoloc[3], 'resolution': self.res, 'satellite_actual_longitude': self.satlon, 'projection_longitude': self.projlon, 'projection_type': self.projection }) return mda def _get_dsname(self, ds_id): """Return the correct dataset name based on requested band.""" if ds_id['name'] in self.ref_bands: ds_get_name = 'Normalized_Radiance' elif ds_id['name'] in self.bt_bands: ds_get_name = 'Brightness_Temperature' else: raise NameError("Datset type "+ds_id['name']+" is not supported.") return ds_get_name def get_dataset(self, ds_id, ds_info): """Get the dataset.""" ds_get_name = self._get_dsname(ds_id) data = self[ds_get_name] data.attrs = self.get_metadata(data, ds_info) fill = data.attrs.pop('_FillValue') offset = data.attrs.get('add_offset') scale_factor = data.attrs.get('scale_factor') data = data.where(data != fill) data.values = data.values.astype(np.float32) if scale_factor is not None and offset is not None: data.values *= scale_factor data.values += offset # Now we correct range from 0-1 to 0-100 for VIS: if ds_id['name'] in self.ref_bands: data.values *= 100. return data def get_area_def(self, ds_id): """Get the area def.""" ds_get_name = self._get_dsname(ds_id) ds_shape = self[ds_get_name + '/shape'] geoloc = self.geoloc pdict = {} pdict['cfac'] = np.int32(geoloc[0]) pdict['lfac'] = np.int32(geoloc[1]) pdict['coff'] = np.float32(geoloc[2]) pdict['loff'] = -np.float32(geoloc[3]) # Unfortunately this dataset does not store a, b or h. # We assume a and b here, and calculate h from altitude # a and b are from SEVIRI data HRIT header (201912101300) pdict['a'] = 6378169 pdict['b'] = 6356583.8 pdict['h'] = self.alt - pdict['a'] pdict['ssp_lon'] = self.projlon pdict['ncols'] = int(ds_shape[0]) pdict['nlines'] = int(ds_shape[1]) # Force scandir to SEVIRI default, not known from file pdict['scandir'] = 'S2N' pdict['a_name'] = 'geosmsg' if ds_id['name'] == 'HRV': pdict['a_desc'] = 'MSG/SEVIRI HRV channel area' pdict['p_id'] = 'msg_hires' else: pdict['a_desc'] = 'MSG/SEVIRI low resolution channel area' pdict['p_id'] = 'msg_lowres' aex = get_area_extent(pdict) area = get_area_definition(pdict, aex) return area satpy-0.34.0/satpy/readers/seviri_l1b_native.py000066400000000000000000000771461420401153000215070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SEVIRI native format reader. References: - `MSG Level 1.5 Native Format File Definition`_ .. _MSG Level 1.5 Native Format File Definition: https://www-cdn.eumetsat.int/files/2020-04/pdf_fg15_msg-native-format-15.pdf """ import logging from datetime import datetime import dask.array as da import numpy as np import xarray as xr from pyresample import geometry from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_geos_area_naming from satpy.readers.eum_base import get_service_mode, recarray2dict, time_cds_short from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import ( CHANNEL_NAMES, HRV_NUM_COLUMNS, HRV_NUM_LINES, SATNUM, VISIR_NUM_COLUMNS, VISIR_NUM_LINES, NoValidOrbitParams, OrbitPolynomialFinder, SEVIRICalibrationHandler, add_scanline_acq_time, calculate_area_extent, create_coef_dict, dec10216, get_cds_time, get_satpos, pad_data_horizontally, pad_data_vertically, ) from satpy.readers.seviri_l1b_native_hdr import ( DEFAULT_15_SECONDARY_PRODUCT_HEADER, GSDTRecords, get_native_header, native_trailer, ) from satpy.readers.utils import reduce_mda logger = logging.getLogger('native_msg') class NativeMSGFileHandler(BaseFileHandler): """SEVIRI native format reader. **Calibration** See :mod:`satpy.readers.seviri_base`. **Padding channel data to full disk** By providing the `fill_disk` as True in the `reader_kwargs`, the channel is loaded as full disk, padded with no-data where necessary. This is especially useful for the HRV channel, but can also be used for RSS and ROI data. By default the original, unpadded, data are loaded:: scene = satpy.Scene(filenames, reader='seviri_l1b_native', reader_kwargs={'fill_disk': False}) **Metadata** See :mod:`satpy.readers.seviri_base`. """ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', fill_disk=False, ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100): """Initialize the reader.""" super(NativeMSGFileHandler, self).__init__(filename, filename_info, filetype_info) self.platform_name = None self.calib_mode = calib_mode self.ext_calib_coefs = ext_calib_coefs or {} self.fill_disk = fill_disk self.include_raw_metadata = include_raw_metadata self.mda_max_array_size = mda_max_array_size # Declare required variables. 
self.header = {} self.mda = {} self.trailer = {} # Read header, prepare dask-array, read trailer and initialize image boundaries # Available channels are known only after the header has been read self.header_type = get_native_header(self._has_archive_header()) self._read_header() self.dask_array = da.from_array(self._get_memmap(), chunks=(CHUNK_SIZE,)) self._read_trailer() self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda) def _has_archive_header(self): """Check whether the file includes an ASCII archive header.""" ascii_startswith = b'FormatName : NATIVE' with open(self.filename, mode='rb') as istream: return istream.read(36) == ascii_startswith @property def start_time(self): """Read the repeat cycle start time from metadata.""" return self.header['15_DATA_HEADER']['ImageAcquisition'][ 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] @property def end_time(self): """Read the repeat cycle end time from metadata.""" return self.header['15_DATA_HEADER']['ImageAcquisition'][ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] def _get_data_dtype(self): """Get the dtype of the file based on the actual available channels.""" pkhrec = [ ('GP_PK_HEADER', GSDTRecords.gp_pk_header), ('GP_PK_SH1', GSDTRecords.gp_pk_sh1) ] pk_head_dtype = np.dtype(pkhrec) def get_lrec(cols): lrec = [ ("gp_pk", pk_head_dtype), ("version", np.uint8), ("satid", np.uint16), ("time", (np.uint16, 5)), ("lineno", np.uint32), ("chan_id", np.uint8), ("acq_time", time_cds_short), ("line_validity", np.uint8), ("line_rquality", np.uint8), ("line_gquality", np.uint8), ("line_data", (np.uint8, cols)) ] return lrec # each pixel is 10-bits -> one line of data has 25% more bytes # than the number of columns suggest (10/8 = 1.25) visir_rec = get_lrec(int(self.mda['number_of_columns'] * 1.25)) number_of_visir_channels = len( [s for s in self.mda['channel_list'] if not s == 'HRV']) drec = [('visir', (visir_rec, number_of_visir_channels))] if self.mda['available_channels']['HRV']: hrv_rec = get_lrec(int(self.mda['hrv_number_of_columns'] * 1.25)) drec.append(('hrv', (hrv_rec, 3))) return np.dtype(drec) def _get_memmap(self): """Get the memory map for the SEVIRI data.""" with open(self.filename) as fp: data_dtype = self._get_data_dtype() hdr_size = self.header_type.itemsize return np.memmap(fp, dtype=data_dtype, shape=(self.mda['number_of_lines'],), offset=hdr_size, mode="r") def _read_header(self): """Read the header info.""" data = np.fromfile(self.filename, dtype=self.header_type, count=1) self.header.update(recarray2dict(data)) if '15_SECONDARY_PRODUCT_HEADER' not in self.header: # No archive header, that means we have a complete file # including all channels. self.header['15_SECONDARY_PRODUCT_HEADER'] = DEFAULT_15_SECONDARY_PRODUCT_HEADER data15hd = self.header['15_DATA_HEADER'] sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER'] # Set the list of available channels: self.mda['available_channels'] = get_available_channels(self.header) self.mda['channel_list'] = [i for i in CHANNEL_NAMES.values() if self.mda['available_channels'][i]] self.platform_id = data15hd[ 'SatelliteStatus']['SatelliteDefinition']['SatelliteId'] self.mda['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] self.mda['offset_corrected'] = data15hd['GeometricProcessing'][ 'EarthModel']['TypeOfEarthModel'] == 2 equator_radius = data15hd['GeometricProcessing'][ 'EarthModel']['EquatorialRadius'] * 1000. north_polar_radius = data15hd[ 'GeometricProcessing']['EarthModel']['NorthPolarRadius'] * 1000. 
south_polar_radius = data15hd[ 'GeometricProcessing']['EarthModel']['SouthPolarRadius'] * 1000. polar_radius = (north_polar_radius + south_polar_radius) * 0.5 ssp_lon = data15hd['ImageDescription'][ 'ProjectionDescription']['LongitudeOfSSP'] self.mda['projection_parameters'] = {'a': equator_radius, 'b': polar_radius, 'h': 35785831.00, 'ssp_longitude': ssp_lon} north = int(sec15hd['NorthLineSelectedRectangle']['Value']) east = int(sec15hd['EastColumnSelectedRectangle']['Value']) south = int(sec15hd['SouthLineSelectedRectangle']['Value']) west = int(sec15hd['WestColumnSelectedRectangle']['Value']) ncolumns = west - east + 1 nrows = north - south + 1 # check if the file has less rows or columns than # the maximum, if so it is a rapid scanning service # or region of interest file if (nrows < VISIR_NUM_LINES) or (ncolumns < VISIR_NUM_COLUMNS): self.mda['is_full_disk'] = False else: self.mda['is_full_disk'] = True # If the number of columns in the file is not divisible by 4, # UMARF will add extra columns to the file modulo = ncolumns % 4 padding = 0 if modulo > 0: padding = 4 - modulo cols_visir = ncolumns + padding # Check the VISIR calculated column dimension against # the header information cols_visir_hdr = int(sec15hd['NumberColumnsVISIR']['Value']) if cols_visir_hdr != cols_visir: logger.warning( "Number of VISIR columns from the header is incorrect!") logger.warning("Header: %d", cols_visir_hdr) logger.warning("Calculated: = %d", cols_visir) # HRV Channel - check if the area is reduced in east west # direction as this affects the number of columns in the file cols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value']) if ncolumns < VISIR_NUM_COLUMNS: cols_hrv = cols_hrv_hdr else: cols_hrv = int(cols_hrv_hdr / 2) # self.mda represents the 16bit dimensions not 10bit self.mda['number_of_lines'] = int(sec15hd['NumberLinesVISIR']['Value']) self.mda['number_of_columns'] = cols_visir self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value']) self.mda['hrv_number_of_columns'] = cols_hrv def _read_trailer(self): hdr_size = self.header_type.itemsize data_size = (self._get_data_dtype().itemsize * self.mda['number_of_lines']) with open(self.filename) as fp: fp.seek(hdr_size + data_size) data = np.fromfile(fp, dtype=native_trailer, count=1) self.trailer.update(recarray2dict(data)) def get_area_def(self, dataset_id): """Get the area definition of the band. In general, image data from one window/area is available. For the HRV channel in FES mode, however, data from two windows ('Lower' and 'Upper') are available. Hence, we collect lists of area-extents and corresponding number of image lines/columns. In case of FES HRV data, two area definitions are computed, stacked and squeezed. For other cases, the lists will only have one entry each, from which a single area definition is computed. Note that the AreaDefinition area extents returned by this function for Native data will be slightly different compared to the area extents returned by the SEVIRI HRIT reader. This is due to slightly different pixel size values when calculated using the data available in the files. E.g. for the 3 km grid: ``Native: data15hd['ImageDescription']['ReferenceGridVIS_IR']['ColumnDirGridStep'] == 3000.4031658172607`` ``HRIT: np.deg2rad(2.**16 / pdict['lfac']) * pdict['h'] == 3000.4032785810186`` This results in the Native 3 km full-disk area extents being approx. 20 cm shorter in each direction. 
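        (As a rough check: the per-pixel difference of about 1.13e-4 m times
        3712 columns amounts to roughly 0.42 m across the full disk, i.e.
        about 20 cm on each side.)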
The method for calculating the area extents used by the HRIT reader (CFAC/LFAC mechanism) keeps the highest level of numeric precision and is used as reference by EUM. For this reason, the standard area definitions defined in the `areas.yaml` file correspond to the HRIT ones. """ pdict = {} pdict['a'] = self.mda['projection_parameters']['a'] pdict['b'] = self.mda['projection_parameters']['b'] pdict['h'] = self.mda['projection_parameters']['h'] pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] area_naming_input_dict = {'platform_name': 'msg', 'instrument_name': 'seviri', 'resolution': int(dataset_id['resolution']) } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode('seviri', pdict['ssp_lon'])}) pdict['a_name'] = area_naming['area_id'] pdict['a_desc'] = area_naming['description'] pdict['p_id'] = "" area_extent = self.get_area_extent(dataset_id) areas = list() for aex, nlines, ncolumns in zip(area_extent['area_extent'], area_extent['nlines'], area_extent['ncolumns']): pdict['nlines'] = nlines pdict['ncols'] = ncolumns areas.append(get_area_definition(pdict, aex)) if len(areas) == 2: area = geometry.StackedAreaDefinition(areas[0], areas[1]) area = area.squeeze() else: area = areas[0] return area def get_area_extent(self, dataset_id): """Get the area extent of the file. Until December 2017, the data is shifted by 1.5km SSP North and West against the nominal GEOS projection. Since December 2017 this offset has been corrected. A flag in the data indicates if the correction has been applied. If no correction was applied, adjust the area extent to match the shifted data. For more information see Section 3.1.4.2 in the MSG Level 1.5 Image Data Format Description. The correction of the area extent is documented in a `developer's memo `_. 
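        A minimal sketch (assuming the file has been loaded into a ``scene``
        with this reader) for checking whether the correction has already been
        applied to the data::

            scene = satpy.Scene(filenames, reader='seviri_l1b_native')
            scene.load(['VIS006'])
            print(scene['VIS006'].attrs['georef_offset_corrected'])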
""" data15hd = self.header['15_DATA_HEADER'] # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description earth_model = data15hd['GeometricProcessing']['EarthModel'][ 'TypeOfEarthModel'] if earth_model == 2: ns_offset = 0 we_offset = 0 elif earth_model == 1: ns_offset = -0.5 we_offset = 0.5 if dataset_id['name'] == 'HRV': ns_offset = -1.5 we_offset = 1.5 else: raise NotImplementedError( 'Unrecognised Earth model: {}'.format(earth_model) ) if dataset_id['name'] == 'HRV': grid_origin = data15hd['ImageDescription']['ReferenceGridHRV']['GridOrigin'] center_point = (HRV_NUM_COLUMNS / 2) - 2 column_step = data15hd['ImageDescription']['ReferenceGridHRV']['ColumnDirGridStep'] * 1000.0 line_step = data15hd['ImageDescription']['ReferenceGridHRV']['LineDirGridStep'] * 1000.0 nlines_fulldisk = HRV_NUM_LINES ncolumns_fulldisk = HRV_NUM_COLUMNS else: grid_origin = data15hd['ImageDescription']['ReferenceGridVIS_IR']['GridOrigin'] center_point = VISIR_NUM_COLUMNS / 2 column_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['ColumnDirGridStep'] * 1000.0 line_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['LineDirGridStep'] * 1000.0 nlines_fulldisk = VISIR_NUM_LINES ncolumns_fulldisk = VISIR_NUM_COLUMNS # Calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} if grid_origin != 2: msg = 'Grid origin not supported number: {}, {} corner'.format( grid_origin, origins[grid_origin] ) raise NotImplementedError(msg) aex_data = {'area_extent': [], 'nlines': [], 'ncolumns': []} img_bounds = self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()) for south_bound, north_bound, east_bound, west_bound in zip(*img_bounds.values()): if self.fill_disk: east_bound, west_bound = 1, ncolumns_fulldisk if not self.mda['is_full_disk']: south_bound, north_bound = 1, nlines_fulldisk nlines = north_bound - south_bound + 1 ncolumns = west_bound - east_bound + 1 area_dict = {'center_point': center_point, 'east': east_bound, 'west': west_bound, 'south': south_bound, 'north': north_bound, 'column_step': column_step, 'line_step': line_step, 'column_offset': we_offset, 'line_offset': ns_offset } aex = calculate_area_extent(area_dict) aex_data['area_extent'].append(aex) aex_data['nlines'].append(nlines) aex_data['ncolumns'].append(ncolumns) return aex_data def is_roi(self): """Check if data covers a selected region of interest (ROI). Standard RSS data consists of 3712 columns and 1392 lines, covering the three northmost segements of the SEVIRI disk. Hence, if the data does not cover the full disk, nor the standard RSS region in RSS mode, it's assumed to be ROI data. 
""" is_rapid_scan = self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] # Standard RSS data is assumed to cover the three northmost segements, thus consisting of all 3712 columns and # the 1392 northmost lines nlines = int(self.mda['number_of_lines']) ncolumns = int(self.mda['number_of_columns']) north_bound = int(self.header['15_SECONDARY_PRODUCT_HEADER']['NorthLineSelectedRectangle']['Value']) is_top3segments = (ncolumns == VISIR_NUM_COLUMNS and nlines == 1392 and north_bound == VISIR_NUM_LINES) return not self.mda['is_full_disk'] and not (is_rapid_scan and is_top3segments) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" if dataset_id['name'] not in self.mda['channel_list']: raise KeyError('Channel % s not available in the file' % dataset_id['name']) elif dataset_id['name'] not in ['HRV']: data = self._get_visir_channel(dataset_id) else: data = self._get_hrv_channel() xarr = xr.DataArray(data, dims=['y', 'x']).where(data != 0).astype(np.float32) if xarr is None: return None dataset = self.calibrate(xarr, dataset_id) self._add_scanline_acq_time(dataset, dataset_id) self._update_attrs(dataset, dataset_info) if self.fill_disk and not (dataset_id['name'] != 'HRV' and self.mda['is_full_disk']): padder = Padder(dataset_id, self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()), self.mda['is_full_disk']) dataset = padder.pad_data(dataset) return dataset def _get_visir_channel(self, dataset_id): shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) # Check if there is only 1 channel in the list as a change # is needed in the arrray assignment ie channl id is not present if len(self.mda['channel_list']) == 1: raw = self.dask_array['visir']['line_data'] else: i = self.mda['channel_list'].index(dataset_id['name']) raw = self.dask_array['visir']['line_data'][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape) return data def _get_hrv_channel(self): shape = (self.mda['hrv_number_of_lines'], self.mda['hrv_number_of_columns']) shape_layer = (self.mda['number_of_lines'], self.mda['hrv_number_of_columns']) data_list = [] for i in range(3): raw = self.dask_array['hrv']['line_data'][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape_layer) data_list.append(data) return np.stack(data_list, axis=1).reshape(shape) def calibrate(self, data, dataset_id): """Calibrate the data.""" tic = datetime.now() channel_name = dataset_id['name'] calib = SEVIRICalibrationHandler( platform_id=self.platform_id, channel_name=channel_name, coefs=self._get_calib_coefs(channel_name), calib_mode=self.calib_mode, scan_time=self.start_time ) res = calib.calibrate(data, dataset_id['calibration']) logger.debug("Calibration time " + str(datetime.now() - tic)) return res def _get_calib_coefs(self, channel_name): """Get coefficients for calibration from counts to radiance.""" # even though all the channels may not be present in the file, # the header does have calibration coefficients for all the channels # hence, this channel index needs to refer to full channel list band_idx = list(CHANNEL_NAMES.values()).index(channel_name) coefs_nominal = self.header['15_DATA_HEADER'][ 'RadiometricProcessing']['Level15ImageCalibration'] coefs_gsics = self.header['15_DATA_HEADER'][ 'RadiometricProcessing']['MPEFCalFeedback'] radiance_types = self.header['15_DATA_HEADER']['ImageDescription'][ 'Level15ImageProduction']['PlannedChanProcessing'] return create_coef_dict( coefs_nominal=( coefs_nominal['CalSlope'][band_idx], 
coefs_nominal['CalOffset'][band_idx] ), coefs_gsics=( coefs_gsics['GSICSCalCoeff'][band_idx], coefs_gsics['GSICSOffsetCount'][band_idx] ), ext_coefs=self.ext_calib_coefs.get(channel_name, {}), radiance_type=radiance_types[band_idx] ) def _add_scanline_acq_time(self, dataset, dataset_id): """Add scanline acquisition time to the given dataset.""" if dataset_id['name'] == 'HRV': tline = self._get_acq_time_hrv() else: tline = self._get_acq_time_visir(dataset_id) acq_time = get_cds_time(days=tline['Days'], msecs=tline['Milliseconds']) add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): """Get raw acquisition time for HRV channel.""" tline = self.dask_array['hrv']['acq_time'] tline0 = tline[:, 0] tline1 = tline[:, 1] tline2 = tline[:, 2] return da.stack((tline0, tline1, tline2), axis=1).reshape( self.mda['hrv_number_of_lines']).compute() def _get_acq_time_visir(self, dataset_id): """Get raw acquisition time for VIS/IR channels.""" # Check if there is only 1 channel in the list as a change # is needed in the arrray assignment ie channl id is not present if len(self.mda['channel_list']) == 1: return self.dask_array['visir']['acq_time'].compute() i = self.mda['channel_list'].index(dataset_id['name']) return self.dask_array['visir']['acq_time'][:, i].compute() def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" dataset.attrs['units'] = dataset_info['units'] dataset.attrs['wavelength'] = dataset_info['wavelength'] dataset.attrs['standard_name'] = dataset_info['standard_name'] dataset.attrs['platform_name'] = self.mda['platform_name'] dataset.attrs['sensor'] = 'seviri' dataset.attrs['georef_offset_corrected'] = self.mda[ 'offset_corrected'] orbital_parameters = { 'projection_longitude': self.mda['projection_parameters'][ 'ssp_longitude'], 'projection_latitude': 0., 'projection_altitude': self.mda['projection_parameters']['h'], 'satellite_nominal_longitude': self.header['15_DATA_HEADER'][ 'SatelliteStatus']['SatelliteDefinition'][ 'NominalLongitude'], 'satellite_nominal_latitude': 0.0 } try: actual_lon, actual_lat, actual_alt = self.satpos orbital_parameters.update({ 'satellite_actual_longitude': actual_lon, 'satellite_actual_latitude': actual_lat, 'satellite_actual_altitude': actual_alt }) except NoValidOrbitParams as err: logger.warning(err) dataset.attrs['orbital_parameters'] = orbital_parameters if self.include_raw_metadata: dataset.attrs['raw_metadata'] = reduce_mda( self.header, max_size=self.mda_max_array_size ) @cached_property def satpos(self): """Get actual satellite position in geodetic coordinates (WGS-84). Evaluate orbit polynomials at the start time of the scan. Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ poly_finder = OrbitPolynomialFinder(self.header['15_DATA_HEADER'][ 'SatelliteStatus']['Orbit']['OrbitPolynomial']) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.start_time, semi_major_axis=self.mda['projection_parameters']['a'], semi_minor_axis=self.mda['projection_parameters']['b'] ) class ImageBoundaries: """Collect image boundary information.""" def __init__(self, header, trailer, mda): """Initialize the class.""" self._header = header self._trailer = trailer self._mda = mda def get_img_bounds(self, dataset_id, is_roi): """Get image line and column boundaries. 
returns: Dictionary with the four keys 'south_bound', 'north_bound', 'east_bound' and 'west_bound', each containing a list of the respective line/column numbers of the image boundaries. Lists (rather than scalars) are returned since the HRV data in FES mode contain data from two windows/areas. """ if dataset_id['name'] == 'HRV' and not is_roi: img_bounds = self._get_hrv_actual_img_bounds() else: img_bounds = self._get_selected_img_bounds(dataset_id) self._check_for_valid_bounds(img_bounds) return img_bounds def _get_hrv_actual_img_bounds(self): """Get HRV (if not ROI) image boundaries from the ActualL15CoverageHRV information stored in the trailer.""" hrv_bounds = self._trailer['15TRAILER']['ImageProductionStats']['ActualL15CoverageHRV'] img_bounds = {'south_bound': [], 'north_bound': [], 'east_bound': [], 'west_bound': []} for hrv_window in ['Lower', 'Upper']: img_bounds['south_bound'].append(hrv_bounds['%sSouthLineActual' % hrv_window]) img_bounds['north_bound'].append(hrv_bounds['%sNorthLineActual' % hrv_window]) img_bounds['east_bound'].append(hrv_bounds['%sEastColumnActual' % hrv_window]) img_bounds['west_bound'].append(hrv_bounds['%sWestColumnActual' % hrv_window]) # Data from the upper hrv window are only available in FES mode if not self._mda['is_full_disk']: break return img_bounds def _get_selected_img_bounds(self, dataset_id): """Get VISIR and HRV (if ROI) image boundaries from the SelectedRectangle information stored in the header.""" sec15hd = self._header['15_SECONDARY_PRODUCT_HEADER'] south_bound = int(sec15hd['SouthLineSelectedRectangle']['Value']) east_bound = int(sec15hd['EastColumnSelectedRectangle']['Value']) if dataset_id['name'] == 'HRV': nlines, ncolumns = self._get_hrv_img_shape() south_bound = self._convert_visir_bound_to_hrv(south_bound) east_bound = self._convert_visir_bound_to_hrv(east_bound) else: nlines, ncolumns = self._get_visir_img_shape() north_bound = south_bound + nlines - 1 west_bound = east_bound + ncolumns - 1 img_bounds = {'south_bound': [south_bound], 'north_bound': [north_bound], 'east_bound': [east_bound], 'west_bound': [west_bound]} return img_bounds def _get_hrv_img_shape(self): nlines = int(self._mda['hrv_number_of_lines']) ncolumns = int(self._mda['hrv_number_of_columns']) return nlines, ncolumns def _get_visir_img_shape(self): nlines = int(self._mda['number_of_lines']) ncolumns = int(self._mda['number_of_columns']) return nlines, ncolumns @staticmethod def _convert_visir_bound_to_hrv(bound): return 3 * bound - 2 @staticmethod def _check_for_valid_bounds(img_bounds): len_img_bounds = [len(bound) for bound in img_bounds.values()] same_lengths = (len(set(len_img_bounds)) == 1) no_empty = (min(len_img_bounds) > 0) if not (same_lengths and no_empty): raise ValueError('Invalid image boundaries') class Padder: """Padding of HRV, RSS and ROI data to full disk.""" def __init__(self, dataset_id, img_bounds, is_full_disk): """Initialize the padder.""" self._img_bounds = img_bounds self._is_full_disk = is_full_disk if dataset_id['name'] == 'HRV': self._final_shape = (HRV_NUM_LINES, HRV_NUM_COLUMNS) else: self._final_shape = (VISIR_NUM_LINES, VISIR_NUM_COLUMNS) def pad_data(self, dataset): """Pad data to full disk with empty pixels.""" logger.debug('Padding data to full disk') data_list = [] for south_bound, north_bound, east_bound, west_bound in zip(*self._img_bounds.values()): nlines = north_bound - south_bound + 1 data = self._extract_data_to_pad(dataset, south_bound, north_bound) padded_data = pad_data_horizontally(data, (nlines, 
self._final_shape[1]), east_bound, west_bound) data_list.append(padded_data) padded_data = da.vstack(data_list) # If we're dealing with RSS or ROI data, we also need to pad vertically in order to form a full disk array if not self._is_full_disk: padded_data = pad_data_vertically(padded_data, self._final_shape, south_bound, north_bound) return xr.DataArray(padded_data, dims=('y', 'x'), attrs=dataset.attrs.copy()) def _extract_data_to_pad(self, dataset, south_bound, north_bound): """Extract the data that shall be padded. In case of FES (HRV) data, 'dataset' contains data from twoseparate windows that are padded separately. Hence, we extract a subset of data. """ if self._is_full_disk: data = dataset[south_bound - 1:north_bound, :].data else: data = dataset.data return data def get_available_channels(header): """Get the available channels from the header information.""" chlist_str = header['15_SECONDARY_PRODUCT_HEADER'][ 'SelectedBandIDs']['Value'] retv = {} for idx, char in zip(range(12), chlist_str): retv[CHANNEL_NAMES[idx + 1]] = (char == 'X') return retv satpy-0.34.0/satpy/readers/seviri_l1b_native_hdr.py000066400000000000000000001126071420401153000223340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Header and trailer records of SEVIRI native format.""" import numpy as np from satpy.readers.eum_base import time_cds, time_cds_expanded, time_cds_short from satpy.readers.seviri_base import HRV_NUM_COLUMNS, HRV_NUM_LINES, VISIR_NUM_COLUMNS, VISIR_NUM_LINES class GSDTRecords(object): """MSG Ground Segment Data Type records. 
Reference Document (EUM/MSG/SPE/055): MSG Ground Segment Design Specification (GSDS) """ gp_fac_env = np.uint8 gp_fac_id = np.uint8 gp_sc_id = np.uint16 gp_su_id = np.uint32 gp_svce_type = np.uint8 # 4 bytes gp_cpu_address = [ ('Qualifier_1', np.uint8), ('Qualifier_2', np.uint8), ('Qualifier_3', np.uint8), ('Qualifier_4', np.uint8) ] # 22 bytes gp_pk_header = [ ('HeaderVersionNo', np.uint8), ('PacketType', np.uint8), ('SubHeaderType', np.uint8), ('SourceFacilityId', gp_fac_id), ('SourceEnvId', gp_fac_env), ('SourceInstanceId', np.uint8), ('SourceSUId', gp_su_id), ('SourceCPUId', gp_cpu_address), ('DestFacilityId', gp_fac_id), ('DestEnvId', gp_fac_env), ('SequenceCount', np.uint16), ('PacketLength', np.int32) ] # 16 bytes gp_pk_sh1 = [ ('SubHeaderVersionNo', np.uint8), ('ChecksumFlag', bool), ('Acknowledgement', (np.uint8, 4)), ('ServiceType', gp_svce_type), ('ServiceSubtype', np.uint8), ('PacketTime', time_cds_short), ('SpacecraftId', gp_sc_id) ] class Msg15NativeHeaderRecord(object): """SEVIRI Level 1.5 header for native-format.""" def get(self, with_archive_header): """Get the header type.""" # 450400 bytes including archive header # 445286 bytes excluding archive header record = [] if with_archive_header: record += [ ('15_MAIN_PRODUCT_HEADER', L15MainProductHeaderRecord().get()), ('15_SECONDARY_PRODUCT_HEADER', L15SecondaryProductHeaderRecord().get()), ] record += [ ('GP_PK_HEADER', GSDTRecords.gp_pk_header), ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), ('15_DATA_HEADER', L15DataHeaderRecord().get()) ] return np.dtype(record).newbyteorder('>') class L15PhData(object): """L15 Ph handler.""" # 80 bytes l15_ph_data = [ ('Name', 'S30'), ('Value', 'S50') ] class L15MainProductHeaderRecord(object): """L15 Main Product header handler. Reference Document: MSG Level 1.5 Native Format File Definition """ def get(self): """Get header data.""" l15_ph_data = L15PhData.l15_ph_data l15_ph_data_identification = [ ('Name', 'S30'), ('Size', 'S16'), ('Address', 'S16') ] # 3674 bytes record = [ ('FormatName', l15_ph_data), ('FormatDocumentName', l15_ph_data), ('FormatDocumentMajorVersion', l15_ph_data), ('FormatDocumentMinorVersion', l15_ph_data), ('CreationDateTime', l15_ph_data), ('CreatingCentre', l15_ph_data), ('DataSetIdentification', (l15_ph_data_identification, 27)), ('TotalFileSize', l15_ph_data), ('GORT', l15_ph_data), ('ASTI', l15_ph_data), ('LLOS', l15_ph_data), ('SNIT', l15_ph_data), ('AIID', l15_ph_data), ('SSBT', l15_ph_data), ('SSST', l15_ph_data), ('RRCC', l15_ph_data), ('RRBT', l15_ph_data), ('RRST', l15_ph_data), ('PPRC', l15_ph_data), ('PPDT', l15_ph_data), ('GPLV', l15_ph_data), ('APNM', l15_ph_data), ('AARF', l15_ph_data), ('UUDT', l15_ph_data), ('QQOV', l15_ph_data), ('UDSP', l15_ph_data) ] return record class L15SecondaryProductHeaderRecord(object): """L15 Secondary Product header handler. 
Reference Document: MSG Level 1.5 Native Format File Definition """ def get(self): """Get header data.""" l15_ph_data = L15PhData.l15_ph_data # 1440 bytes record = [ ('ABID', l15_ph_data), ('SMOD', l15_ph_data), ('APXS', l15_ph_data), ('AVPA', l15_ph_data), ('LSCD', l15_ph_data), ('LMAP', l15_ph_data), ('QDLC', l15_ph_data), ('QDLP', l15_ph_data), ('QQAI', l15_ph_data), ('SelectedBandIDs', l15_ph_data), ('SouthLineSelectedRectangle', l15_ph_data), ('NorthLineSelectedRectangle', l15_ph_data), ('EastColumnSelectedRectangle', l15_ph_data), ('WestColumnSelectedRectangle', l15_ph_data), ('NumberLinesVISIR', l15_ph_data), ('NumberColumnsVISIR', l15_ph_data), ('NumberLinesHRV', l15_ph_data), ('NumberColumnsHRV', l15_ph_data) ] return record class L15DataHeaderRecord(object): """L15 Data Header handler. Reference Document (EUM/MSG/ICD/105): MSG Level 1.5 Image Data Format Description """ def get(self): """Get header record data.""" # 445248 bytes record = [ ('15HeaderVersion', np.uint8), ('SatelliteStatus', self.satellite_status), ('ImageAcquisition', self.image_acquisition), ('CelestialEvents', self.celestial_events), ('ImageDescription', self.image_description), ('RadiometricProcessing', self.radiometric_processing), ('GeometricProcessing', self.geometric_processing), ('IMPFConfiguration', self.impf_configuration)] return record @property def satellite_status(self): """Get satellite status data.""" # 7 bytes satellite_definition = [ ('SatelliteId', np.uint16), ('NominalLongitude', np.float32), ('SatelliteStatus', np.uint8)] # 28 bytes satellite_operations = [ ('LastManoeuvreFlag', bool), ('LastManoeuvreStartTime', time_cds_short), ('LastManoeuvreEndTime', time_cds_short), ('LastManoeuvreType', np.uint8), ('NextManoeuvreFlag', bool), ('NextManoeuvreStartTime', time_cds_short), ('NextManoeuvreEndTime', time_cds_short), ('NextManoeuvreType', np.uint8)] # 396 bytes orbit_coeff = [ ('StartTime', time_cds_short), ('EndTime', time_cds_short), ('X', (np.float64, 8)), ('Y', (np.float64, 8)), ('Z', (np.float64, 8)), ('VX', (np.float64, 8)), ('VY', (np.float64, 8)), ('VZ', (np.float64, 8))] # 39612 bytes orbit = [ ('PeriodStartTime', time_cds_short), ('PeriodEndTime', time_cds_short), ('OrbitPolynomial', (orbit_coeff, 100))] # 204 bytes attitude_coeff = [ ('StartTime', time_cds_short), ('EndTime', time_cds_short), ('XofSpinAxis', (np.float64, 8)), ('YofSpinAxis', (np.float64, 8)), ('ZofSpinAxis', (np.float64, 8))] # 20420 bytes attitude = [ ('PeriodStartTime', time_cds_short), ('PeriodEndTime', time_cds_short), ('PrincipleAxisOffsetAngle', np.float64), ('AttitudePolynomial', (attitude_coeff, 100))] # 59 bytes utc_correlation = [ ('PeriodStartTime', time_cds_short), ('PeriodEndTime', time_cds_short), ('OnBoardTimeStart', (np.uint8, 7)), ('VarOnBoardTimeStart', np.float64), ('A1', np.float64), ('VarA1', np.float64), ('A2', np.float64), ('VarA2', np.float64)] # 60134 bytes record = [ ('SatelliteDefinition', satellite_definition), ('SatelliteOperations', satellite_operations), ('Orbit', orbit), ('Attitude', attitude), ('SpinRetreatRCStart', np.float64), ('UTCCorrelation', utc_correlation)] return record @property def image_acquisition(self): """Get image acquisition data.""" planned_acquisition_time = [ ('TrueRepeatCycleStart', time_cds_expanded), ('PlanForwardScanEnd', time_cds_expanded), ('PlannedRepeatCycleEnd', time_cds_expanded)] radiometer_status = [ ('ChannelStatus', (np.uint8, 12)), ('DetectorStatus', (np.uint8, 42))] hrv_frame_offsets = [ ('MDUNomHRVDelay1', np.uint16), ('MDUNomHRVDelay2', np.uint16), 
('Spare', np.uint16), ('MDUNomHRVBreakLine', np.uint16)] operation_parameters = [ ('L0_LineCounter', np.uint16), ('K1_RetraceLines', np.uint16), ('K2_PauseDeciseconds', np.uint16), ('K3_RetraceLines', np.uint16), ('K4_PauseDeciseconds', np.uint16), ('K5_RetraceLines', np.uint16), ('XDeepSpaceWindowPosition', np.uint8)] radiometer_settings = [ ('MDUSamplingDelays', (np.uint16, 42)), ('HRVFrameOffsets', hrv_frame_offsets), ('DHSSSynchSelection', np.uint8), ('MDUOutGain', (np.uint16, 42)), ('MDUCoarseGain', (np.uint8, 42)), ('MDUFineGain', (np.uint16, 42)), ('MDUNumericalOffset', (np.uint16, 42)), ('PUGain', (np.uint16, 42)), ('PUOffset', (np.uint16, 27)), ('PUBias', (np.uint16, 15)), ('OperationParameters', operation_parameters), ('RefocusingLines', np.uint16), ('RefocusingDirection', np.uint8), ('RefocusingPosition', np.uint16), ('ScanRefPosFlag', bool), ('ScanRefPosNumber', np.uint16), ('ScanRefPosVal', np.float32), ('ScanFirstLine', np.uint16), ('ScanLastLine', np.uint16), ('RetraceStartLine', np.uint16)] decontamination = [ ('DecontaminationNow', bool), ('DecontaminationStart', time_cds_short), ('DecontaminationEnd', time_cds_short)] radiometer_operations = [ ('LastGainChangeFlag', bool), ('LastGainChangeTime', time_cds_short), ('Decontamination', decontamination), ('BBCalScheduled', bool), ('BBCalibrationType', np.uint8), ('BBFirstLine', np.uint16), ('BBLastLine', np.uint16), ('ColdFocalPlaneOpTemp', np.uint16), ('WarmFocalPlaneOpTemp', np.uint16)] record = [ ('PlannedAcquisitionTime', planned_acquisition_time), ('RadiometerStatus', radiometer_status), ('RadiometerSettings', radiometer_settings), ('RadiometerOperations', radiometer_operations)] return record @property def celestial_events(self): """Get celestial events data.""" earth_moon_sun_coeff = [ ('StartTime', time_cds_short), ('EndTime', time_cds_short), ('AlphaCoef', (np.float64, 8)), ('BetaCoef', (np.float64, 8))] star_coeff = [ ('StarId', np.uint16), ('StartTime', time_cds_short), ('EndTime', time_cds_short), ('AlphaCoef', (np.float64, 8)), ('BetaCoef', (np.float64, 8))] ephemeris = [ ('PeriodTimeStart', time_cds_short), ('PeriodTimeEnd', time_cds_short), ('RelatedOrbitFileTime', 'S15'), ('RelatedAttitudeFileTime', 'S15'), ('EarthEphemeris', (earth_moon_sun_coeff, 100)), ('MoonEphemeris', (earth_moon_sun_coeff, 100)), ('SunEphemeris', (earth_moon_sun_coeff, 100)), ('StarEphemeris', (star_coeff, (20, 100)))] relation_to_image = [ ('TypeOfEclipse', np.uint8), ('EclipseStartTime', time_cds_short), ('EclipseEndTime', time_cds_short), ('VisibleBodiesInImage', np.uint8), ('BodiesCloseToFOV', np.uint8), ('ImpactOnImageQuality', np.uint8)] record = [ ('CelestialBodiesPosition', ephemeris), ('RelationToImage', relation_to_image)] return record @property def image_description(self): """Get image description data.""" projection_description = [ ('TypeOfProjection', np.uint8), ('LongitudeOfSSP', np.float32)] reference_grid = [ ('NumberOfLines', np.int32), ('NumberOfColumns', np.int32), ('LineDirGridStep', np.float32), ('ColumnDirGridStep', np.float32), ('GridOrigin', np.uint8)] planned_coverage_vis_ir = [ ('SouthernLinePlanned', np.int32), ('NorthernLinePlanned', np.int32), ('EasternColumnPlanned', np.int32), ('WesternColumnPlanned', np.int32)] planned_coverage_hrv = [ ('LowerSouthLinePlanned', np.int32), ('LowerNorthLinePlanned', np.int32), ('LowerEastColumnPlanned', np.int32), ('LowerWestColumnPlanned', np.int32), ('UpperSouthLinePlanned', np.int32), ('UpperNorthLinePlanned', np.int32), ('UpperEastColumnPlanned', np.int32), 
('UpperWestColumnPlanned', np.int32)] level_15_image_production = [ ('ImageProcDirection', np.uint8), ('PixelGenDirection', np.uint8), ('PlannedChanProcessing', (np.uint8, 12))] record = [ ('ProjectionDescription', projection_description), ('ReferenceGridVIS_IR', reference_grid), ('ReferenceGridHRV', reference_grid), ('PlannedCoverageVIS_IR', planned_coverage_vis_ir), ('PlannedCoverageHRV', planned_coverage_hrv), ('Level15ImageProduction', level_15_image_production)] return record @property def radiometric_processing(self): """Get radiometric processing data.""" rp_summary = [ ('RadianceLinearization', (bool, 12)), ('DetectorEqualization', (bool, 12)), ('OnboardCalibrationResult', (bool, 12)), ('MPEFCalFeedback', (bool, 12)), ('MTFAdaptation', (bool, 12)), ('StrayLightCorrection', (bool, 12))] level_15_image_calibration = [ ('CalSlope', np.float64), ('CalOffset', np.float64)] time_cuc_size = [ ('CT1', np.uint8), ('CT2', np.uint8), ('CT3', np.uint8), ('CT4', np.uint8), ('FT1', np.uint8), ('FT2', np.uint8), ('FT3', np.uint8)] cold_fp_temperature = [ ('FCUNominalColdFocalPlaneTemp', np.uint16), ('FCURedundantColdFocalPlaneTemp', np.uint16)] warm_fp_temperature = [ ('FCUNominalWarmFocalPlaneVHROTemp', np.uint16), ('FCURedundantWarmFocalPlaneVHROTemp', np.uint16)] scan_mirror_temperature = [ ('FCUNominalScanMirrorSensor1Temp', np.uint16), ('FCURedundantScanMirrorSensor1Temp', np.uint16), ('FCUNominalScanMirrorSensor2Temp', np.uint16), ('FCURedundantScanMirrorSensor2Temp', np.uint16)] m1m2m3_temperature = [ ('FCUNominalM1MirrorSensor1Temp', np.uint16), ('FCURedundantM1MirrorSensor1Temp', np.uint16), ('FCUNominalM1MirrorSensor2Temp', np.uint16), ('FCURedundantM1MirrorSensor2Temp', np.uint16), ('FCUNominalM23AssemblySensor1Temp', np.uint8), ('FCURedundantM23AssemblySensor1Temp', np.uint8), ('FCUNominalM23AssemblySensor2Temp', np.uint8), ('FCURedundantM23AssemblySensor2Temp', np.uint8)] baffle_temperature = [ ('FCUNominalM1BaffleTemp', np.uint16), ('FCURedundantM1BaffleTemp', np.uint16)] blackbody_temperature = [ ('FCUNominalBlackBodySensorTemp', np.uint16), ('FCURedundantBlackBodySensorTemp', np.uint16)] fcu_mode = [ ('FCUNominalSMMStatus', 'S2'), ('FCURedundantSMMStatus', 'S2')] extracted_bb_data = [ ('NumberOfPixelsUsed', np.uint32), ('MeanCount', np.float32), ('RMS', np.float32), ('MaxCount', np.uint16), ('MinCount', np.uint16), ('BB_Processing_Slope', np.float64), ('BB_Processing_Offset', np.float64)] bb_related_data = [ ('OnBoardBBTime', time_cuc_size), ('MDUOutGain', (np.uint16, 42)), ('MDUCoarseGain', (np.uint8, 42)), ('MDUFineGain', (np.uint16, 42)), ('MDUNumericalOffset', (np.uint16, 42)), ('PUGain', (np.uint16, 42)), ('PUOffset', (np.uint16, 27)), ('PUBias', (np.uint16, 15)), ('DCRValues', (np.uint8, 63)), ('X_DeepSpaceWindowPosition', np.int8), ('ColdFPTemperature', cold_fp_temperature), ('WarmFPTemperature', warm_fp_temperature), ('ScanMirrorTemperature', scan_mirror_temperature), ('M1M2M3Temperature', m1m2m3_temperature), ('BaffleTemperature', baffle_temperature), ('BlackBodyTemperature', blackbody_temperature), ('FCUMode', fcu_mode), ('ExtractedBBData', (extracted_bb_data, 12))] black_body_data_used = [ ('BBObservationUTC', time_cds_expanded), ('BBRelatedData', bb_related_data)] impf_cal_data = [ ('ImageQualityFlag', np.uint8), ('ReferenceDataFlag', np.uint8), ('AbsCalMethod', np.uint8), ('Pad1', 'S1'), ('AbsCalWeightVic', np.float32), ('AbsCalWeightXsat', np.float32), ('AbsCalCoeff', np.float32), ('AbsCalError', np.float32), ('GSICSCalCoeff', np.float32), ('GSICSCalError', 
np.float32), ('GSICSOffsetCount', np.float32)] rad_proc_mtf_adaptation = [ ('VIS_IRMTFCorrectionE_W', (np.float32, (33, 16))), ('VIS_IRMTFCorrectionN_S', (np.float32, (33, 16))), ('HRVMTFCorrectionE_W', (np.float32, (9, 16))), ('HRVMTFCorrectionN_S', (np.float32, (9, 16))), ('StraylightCorrection', (np.float32, (12, 8, 8)))] record = [ ('RPSummary', rp_summary), ('Level15ImageCalibration', (level_15_image_calibration, 12)), ('BlackBodyDataUsed', black_body_data_used), ('MPEFCalFeedback', (impf_cal_data, 12)), ('RadTransform', (np.float32, (42, 64))), ('RadProcMTFAdaptation', rad_proc_mtf_adaptation)] return record @property def geometric_processing(self): """Get geometric processing data.""" opt_axis_distances = [ ('E-WFocalPlane', (np.float32, 42)), ('N_SFocalPlane', (np.float32, 42))] earth_model = [ ('TypeOfEarthModel', np.uint8), ('EquatorialRadius', np.float64), ('NorthPolarRadius', np.float64), ('SouthPolarRadius', np.float64)] record = [ ('OptAxisDistances', opt_axis_distances), ('EarthModel', earth_model), ('AtmosphericModel', (np.float32, (12, 360))), ('ResamplingFunctions', (np.uint8, 12))] return record @property def impf_configuration(self): """Get impf configuration information.""" overall_configuration = [ ('Issue', np.uint16), ('Revision', np.uint16) ] sw_version = overall_configuration info_base_versions = sw_version su_configuration = [ ('SWVersion', sw_version), ('InfoBaseVersions', (info_base_versions, 10)) ] su_details = [ ('SUId', GSDTRecords.gp_su_id), ('SUIdInstance', np.int8), ('SUMode', np.uint8), ('SUState', np.uint8), ('SUConfiguration', su_configuration) ] equalisation_params = [ ('ConstCoeff', np.float32), ('LinearCoeff', np.float32), ('QuadraticCoeff', np.float32) ] black_body_data_for_warm_start = [ ('GTotalForMethod1', (np.float64, 12)), ('GTotalForMethod2', (np.float64, 12)), ('GTotalForMethod3', (np.float64, 12)), ('GBackForMethod1', (np.float64, 12)), ('GBackForMethod2', (np.float64, 12)), ('GBackForMethod3', (np.float64, 12)), ('RatioGTotalToGBack', (np.float64, 12)), ('GainInFrontOpticsCont', (np.float64, 12)), ('CalibrationConstants', (np.float32, 12)), ('maxIncidentRadiance', (np.float64, 12)), ('TimeOfColdObsSeconds', np.float64), ('TimeOfColdObsNanoSecs', np.float64), ('IncidenceRadiance', (np.float64, 12)), ('TempCal', np.float64), ('TempM1', np.float64), ('TempScan', np.float64), ('TempM1Baf', np.float64), ('TempCalSurround', np.float64) ] mirror_parameters = [ ('MaxFeedbackVoltage', np.float64), ('MinFeedbackVoltage', np.float64), ('MirrorSlipEstimate', np.float64) ] hktm_parameters = [ ('TimeS0Packet', time_cds_short), ('TimeS1Packet', time_cds_short), ('TimeS2Packet', time_cds_short), ('TimeS3Packet', time_cds_short), ('TimeS4Packet', time_cds_short), ('TimeS5Packet', time_cds_short), ('TimeS6Packet', time_cds_short), ('TimeS7Packet', time_cds_short), ('TimeS8Packet', time_cds_short), ('TimeS9Packet', time_cds_short), ('TimeSYPacket', time_cds_short), ('TimePSPacket', time_cds_short) ] warm_start_params = [ ('ScanningLaw', (np.float64, 1527)), ('RadFramesAlignment', (np.float64, 3)), ('ScanningLawVariation', (np.float32, 2)), ('EqualisationParams', (equalisation_params, 42)), ('BlackBodyDataForWarmStart', black_body_data_for_warm_start), ('MirrorParameters', mirror_parameters), ('LastSpinPeriod', np.float64), ('HKTMParameters', hktm_parameters), ('WSPReserved', (np.uint8, 3312)) ] record = [ ('OverallConfiguration', overall_configuration), ('SUDetails', (su_details, 50)), ('WarmStartParams', warm_start_params) ] return record class 
Msg15NativeTrailerRecord(object): """SEVIRI Level 1.5 trailer for native-format. Reference Document (EUM/MSG/ICD/105): MSG Level 1.5 Image Data Format Description """ def get(self): """Get header record data.""" # 380363 bytes record = [ ('GP_PK_HEADER', GSDTRecords.gp_pk_header), ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), ('15TRAILER', self.seviri_l15_trailer) ] return np.dtype(record).newbyteorder('>') @property def seviri_l15_trailer(self): """Get file trailer data.""" record = [ ('15TrailerVersion', np.uint8), ('ImageProductionStats', self.image_production_stats), ('NavigationExtractionResults', self.navigation_extraction_results), ('RadiometricQuality', self.radiometric_quality), ('GeometricQuality', self.geometric_quality), ('TimelinessAndCompleteness', self.timeliness_and_completeness) ] return record @property def image_production_stats(self): """Get image production statistics.""" gp_sc_id = GSDTRecords.gp_sc_id actual_scanning_summary = [ ('NominalImageScanning', np.uint8), ('ReducedScan', np.uint8), ('ForwardScanStart', time_cds_short), ('ForwardScanEnd', time_cds_short) ] radiometric_behaviour = [ ('NominalBehaviour', np.uint8), ('RadScanIrregularity', np.uint8), ('RadStoppage', np.uint8), ('RepeatCycleNotCompleted', np.uint8), ('GainChangeTookPlace', np.uint8), ('DecontaminationTookPlace', np.uint8), ('NoBBCalibrationAchieved', np.uint8), ('IncorrectTemperature', np.uint8), ('InvalidBBData', np.uint8), ('InvalidAuxOrHKTMData', np.uint8), ('RefocusingMechanismActuated', np.uint8), ('MirrorBackToReferencePos', np.uint8) ] reception_summary_stats = [ ('PlannedNumberOfL10Lines', (np.uint32, 12)), ('NumberOfMissingL10Lines', (np.uint32, 12)), ('NumberOfCorruptedL10Lines', (np.uint32, 12)), ('NumberOfReplacedL10Lines', (np.uint32, 12)) ] l15_image_validity = [ ('NominalImage', np.uint8), ('NonNominalBecauseIncomplete', np.uint8), ('NonNominalRadiometricQuality', np.uint8), ('NonNominalGeometricQuality', np.uint8), ('NonNominalTimeliness', np.uint8), ('IncompleteL15', np.uint8), ] actual_l15_coverage_vis_ir = [ ('SouthernLineActual', np.int32), ('NorthernLineActual', np.int32), ('EasternColumnActual', np.int32), ('WesternColumnActual', np.int32) ] actual_l15_coverage_hrv = [ ('LowerSouthLineActual', np.int32), ('LowerNorthLineActual', np.int32), ('LowerEastColumnActual', np.int32), ('LowerWestColumnActual', np.int32), ('UpperSouthLineActual', np.int32), ('UpperNorthLineActual', np.int32), ('UpperEastColumnActual', np.int32), ('UpperWestColumnActual', np.int32), ] record = [ ('SatelliteId', gp_sc_id), ('ActualScanningSummary', actual_scanning_summary), ('RadiometricBehaviour', radiometric_behaviour), ('ReceptionSummaryStats', reception_summary_stats), ('L15ImageValidity', (l15_image_validity, 12)), ('ActualL15CoverageVIS_IR', actual_l15_coverage_vis_ir), ('ActualL15CoverageHRV', actual_l15_coverage_hrv) ] return record @property def navigation_extraction_results(self): """Get navigation extraction data.""" horizon_observation = [ ('HorizonId', np.uint8), ('Alpha', np.float64), ('AlphaConfidence', np.float64), ('Beta', np.float64), ('BetaConfidence', np.float64), ('ObservationTime', time_cds), ('SpinRate', np.float64), ('AlphaDeviation', np.float64), ('BetaDeviation', np.float64) ] star_observation = [ ('StarId', np.uint16), ('Alpha', np.float64), ('AlphaConfidence', np.float64), ('Beta', np.float64), ('BetaConfidence', np.float64), ('ObservationTime', time_cds), ('SpinRate', np.float64), ('AlphaDeviation', np.float64), ('BetaDeviation', np.float64) ] landmark_observation = [ ('LandmarkId', 
np.uint16), ('LandmarkLongitude', np.float64), ('LandmarkLatitude', np.float64), ('Alpha', np.float64), ('AlphaConfidence', np.float64), ('Beta', np.float64), ('BetaConfidence', np.float64), ('ObservationTime', time_cds), ('SpinRate', np.float64), ('AlphaDeviation', np.float64), ('BetaDeviation', np.float64) ] record = [ ('ExtractedHorizons', (horizon_observation, 4)), ('ExtractedStars', (star_observation, 20)), ('ExtractedLandmarks', (landmark_observation, 50)) ] return record @property def radiometric_quality(self): """Get radiometric quality record data.""" l10_rad_quality = [ ('FullImageMinimumCount', np.uint16), ('FullImageMaximumCount', np.uint16), ('EarthDiskMinimumCount', np.uint16), ('EarthDiskMaximumCount', np.uint16), ('MoonMinimumCount', np.uint16), ('MoonMaximumCount', np.uint16), ('FullImageMeanCount', np.float32), ('FullImageStandardDeviation', np.float32), ('EarthDiskMeanCount', np.float32), ('EarthDiskStandardDeviation', np.float32), ('MoonMeanCount', np.float32), ('MoonStandardDeviation', np.float32), ('SpaceMeanCount', np.float32), ('SpaceStandardDeviation', np.float32), ('SESpaceCornerMeanCount', np.float32), ('SESpaceCornerStandardDeviation', np.float32), ('SWSpaceCornerMeanCount', np.float32), ('SWSpaceCornerStandardDeviation', np.float32), ('NESpaceCornerMeanCount', np.float32), ('NESpaceCornerStandardDeviation', np.float32), ('NWSpaceCornerMeanCount', np.float32), ('NWSpaceCornerStandardDeviation', np.float32), ('4SpaceCornersMeanCount', np.float32), ('4SpaceCornersStandardDeviation', np.float32), ('FullImageHistogram', (np.uint32, 256)), ('EarthDiskHistogram', (np.uint32, 256)), ('ImageCentreSquareHistogram', (np.uint32, 256)), ('SESpaceCornerHistogram', (np.uint32, 128)), ('SWSpaceCornerHistogram', (np.uint32, 128)), ('NESpaceCornerHistogram', (np.uint32, 128)), ('NWSpaceCornerHistogram', (np.uint32, 128)), ('FullImageEntropy', (np.float32, 3)), ('EarthDiskEntropy', (np.float32, 3)), ('ImageCentreSquareEntropy', (np.float32, 3)), ('SESpaceCornerEntropy', (np.float32, 3)), ('SWSpaceCornerEntropy', (np.float32, 3)), ('NESpaceCornerEntropy', (np.float32, 3)), ('NWSpaceCornerEntropy', (np.float32, 3)), ('4SpaceCornersEntropy', (np.float32, 3)), ('ImageCentreSquarePSD_EW', (np.float32, 128)), ('FullImagePSD_EW', (np.float32, 128)), ('ImageCentreSquarePSD_NS', (np.float32, 128)), ('FullImagePSD_NS', (np.float32, 128)) ] l15_rad_quality = [ ('FullImageMinimumCount', np.uint16), ('FullImageMaximumCount', np.uint16), ('EarthDiskMinimumCount', np.uint16), ('EarthDiskMaximumCount', np.uint16), ('FullImageMeanCount', np.float32), ('FullImageStandardDeviation', np.float32), ('EarthDiskMeanCount', np.float32), ('EarthDiskStandardDeviation', np.float32), ('SpaceMeanCount', np.float32), ('SpaceStandardDeviation', np.float32), ('FullImageHistogram', (np.uint32, 256)), ('EarthDiskHistogram', (np.uint32, 256)), ('ImageCentreSquareHistogram', (np.uint32, 256)), ('FullImageEntropy', (np.float32, 3)), ('EarthDiskEntropy', (np.float32, 3)), ('ImageCentreSquareEntropy', (np.float32, 3)), ('ImageCentreSquarePSD_EW', (np.float32, 128)), ('FullImagePSD_EW', (np.float32, 128)), ('ImageCentreSquarePSD_NS', (np.float32, 128)), ('FullImagePSD_NS', (np.float32, 128)), ('SESpaceCornerL15_RMS', np.float32), ('SESpaceCornerL15_Mean', np.float32), ('SWSpaceCornerL15_RMS', np.float32), ('SWSpaceCornerL15_Mean', np.float32), ('NESpaceCornerL15_RMS', np.float32), ('NESpaceCornerL15_Mean', np.float32), ('NWSpaceCornerL15_RMS', np.float32), ('NWSpaceCornerL15_Mean', np.float32) ] record = [ 
('L10RadQuality', (l10_rad_quality, 42)), ('L15RadQuality', (l15_rad_quality, 12)) ] return record @property def geometric_quality(self): """Get geometric quality record data.""" absolute_accuracy = [ ('QualityInfoValidity', np.uint8), ('EastWestAccuracyRMS', np.float32), ('NorthSouthAccuracyRMS', np.float32), ('MagnitudeRMS', np.float32), ('EastWestUncertaintyRMS', np.float32), ('NorthSouthUncertaintyRMS', np.float32), ('MagnitudeUncertaintyRMS', np.float32), ('EastWestMaxDeviation', np.float32), ('NorthSouthMaxDeviation', np.float32), ('MagnitudeMaxDeviation', np.float32), ('EastWestUncertaintyMax', np.float32), ('NorthSouthUncertaintyMax', np.float32), ('MagnitudeUncertaintyMax', np.float32) ] relative_accuracy = absolute_accuracy pixels_500_relative_accuracy = absolute_accuracy pixels_16_relative_accuracy = absolute_accuracy misregistration_residuals = [ ('QualityInfoValidity', np.uint8), ('EastWestResidual', np.float32), ('NorthSouthResidual', np.float32), ('EastWestUncertainty', np.float32), ('NorthSouthUncertainty', np.float32), ('EastWestRMS', np.float32), ('NorthSouthRMS', np.float32), ('EastWestMagnitude', np.float32), ('NorthSouthMagnitude', np.float32), ('EastWestMagnitudeUncertainty', np.float32), ('NorthSouthMagnitudeUncertainty', np.float32) ] geometric_quality_status = [ ('QualityNominal', np.uint8), ('NominalAbsolute', np.uint8), ('NominalRelativeToPreviousImage', np.uint8), ('NominalForREL500', np.uint8), ('NominalForREL16', np.uint8), ('NominalForResMisreg', np.uint8) ] record = [ ('AbsoluteAccuracy', (absolute_accuracy, 12)), ('RelativeAccuracy', (relative_accuracy, 12)), ('500PixelsRelativeAccuracy', (pixels_500_relative_accuracy, 12)), ('16PixelsRelativeAccuracy', (pixels_16_relative_accuracy, 12)), ('MisregistrationResiduals', (misregistration_residuals, 12)), ('GeometricQualityStatus', (geometric_quality_status, 12)) ] return record @property def timeliness_and_completeness(self): """Get time and completeness record data.""" timeliness = [ ('MaxDelay', np.float32), ('MinDelay', np.float32), ('MeanDelay', np.float32) ] completeness = [ ('PlannedL15ImageLines', np.uint16), ('GeneratedL15ImageLines', np.uint16), ('ValidL15ImageLines', np.uint16), ('DummyL15ImageLines', np.uint16), ('CorruptedL15ImageLines', np.uint16) ] record = [ ('Timeliness', timeliness), ('Completeness', (completeness, 12)) ] return record class HritPrologue(L15DataHeaderRecord): """HRIT Prologue handler.""" def get(self): """Get record data array.""" # X bytes record = [ ('SatelliteStatus', self.satellite_status), ('ImageAcquisition', self.image_acquisition), ('CelestialEvents', self.celestial_events), ('ImageDescription', self.image_description), ('RadiometricProcessing', self.radiometric_processing), ('GeometricProcessing', self.geometric_processing) ] return np.dtype(record).newbyteorder('>') def get_native_header(with_archive_header=True): """Get Native format header type. There are two variants, one including an ASCII archive header and one without that header. The header is prepended if the data are ordered through the EUMETSAT data center. 
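The nested lists of ``(name, dtype)`` tuples above become a single big-endian structured dtype once passed to ``np.dtype``. A minimal sketch of the same pattern, using made-up field names rather than the real SEVIRI layout::

    import numpy as np

    # Illustrative only: a tiny nested record in the style used above.
    time_cds_short = [('Days', np.uint16), ('Milliseconds', np.uint32)]
    toy_record = [
        ('SatelliteId', np.uint16),
        ('NominalTime', time_cds_short),            # nested record
        ('PlannedChanProcessing', (np.uint8, 12)),  # fixed-size array field
    ]

    # Native/HRIT data are big-endian, hence the byte-order swap.
    dtype = np.dtype(toy_record).newbyteorder('>')

    # A real reader would hand the full header dtype to np.fromfile; here we
    # just round-trip an empty buffer to show the nested field access.
    buf = np.zeros(1, dtype=dtype).tobytes()
    header = np.frombuffer(buf, dtype=dtype, count=1)
    print(header['NominalTime']['Days'])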
""" return Msg15NativeHeaderRecord().get(with_archive_header) DEFAULT_15_SECONDARY_PRODUCT_HEADER = { 'NorthLineSelectedRectangle': {'Value': VISIR_NUM_LINES}, 'SouthLineSelectedRectangle': {'Value': 1}, 'EastColumnSelectedRectangle': {'Value': 1}, 'WestColumnSelectedRectangle': {'Value': VISIR_NUM_COLUMNS}, 'NumberColumnsVISIR': {'Value': VISIR_NUM_COLUMNS}, 'NumberLinesVISIR': {'Value': VISIR_NUM_LINES}, 'NumberColumnsHRV': {'Value': HRV_NUM_COLUMNS}, 'NumberLinesHRV': {'Value': HRV_NUM_LINES}, 'SelectedBandIDs': {'Value': 'XXXXXXXXXXXX'} } """Default secondary product header for files containing all channels.""" hrit_epilogue = np.dtype( Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder('>') hrit_prologue = HritPrologue().get() impf_configuration = np.dtype( L15DataHeaderRecord().impf_configuration).newbyteorder('>') native_trailer = Msg15NativeTrailerRecord().get() satpy-0.34.0/satpy/readers/seviri_l1b_nc.py000066400000000000000000000351251420401153000206100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SEVIRI netcdf format reader.""" import datetime import logging import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_geos_area_naming from satpy.readers.eum_base import get_service_mode from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import ( CHANNEL_NAMES, SATNUM, NoValidOrbitParams, OrbitPolynomialFinder, SEVIRICalibrationHandler, add_scanline_acq_time, get_cds_time, get_satpos, ) logger = logging.getLogger('nc_msg') class NCSEVIRIFileHandler(BaseFileHandler): """File handler for NC seviri files. **Calibration** See :mod:`satpy.readers.seviri_base`. Note that there is only one set of calibration coefficients available in the netCDF files and therefore there is no `calib_mode` argument. **Metadata** See :mod:`satpy.readers.seviri_base`. """ def __init__(self, filename, filename_info, filetype_info, ext_calib_coefs=None): """Init the file handler.""" super(NCSEVIRIFileHandler, self).__init__(filename, filename_info, filetype_info) self.ext_calib_coefs = ext_calib_coefs or {} self.nc = None self.mda = {} self.reference = datetime.datetime(1958, 1, 1) self._read_file() @property def start_time(self): """Get the start time.""" return self.deltaSt @property def end_time(self): """Get the end time.""" return self.deltaEnd def _read_file(self): """Read the file.""" if self.nc is None: self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks=CHUNK_SIZE) # Obtain some area definition attributes equatorial_radius = (self.nc.attrs['equatorial_radius'] * 1000.) 
polar_radius = (self.nc.attrs['north_polar_radius'] * 1000 + self.nc.attrs['south_polar_radius'] * 1000) * 0.5 ssp_lon = self.nc.attrs['longitude_of_SSP'] self.mda['projection_parameters'] = {'a': equatorial_radius, 'b': polar_radius, 'h': 35785831.00, 'ssp_longitude': ssp_lon} self.mda['number_of_lines'] = int(self.nc.dims['num_rows_vis_ir']) self.mda['number_of_columns'] = int(self.nc.dims['num_columns_vis_ir']) self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv']) self.deltaSt = self.reference + datetime.timedelta( days=int(self.nc.attrs['true_repeat_cycle_start_day']), milliseconds=int(self.nc.attrs['true_repeat_cycle_start_mi_sec'])) self.deltaEnd = self.reference + datetime.timedelta( days=int(self.nc.attrs['planned_repeat_cycle_end_day']), milliseconds=int(self.nc.attrs['planned_repeat_cycle_end_mi_sec'])) self.north = int(self.nc.attrs['north_most_line']) self.east = int(self.nc.attrs['east_most_pixel']) self.west = int(self.nc.attrs['west_most_pixel']) self.south = int(self.nc.attrs['south_most_line']) self.platform_id = int(self.nc.attrs['satellite_id']) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" channel = dataset_id['name'] if (channel == 'HRV'): self.nc = self.nc.rename({'num_columns_hrv': 'x', 'num_rows_hrv': 'y'}) else: # the first channel of a composite will rename the dimension variable # but the later channels will raise a value error as its already been renamed # we can just ignore these exceptions try: self.nc = self.nc.rename({'num_columns_vis_ir': 'x', 'num_rows_vis_ir': 'y'}) except ValueError: pass dataset = self.nc[dataset_info['nc_key']] # Correct for the scan line order # TODO: Move _add_scanline_acq_time() call to the end of the method # once flipping is removed. 
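The dimension handling in this method can be mimicked with a toy dataset; the variable name and sizes below are made up and only mirror the pattern used by this handler::

    import numpy as np
    import xarray as xr

    # Toy stand-in for the opened netCDF file, not real SEVIRI data.
    nc = xr.Dataset(
        {'ch': (('num_rows_vis_ir', 'num_columns_vis_ir'),
                np.arange(12.).reshape(3, 4))})

    # Rename once; renaming the same dimensions a second time raises
    # ValueError and is simply ignored, exactly as above.
    try:
        nc = nc.rename({'num_columns_vis_ir': 'x', 'num_rows_vis_ir': 'y'})
    except ValueError:
        pass

    # Flip the scan line (y) direction.
    dataset = nc['ch'].sel(y=slice(None, None, -1))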
self._add_scanline_acq_time(dataset, dataset_id) dataset = dataset.sel(y=slice(None, None, -1)) dataset = self.calibrate(dataset, dataset_id) self._update_attrs(dataset, dataset_info) return dataset def calibrate(self, dataset, dataset_id): """Calibrate the data.""" channel = dataset_id['name'] calibration = dataset_id['calibration'] if dataset_id['calibration'] == 'counts': dataset.attrs['_FillValue'] = 0 calib = SEVIRICalibrationHandler( platform_id=int(self.platform_id), channel_name=channel, coefs=self._get_calib_coefs(dataset, channel), calib_mode='NOMINAL', scan_time=self.start_time ) return calib.calibrate(dataset, calibration) def _get_calib_coefs(self, dataset, channel): """Get coefficients for calibration from counts to radiance.""" band_idx = list(CHANNEL_NAMES.values()).index(channel) offset = dataset.attrs['add_offset'].astype('float32') gain = dataset.attrs['scale_factor'].astype('float32') # Only one calibration available here return { 'coefs': { 'NOMINAL': { 'gain': gain, 'offset': offset }, 'EXTERNAL': self.ext_calib_coefs.get(channel, {}) }, 'radiance_type': self.nc['planned_chan_processing'].values[band_idx] } def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" dataset.attrs.update(self.nc[dataset_info['nc_key']].attrs) dataset.attrs.update(dataset_info) dataset.attrs['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] dataset.attrs['sensor'] = 'seviri' dataset.attrs['orbital_parameters'] = { 'projection_longitude': self.mda['projection_parameters']['ssp_longitude'], 'projection_latitude': 0., 'projection_altitude': self.mda['projection_parameters']['h'], 'satellite_nominal_longitude': float( self.nc.attrs['nominal_longitude'] ), 'satellite_nominal_latitude': 0.0, } try: actual_lon, actual_lat, actual_alt = self.satpos dataset.attrs['orbital_parameters'].update({ 'satellite_actual_longitude': actual_lon, 'satellite_actual_latitude': actual_lat, 'satellite_actual_altitude': actual_alt, }) except NoValidOrbitParams as err: logger.warning(err) dataset.attrs['georef_offset_corrected'] = self._get_earth_model() == 2 # remove attributes from original file which don't apply anymore strip_attrs = ["comment", "long_name", "nc_key", "scale_factor", "add_offset", "valid_min", "valid_max"] for a in strip_attrs: dataset.attrs.pop(a) def get_area_def(self, dataset_id): """Get the area def. Note that the AreaDefinition area extents returned by this function for NetCDF data will be slightly different compared to the area extents returned by the SEVIRI HRIT reader. This is due to slightly different pixel size values when calculated using the data available in the files. E.g. for the 3 km grid: ``NetCDF: self.nc.attrs['vis_ir_column_dir_grid_step'] == 3000.4031658172607`` ``HRIT: np.deg2rad(2.**16 / pdict['lfac']) * pdict['h'] == 3000.4032785810186`` This results in the Native 3 km full-disk area extents being approx. 20 cm shorter in each direction. The method for calculating the area extents used by the HRIT reader (CFAC/LFAC mechanism) keeps the highest level of numeric precision and is used as reference by EUM. For this reason, the standard area definitions defined in the `areas.yaml` file correspond to the HRIT ones. 
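The comparison can be reproduced directly; the LFAC value below is the nominal SEVIRI 3 km scaling factor and is given for illustration only::

    import numpy as np

    h = 35785831.00                    # height over surface [m]
    lfac = 13642337                    # nominal 3 km scaling factor (illustrative)
    grid_step_nc = 3000.4031658172607  # vis_ir_column_dir_grid_step * 1000. [m]

    pixel_size_hrit = np.deg2rad(2.**16 / lfac) * h
    # Both values are ~3000.403 m; the sub-millimetre difference per pixel
    # accumulates to roughly 20 cm per direction over the 3712-pixel full disk.
    print(pixel_size_hrit - grid_step_nc)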
""" pdict = {} pdict['a'] = self.mda['projection_parameters']['a'] pdict['b'] = self.mda['projection_parameters']['b'] pdict['h'] = self.mda['projection_parameters']['h'] pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] area_naming_input_dict = {'platform_name': 'msg', 'instrument_name': 'seviri', 'resolution': int(dataset_id['resolution']) } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode('seviri', pdict['ssp_lon'])}) if dataset_id['name'] == 'HRV': pdict['nlines'] = self.mda['hrv_number_of_lines'] pdict['ncols'] = self.mda['hrv_number_of_columns'] pdict['a_name'] = area_naming['area_id'] pdict['a_desc'] = area_naming['description'] pdict['p_id'] = "" else: pdict['nlines'] = self.mda['number_of_lines'] pdict['ncols'] = self.mda['number_of_columns'] pdict['a_name'] = area_naming['area_id'] pdict['a_desc'] = area_naming['description'] pdict['p_id'] = "" area = get_area_definition(pdict, self.get_area_extent(dataset_id)) return area def get_area_extent(self, dsid): """Get the area extent.""" # following calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} grid_origin = self.nc.attrs['vis_ir_grid_origin'] grid_origin = int(grid_origin, 16) if grid_origin != 2: raise NotImplementedError( 'Grid origin not supported number: {}, {} corner' .format(grid_origin, origins[grid_origin]) ) center_point = 3712/2 column_step = self.nc.attrs['vis_ir_column_dir_grid_step'] * 1000.0 line_step = self.nc.attrs['vis_ir_line_dir_grid_step'] * 1000.0 # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description earth_model = self._get_earth_model() if earth_model == 2: ns_offset = 0 # north +ve we_offset = 0 # west +ve elif earth_model == 1: ns_offset = -0.5 # north +ve we_offset = 0.5 # west +ve else: raise NotImplementedError( 'unrecognised earth model: {}'.format(earth_model) ) # section 3.1.5 of MSG Level 1.5 Image Data Format Description ll_c = (center_point - self.west - 0.5 + we_offset) * column_step ll_l = (self.south - center_point - 0.5 + ns_offset) * line_step ur_c = (center_point - self.east + 0.5 + we_offset) * column_step ur_l = (self.north - center_point + 0.5 + ns_offset) * line_step area_extent = (ll_c, ll_l, ur_c, ur_l) return area_extent def _add_scanline_acq_time(self, dataset, dataset_id): if dataset_id['name'] == 'HRV': # TODO: Enable once HRV reading has been fixed. return # days, msecs = self._get_acq_time_hrv() else: days, msecs = self._get_acq_time_visir(dataset_id) acq_time = get_cds_time(days.values, msecs.values) add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): day_key = 'channel_data_hrv_data_l10_line_mean_acquisition_time_day' msec_key = 'channel_data_hrv_data_l10_line_mean_acquisition_msec' days = self.nc[day_key].isel(channels_hrv_dim=0) msecs = self.nc[msec_key].isel(channels_hrv_dim=0) return days, msecs def _get_acq_time_visir(self, dataset_id): band_idx = list(CHANNEL_NAMES.values()).index(dataset_id['name']) day_key = 'channel_data_visir_data_l10_line_mean_acquisition_time_day' msec_key = 'channel_data_visir_data_l10_line_mean_acquisition_msec' days = self.nc[day_key].isel(channels_vis_ir_dim=band_idx) msecs = self.nc[msec_key].isel(channels_vis_ir_dim=band_idx) return days, msecs @cached_property def satpos(self): """Get actual satellite position in geodetic coordinates (WGS-84). 
Evaluate orbit polynomials at the start time of the scan. Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ start_times_poly = get_cds_time( days=self.nc['orbit_polynomial_start_time_day'].values, msecs=self.nc['orbit_polynomial_start_time_msec'].values ) end_times_poly = get_cds_time( days=self.nc['orbit_polynomial_end_time_day'].values, msecs=self.nc['orbit_polynomial_end_time_msec'].values ) orbit_polynomials = { 'StartTime': np.array([start_times_poly]), 'EndTime': np.array([end_times_poly]), 'X': self.nc['orbit_polynomial_x'].values, 'Y': self.nc['orbit_polynomial_y'].values, 'Z': self.nc['orbit_polynomial_z'].values, } poly_finder = OrbitPolynomialFinder(orbit_polynomials) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.start_time, semi_major_axis=self.mda['projection_parameters']['a'], semi_minor_axis=self.mda['projection_parameters']['b'], ) def _get_earth_model(self): return int(self.nc.attrs['type_of_earth_model'], 16) class NCSEVIRIHRVFileHandler(BaseFileHandler, SEVIRICalibrationHandler): """HRV filehandler.""" satpy-0.34.0/satpy/readers/seviri_l2_bufr.py000066400000000000000000000230231420401153000207770ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SEVIRI L2 BUFR format reader. References: EUMETSAT Product Navigator https://navigator.eumetsat.int/ """ import logging from datetime import datetime, timedelta import dask.array as da import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers._geos_area import get_geos_area_naming from satpy.readers.eum_base import get_service_mode, recarray2dict from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import mpef_product_header from satpy.resample import get_area_def try: import eccodes as ec except ImportError: raise ImportError( "Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes") logger = logging.getLogger('SeviriL2Bufr') data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0000', 'name': '09'}, 57: {'ssp': 'E0095', 'name': '10'}, 70: {'ssp': 'E0000', 'name': '11'}} seg_size_dict = {'seviri_l2_bufr_asr': 16, 'seviri_l2_bufr_cla': 16, 'seviri_l2_bufr_csr': 16, 'seviri_l2_bufr_gii': 3, 'seviri_l2_bufr_thu': 16, 'seviri_l2_bufr_toz': 3} class SeviriL2BufrFileHandler(BaseFileHandler): """File handler for SEVIRI L2 BUFR products. **Loading data with AreaDefinition** By providing the `with_area_definition` as True in the `reader_kwargs`, the dataset is loaded with an AreaDefinition using a standardized AreaDefinition in areas.yaml. By default, the dataset will be loaded with a SwathDefinition, i.e. 
similar to how the data are stored in the BUFR file: scene = satpy.Scene(filenames, reader='seviri_l2_bufr', reader_kwargs={'with_area_definition': False}) """ def __init__(self, filename, filename_info, filetype_info, with_area_definition=False, **kwargs): """Initialise the file handler for SEVIRI L2 BUFR data.""" super(SeviriL2BufrFileHandler, self).__init__(filename, filename_info, filetype_info) if ('server' in filename_info): # EUMETSAT Offline Bufr product self.mpef_header = self._read_mpef_header() else: # Product was retrieved from the EUMETSAT Data Center timeStr = self.get_attribute('typicalDate')+self.get_attribute('typicalTime') buf_start_time = datetime.strptime(timeStr, "%Y%m%d%H%M%S") sc_id = self.get_attribute('satelliteIdentifier') self.mpef_header = {} self.mpef_header['NominalTime'] = buf_start_time self.mpef_header['SpacecraftName'] = data_center_dict[sc_id]['name'] self.mpef_header['RectificationLongitude'] = data_center_dict[sc_id]['ssp'] self.with_adef = with_area_definition self.seg_size = seg_size_dict[filetype_info['file_type']] @property def start_time(self): """Return the repeat cycle start time.""" return self.mpef_header['NominalTime'] @property def end_time(self): """Return the repeat cycle end time.""" return self.start_time + timedelta(minutes=15) @property def platform_name(self): """Return spacecraft name.""" return 'MET{}'.format(self.mpef_header['SpacecraftName']) @property def ssp_lon(self): """Return subsatellite point longitude.""" # e.g. E0415 ssp_lon = self.mpef_header['RectificationLongitude'] return float(ssp_lon[1:])/10. def get_area_def(self, key): """Return the area definition.""" try: return self._area_def except AttributeError: raise NotImplementedError def _read_mpef_header(self): """Read MPEF header.""" hdr = np.fromfile(self.filename, mpef_product_header, 1) return recarray2dict(hdr) def get_attribute(self, key): """Get BUFR attributes.""" # This function is inefficient as it is looping through the entire # file to get 1 attribute. It causes a problem though if you break # from the file early - dont know why but investigating - fix later fh = open(self.filename, "rb") while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break ec.codes_set(bufr, 'unpack', 1) attr = ec.codes_get(bufr, key) ec.codes_release(bufr) fh.close() return attr def get_array(self, key): """Get all data from file for the given BUFR key.""" with open(self.filename, "rb") as fh: msgCount = 0 while True: bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break ec.codes_set(bufr, 'unpack', 1) # if is the first message initialise our final array if (msgCount == 0): arr = da.from_array(ec.codes_get_array( bufr, key, float), chunks=CHUNK_SIZE) else: tmpArr = da.from_array(ec.codes_get_array( bufr, key, float), chunks=CHUNK_SIZE) arr = da.concatenate((arr, tmpArr)) msgCount = msgCount+1 ec.codes_release(bufr) if arr.size == 1: arr = arr[0] return arr def get_dataset(self, dataset_id, dataset_info): """Create dataset. Load data from BUFR file using the BUFR key in dataset_info and create the dataset with or without an AreaDefinition. 
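Under the hood every BUFR message in the file is visited with eccodes; a condensed sketch of that loop, in which the file name and BUFR key are placeholders::

    import dask.array as da
    import eccodes as ec

    chunks = []
    with open('some_seviri_l2.bfr', 'rb') as fh:   # placeholder file name
        while True:
            bufr = ec.codes_bufr_new_from_file(fh)
            if bufr is None:
                break
            ec.codes_set(bufr, 'unpack', 1)
            arr = ec.codes_get_array(bufr, '#1#brightnessTemperature', float)  # placeholder key
            chunks.append(da.from_array(arr, chunks=4096))
            ec.codes_release(bufr)
    values = da.concatenate(chunks)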
""" arr = self.get_array(dataset_info['key']) if self.with_adef: xarr = self.get_dataset_with_area_def(arr, dataset_id) # coordinates are not relevant when returning data with an AreaDefinition if 'coordinates' in dataset_info.keys(): del dataset_info['coordinates'] else: xarr = xr.DataArray(arr, dims=["y"]) if 'fill_value' in dataset_info: xarr = xarr.where(xarr != dataset_info['fill_value']) self._add_attributes(xarr, dataset_info) return xarr def get_dataset_with_area_def(self, arr, dataset_id): """Get dataset with an AreaDefinition.""" if dataset_id['name'] in ['latitude', 'longitude']: self.__setattr__(dataset_id['name'], arr) xarr = xr.DataArray(arr, dims=["y"]) else: lons_1d, lats_1d, data_1d = da.compute(self.longitude, self.latitude, arr) self._area_def = self._construct_area_def(dataset_id) icol, irow = self._area_def.get_array_indices_from_lonlat(lons_1d, lats_1d) data_2d = np.empty(self._area_def.shape) data_2d[:] = np.nan data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask] xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=('y', 'x')) ntotal = len(icol) nvalid = len(icol.compressed()) if nvalid < ntotal: logging.warning(f'{ntotal-nvalid} out of {ntotal} data points could not be put on ' f'the grid {self._area_def.area_id}.') return xarr def _construct_area_def(self, dataset_id): """Construct a standardized AreaDefinition based on satellite, instrument, resolution and sub-satellite point. Returns: AreaDefinition: A pyresample AreaDefinition object containing the area definition. """ res = dataset_id['resolution'] area_naming_input_dict = {'platform_name': 'msg', 'instrument_name': 'seviri', 'resolution': res, } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode('seviri', self.ssp_lon)}) # Datasets with a segment size of 3 pixels extend outside the original SEVIRI 3km grid (with 1238 x 1238 # segments a 3 pixels). Hence, we need to use corresponding area defintions in areas.yaml if self.seg_size == 3: area_naming['area_id'] += '_ext' area_naming['description'] += ' (extended outside original 3km grid)' # Construct AreaDefinition from standardized area definition in areas.yaml. stand_area_def = get_area_def(area_naming['area_id']) return stand_area_def def _add_attributes(self, xarr, dataset_info): """Add dataset attributes to xarray.""" xarr.attrs['sensor'] = 'SEVIRI' xarr.attrs['platform_name'] = self.platform_name xarr.attrs['ssp_lon'] = self.ssp_lon xarr.attrs['seg_size'] = self.seg_size xarr.attrs.update(dataset_info) satpy-0.34.0/satpy/readers/seviri_l2_grib.py000066400000000000000000000261021420401153000207650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019-2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """Reader for the SEVIRI L2 products in GRIB2 format. 
References: FM 92 GRIB Edition 2 https://www.wmo.int/pages/prog/www/WMOCodes/Guides/GRIB/GRIB2_062006.pdf EUMETSAT Product Navigator https://navigator.eumetsat.int/ """ import logging from datetime import timedelta import dask.array as da import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers._geos_area import get_area_definition, get_geos_area_naming from satpy.readers.eum_base import get_service_mode from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, calculate_area_extent try: import eccodes as ec except ImportError: raise ImportError( "Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes") logger = logging.getLogger(__name__) class SeviriL2GribFileHandler(BaseFileHandler): """Reader class for SEVIRI L2 products in GRIB format.""" def __init__(self, filename, filename_info, filetype_info): """Read the global attributes and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) # Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files) ec.codes_grib_multi_support_on() @property def start_time(self): """Return the sensing start time.""" return self.filename_info['start_time'] @property def end_time(self): """Return the sensing end time.""" return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION) def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" self._area_dict['column_step'] = dataset_id.resolution self._area_dict['line_step'] = dataset_id.resolution area_extent = calculate_area_extent(self._area_dict) # Call the get_area_definition function to obtain the area area_def = get_area_definition(self._pdict, area_extent) return area_def def get_dataset(self, dataset_id, dataset_info): """Get dataset using the parameter_number key in dataset_info. In a previous version of the reader, the attributes (nrows, ncols, ssp_lon) and projection information (pdict and area_dict) were computed while initializing the file handler. Also the code would break out from the While-loop below as soon as the correct parameter_number was found. This has now been revised becasue the reader would sometimes give corrupt information about the number of messages in the file and the dataset dimensions within a given message if the file was only partly read (not looping over all messages) in an earlier instance. 
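A stripped-down version of that message loop, assuming a hypothetical file name and parameter number::

    import eccodes as ec

    ec.codes_grib_multi_support_on()   # SEVIRI L2 GRIBs pack several fields per message
    wanted = 12                        # hypothetical parameterNumber
    values = None
    with open('some_seviri_l2.grb', 'rb') as fh:   # placeholder file name
        while True:
            gid = ec.codes_grib_new_from_file(fh)
            if gid is None:
                break
            if ec.codes_get(gid, 'parameterNumber') == wanted:
                nrows = ec.codes_get(gid, 'Ny')
                ncols = ec.codes_get(gid, 'Nx')
                values = ec.codes_get_values(gid).reshape(nrows, ncols)
            ec.codes_release(gid)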
""" logger.debug('Reading in file to get dataset with parameter number %d.', dataset_info['parameter_number']) xarr = None message_found = False with open(self.filename, 'rb') as fh: # Iterate over all messages and fetch data when the correct parameter number is found while True: gid = ec.codes_grib_new_from_file(fh) if gid is None: if not message_found: # Could not obtain a valid message ID from the grib file logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", dataset_info['parameter_number']) break # Check if the parameter number in the GRIB message corresponds to the required key parameter_number = self._get_from_msg(gid, 'parameterNumber') if parameter_number == dataset_info['parameter_number']: self._res = dataset_id.resolution self._read_attributes(gid) # Read the missing value missing_value = self._get_from_msg(gid, 'missingValue') # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value xarr = self._get_xarray_from_msg(gid) xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data) ec.codes_release(gid) # Combine all metadata into the dataset attributes and break out of the loop xarr.attrs.update(dataset_info) xarr.attrs.update(self._get_attributes()) message_found = True else: # The parameter number is not the correct one, release gid and skip to next message ec.codes_release(gid) return xarr def _read_attributes(self, gid): """Read the parameter attributes from the message and create the projection and area dictionaries.""" # Read SSP and date/time self._ssp_lon = self._get_from_msg(gid, 'longitudeOfSubSatellitePointInDegrees') # Read number of points on the x and y axes self._nrows = self._get_from_msg(gid, 'Ny') self._ncols = self._get_from_msg(gid, 'Nx') # Creates the projection and area dictionaries self._pdict, self._area_dict = self._get_proj_area(gid) def _get_proj_area(self, gid): """Compute the dictionary with the projection and area definition from a GRIB message. Args: gid: The ID of the GRIB message. Returns: tuple: A tuple of two dictionaries for the projection and the area definition. 
pdict: a: Earth major axis [m] b: Earth minor axis [m] h: Height over surface [m] ssp_lon: longitude of subsatellite point [deg] nlines: number of lines ncols: number of columns a_name: name of the area a_desc: description of the area p_id: id of the projection area_dict: center_point: coordinate of the center point north: coodinate of the north limit east: coodinate of the east limit west: coodinate of the west limit south: coodinate of the south limit """ # Get name of area definition area_naming_input_dict = {'platform_name': 'msg', 'instrument_name': 'seviri', 'resolution': self._res, } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode('seviri', self._ssp_lon)}) # Read all projection and area parameters from the message earth_major_axis_in_meters = self._get_from_msg(gid, 'earthMajorAxis') * 1000.0 # [m] earth_minor_axis_in_meters = self._get_from_msg(gid, 'earthMinorAxis') * 1000.0 # [m] earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) nr_in_radius_of_earth = self._get_from_msg(gid, 'NrInRadiusOfEarth') xp_in_grid_lengths = self._get_from_msg(gid, 'XpInGridLengths') h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m] # Create the dictionary with the projection data pdict = { 'a': earth_major_axis_in_meters, 'b': earth_minor_axis_in_meters, 'h': h_in_meters, 'ssp_lon': self._ssp_lon, 'nlines': self._ncols, 'ncols': self._nrows, 'a_name': area_naming['area_id'], 'a_desc': area_naming['description'], 'p_id': "", } # Compute the dictionary with the area extension area_dict = { 'center_point': xp_in_grid_lengths, 'north': self._nrows, 'east': 1, 'west': self._ncols, 'south': 1, } return pdict, area_dict @staticmethod def _scale_earth_axis(data): """Scale Earth axis data to make sure the value matched the expected unit [m]. The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This method provides a flexible temporarily workaraound by making sure that all earth axis values are scaled such that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has been resolved by EUMETSAT this workaround can be removed. """ scale_factor = 10 ** np.ceil(np.log10(1e6/data)) return data * scale_factor def _get_xarray_from_msg(self, gid): """Read the values from the GRIB message and return a DataArray object. Args: gid: The ID of the GRIB message. Returns: DataArray: The array containing the retrieved values. """ # Data from GRIB message are read into an Xarray... xarr = xr.DataArray(da.from_array(ec.codes_get_values( gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=('y', 'x')) return xarr def _get_attributes(self): """Create a dictionary of attributes to be added to the dataset. Returns: dict: A dictionary of parameter attributes. ssp_lon: longitude of subsatellite point sensor: name of sensor platform_name: name of the platform """ orbital_parameters = { 'projection_longitude': self._ssp_lon } attributes = { 'orbital_parameters': orbital_parameters, 'sensor': 'seviri', 'platform_name': PLATFORM_DICT[self.filename_info['spacecraft']] } return attributes @staticmethod def _get_from_msg(gid, key): """Get a value from the GRIB message based on the key, return None if missing. Args: gid: The ID of the GRIB message. key: The key of the required attribute. Returns: The retrieved attribute or None if the key is missing. 
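The ad-hoc rescaling performed by ``_scale_earth_axis`` above simply forces any mis-scaled axis value back to the order of millions of metres; a small illustration with made-up inputs::

    import numpy as np

    def scale_earth_axis(data):
        # Bring the value to the order of 1e6 m, whatever the (incorrect)
        # scaling of the input was.
        scale_factor = 10 ** np.ceil(np.log10(1e6 / data))
        return data * scale_factor

    print(scale_earth_axis(6378137.0))     # already in metres -> unchanged
    print(scale_earth_axis(6378137.0e8))   # mis-scaled by 1e8 -> back to metres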
""" try: attr = ec.codes_get(gid, key) except ec.KeyValueNotFoundError: logger.warning("Key %s not found in GRIB message", key) attr = None return attr satpy-0.34.0/satpy/readers/slstr_l1b.py000066400000000000000000000351661420401153000200030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SLSTR L1b reader.""" import logging import os import re import warnings from datetime import datetime import dask.array as da import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) PLATFORM_NAMES = {'S3A': 'Sentinel-3A', 'S3B': 'Sentinel-3B'} # These are the default channel adjustment factors. # Defined in the product notice: S3.PN-SLSTR-L1.06 # https://www4-int.eumetsat.int/media/42788 CHANCALIB_FACTORS = {'S1_nadir': 1.0, 'S2_nadir': 1.0, 'S3_nadir': 1.0, 'S4_nadir': 1.0, 'S5_nadir': 1.12, 'S6_nadir': 1.2, 'S7_nadir': 1.0, 'S8_nadir': 1.0, 'S9_nadir': 1.0, 'S1_oblique': 1.0, 'S2_oblique': 1.0, 'S3_oblique': 1.0, 'S4_oblique': 1.0, 'S5_oblique': 1.15, 'S6_oblique': 1.26, 'S7_oblique': 1.0, 'S8_oblique': 1.0, 'S9_oblique': 1.0, } class NCSLSTRGeo(BaseFileHandler): """Filehandler for geo info.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(NCSLSTRGeo, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) self.cache = {} def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading %s.', key['name']) file_key = info['file_key'].format(view=key['view'].name[0], stripe=key['stripe'].name) try: variable = self.nc[file_key] except KeyError: return info = info.copy() info.update(variable.attrs) variable.attrs = info return variable @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') class NCSLSTR1B(BaseFileHandler): """Filehandler for l1 SLSTR data. By default, the calibration factors recommended by EUMETSAT are applied. This is required as the SLSTR VIS channels are producing slightly incorrect radiances that require adjustment. Satpy uses the radiance corrections in S3.PN-SLSTR-L1.06, checked 26/10/2020. User-supplied coefficients can be passed via the `user_calibration` kwarg This should be a dict of channel names (such as `S1_nadir`, `S8_oblique`). For example:: calib_dict = {'S1_nadir': 1.12} scene = satpy.Scene(filenames, reader='slstr-l1b', reader_kwargs={'user_calib': calib_dict}) Will multiply S1 nadir radiances by 1.12. 
""" def __init__(self, filename, filename_info, filetype_info, user_calibration=None): """Initialize the SLSTR l1 data filehandler.""" super(NCSLSTR1B, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) self.channel = filename_info['dataset_name'] self.stripe = filename_info['stripe'] views = {'n': 'nadir', 'o': 'oblique'} self.view = views[filename_info['view']] cal_file = os.path.join(os.path.dirname(self.filename), 'viscal.nc') self.cal = xr.open_dataset(cal_file, decode_cf=True, mask_and_scale=True, chunks={'views': CHUNK_SIZE}) indices_file = os.path.join(os.path.dirname(self.filename), 'indices_{}{}.nc'.format(self.stripe, self.view[0])) self.indices = xr.open_dataset(indices_file, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.indices = self.indices.rename({'columns': 'x', 'rows': 'y'}) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'slstr' if isinstance(user_calibration, dict): self.usercalib = user_calibration else: self.usercalib = None def _apply_radiance_adjustment(self, radiances): """Adjust SLSTR radiances with default or user supplied values.""" chan_name = self.channel + '_' + self.view adjust_fac = None if self.usercalib is not None: # If user supplied adjustment, use it. if chan_name in self.usercalib: adjust_fac = self.usercalib[chan_name] if adjust_fac is None: if chan_name in CHANCALIB_FACTORS: adjust_fac = CHANCALIB_FACTORS[chan_name] else: warnings.warn("Warning: No radiance adjustment supplied " + "for channel " + chan_name) return radiances return radiances * adjust_fac @staticmethod def _cal_rad(rad, didx, solar_flux=None): """Calibrate.""" indices = np.isfinite(didx) rad[indices] /= solar_flux[didx[indices].astype(int)] return rad def get_dataset(self, key, info): """Load a dataset.""" if (self.channel not in key['name'] or self.stripe != key['stripe'].name or self.view != key['view'].name): return logger.debug('Reading %s.', key['name']) if key['calibration'] == 'brightness_temperature': variable = self.nc['{}_BT_{}{}'.format(self.channel, self.stripe, self.view[0])] else: variable = self.nc['{}_radiance_{}{}'.format(self.channel, self.stripe, self.view[0])] radiances = self._apply_radiance_adjustment(variable) units = variable.attrs['units'] if key['calibration'] == 'reflectance': # TODO take into account sun-earth distance solar_flux = self.cal[re.sub('_[^_]*$', '', key['name']) + '_solar_irradiances'] d_index = self.indices['detector_{}{}'.format(self.stripe, self.view[0])] idx = 0 if self.view[0] == 'n' else 1 # 0: Nadir view, 1: oblique (check). 
radiances.data = da.map_blocks( self._cal_rad, radiances.data, d_index.data, solar_flux=solar_flux[:, idx].values) radiances *= np.pi * 100 units = '%' info = info.copy() info.update(radiances.attrs) info.update(key.to_dict()) info.update(dict(units=units, platform_name=self.platform_name, sensor=self.sensor, view=self.view)) radiances.attrs = info return radiances @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') class NCSLSTRAngles(BaseFileHandler): """Filehandler for angles.""" def _loadcart(self, fname): """Load a cartesian file of appropriate type.""" cartf = xr.open_dataset(fname, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) return cartf def __init__(self, filename, filename_info, filetype_info): """Initialize the angles reader.""" super(NCSLSTRAngles, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'slstr' self.view = filename_info['view'] self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] carta_file = os.path.join( os.path.dirname(self.filename), 'cartesian_a{}.nc'.format(self.view[0])) carti_file = os.path.join( os.path.dirname(self.filename), 'cartesian_i{}.nc'.format(self.view[0])) cartx_file = os.path.join( os.path.dirname(self.filename), 'cartesian_tx.nc') self.carta = self._loadcart(carta_file) self.carti = self._loadcart(carti_file) self.cartx = self._loadcart(cartx_file) def get_dataset(self, key, info): """Load a dataset.""" if not key['view'].name.startswith(self.view[0]): return logger.debug('Reading %s.', key['name']) # Check if file_key is specified in the yaml file_key = info['file_key'].format(view=key['view'].name[0]) variable = self.nc[file_key] l_step = self.nc.attrs.get('al_subsampling_factor', 1) c_step = self.nc.attrs.get('ac_subsampling_factor', 16) if key.get('resolution', 1000) == 500: l_step *= 2 c_step *= 2 if c_step != 1 or l_step != 1: logger.debug('Interpolating %s.', key['name']) # TODO: do it in cartesian coordinates ! 
pbs at date line and # possible tie_x = self.cartx['x_tx'].data[0, :][::-1] tie_y = self.cartx['y_tx'].data[:, 0] if key.get('resolution', 1000) == 500: full_x = self.carta['x_a' + self.view[0]].data full_y = self.carta['y_a' + self.view[0]].data else: full_x = self.carti['x_i' + self.view[0]].data full_y = self.carti['y_i' + self.view[0]].data variable = variable.fillna(0) variable.attrs['resolution'] = key.get('resolution', 1000) from scipy.interpolate import RectBivariateSpline spl = RectBivariateSpline( tie_y, tie_x, variable.data[:, ::-1]) values = spl.ev(full_y, full_x) variable = xr.DataArray(da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=['y', 'x'], attrs=variable.attrs) variable.attrs['platform_name'] = self.platform_name variable.attrs['sensor'] = self.sensor if 'units' not in variable.attrs: variable.attrs['units'] = 'degrees' variable.attrs.update(key.to_dict()) return variable @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') class NCSLSTRFlag(BaseFileHandler): """File handler for flags.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the flag reader.""" super(NCSLSTRFlag, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) self.stripe = filename_info['stripe'] views = {'n': 'nadir', 'o': 'oblique'} self.view = views[filename_info['view']] # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'slstr' def get_dataset(self, key, info): """Load a dataset.""" if (self.stripe != key['stripe'].name or self.view != key['view'].name): return logger.debug('Reading %s.', key['name']) file_key = info['file_key'].format(view=key['view'].name[0], stripe=key['stripe'].name) variable = self.nc[file_key] info = info.copy() info.update(variable.attrs) info.update(key.to_dict()) info.update(dict(platform_name=self.platform_name, sensor=self.sensor)) variable.attrs = info return variable @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') satpy-0.34.0/satpy/readers/slstr_l2.py000066400000000000000000000061651420401153000176370ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
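The angle datasets handled by ``NCSLSTRAngles`` above live on a coarse tie-point grid and are expanded to the full image grid with a bivariate spline; a self-contained sketch of that interpolation on a toy grid::

    import numpy as np
    from scipy.interpolate import RectBivariateSpline

    # Toy tie-point grid; the spline needs strictly increasing coordinates.
    tie_y = np.linspace(0.0, 1200.0, 13)
    tie_x = np.linspace(0.0, 1500.0, 16)
    tie_values = np.random.random((tie_y.size, tie_x.size))   # e.g. a zenith angle

    # Full-resolution pixel coordinates at which the values are needed.
    full_y, full_x = np.meshgrid(np.linspace(0.0, 1200.0, 1200),
                                 np.linspace(0.0, 1500.0, 1500),
                                 indexing='ij')

    spl = RectBivariateSpline(tie_y, tie_x, tie_values)
    full_values = spl.ev(full_y, full_x)    # same shape as full_y / full_x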
"""Reader for Sentinel-3 SLSTR SST data.""" from datetime import datetime import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler class SLSTRL2FileHandler(BaseFileHandler): """File handler for Sentinel-3 SSL L2 netCDF files.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Initialize the file handler for Sentinel-3 SSL L2 netCDF data.""" super(SLSTRL2FileHandler, self).__init__(filename, filename_info, filetype_info) if filename.endswith('tar'): import os import tarfile import tempfile with tempfile.TemporaryDirectory() as tempdir: with tarfile.open(name=filename, mode='r') as tf: sst_filename = next((name for name in tf.getnames() if name.endswith('nc') and 'GHRSST-SSTskin' in name)) tf.extract(sst_filename, tempdir) fullpath = os.path.join(tempdir, sst_filename) self.nc = xr.open_dataset(fullpath, decode_cf=True, mask_and_scale=True, engine=engine, chunks={'ni': CHUNK_SIZE, 'nj': CHUNK_SIZE}) else: self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=True, engine=engine, chunks={'ni': CHUNK_SIZE, 'nj': CHUNK_SIZE}) self.nc = self.nc.rename({'ni': 'x', 'nj': 'y'}) self.filename_info['start_time'] = datetime.strptime( self.nc.start_time, '%Y%m%dT%H%M%SZ') self.filename_info['end_time'] = datetime.strptime( self.nc.stop_time, '%Y%m%dT%H%M%SZ') def get_dataset(self, key, info): """Get any available dataset.""" stdname = info.get('standard_name') return self.nc[stdname].squeeze() @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info['end_time'] satpy-0.34.0/satpy/readers/smos_l2_wind.py000066400000000000000000000146061420401153000204710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SMOS L2 wind Reader. 
Data can be found here after register: https://www.smosstorm.org/Data2/SMOS-NRT-wind-Products-access Format documentation at the same site after register: SMOS_WIND_DS_PDD_20191107_signed.pdf """ import logging from datetime import datetime import numpy as np from pyresample.geometry import AreaDefinition from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 logger = logging.getLogger(__name__) class SMOSL2WINDFileHandler(NetCDF4FileHandler): """File handler for SMOS L2 wind netCDF files.""" @property def start_time(self): """Get start time.""" return datetime.strptime(self['/attr/time_coverage_start'], "%Y-%m-%dT%H:%M:%S Z") @property def end_time(self): """Get end time.""" return datetime.strptime(self['/attr/time_coverage_end'], "%Y-%m-%dT%H:%M:%S Z") @property def platform_shortname(self): """Get platform shortname.""" return self.filename_info['platform_shortname'] @property def platform_name(self): """Get platform.""" return self['/attr/platform'] def get_metadata(self, data, ds_info): """Get metadata.""" metadata = {} metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ 'platform_shortname': self.platform_shortname, 'platform_name': self.platform_name, 'sensor': self['/attr/instrument'], 'start_time': self.start_time, 'end_time': self.end_time, 'level': self['/attr/processing_level'], }) return metadata def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" handled_variables = set() # Iterate over dataset contents for var_name, val in self.file_content.items(): # Only evaluate variables if not isinstance(val, netCDF4.Variable): continue if (var_name in handled_variables): logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) new_info = { 'name': var_name, 'file_type': self.filetype_info['file_type'], } yield True, new_info def _mask_dataset(self, data): """Mask out fill values.""" try: fill = data.attrs['_FillValue'] data.attrs['_FillValue'] = np.nan return data.where(data != fill) except KeyError: return data def _adjust_lon_coord(self, data): """Adjust lon coordinate to -180 .. 180 ( not 0 .. 360).""" data = data.assign_coords(lon=(((data.lon + 180) % 360) - 180)) return data.where(data < 180., data - 360.) def _rename_coords(self, data): """Rename coords.""" rename_dict = {} if 'lon' in data.dims: data = self._adjust_lon_coord(data) rename_dict['lon'] = 'x' if 'lat' in data.dims: rename_dict['lat'] = 'y' # Rename the coordinates to x and y return data.rename(rename_dict) def _remove_time_coordinate(self, data): """Remove time coordinate.""" # Remove dimension where size is 1, eg. time data = data.squeeze() # Remove if exists time as coordinate if 'time' in data.coords: data = data.drop_vars('time') return data def _roll_dataset_lon_coord(self, data): """Roll dataset along the lon coordinate.""" if 'lon' in data.dims: data = data.roll(lon=720, roll_coords=True) return data def get_dataset(self, ds_id, ds_info): """Get dataset.""" data = self[ds_id['name']] data.attrs = self.get_metadata(data, ds_info) data = self._remove_time_coordinate(data) data = self._roll_dataset_lon_coord(data) data = self._rename_coords(data) data = self._mask_dataset(data) if len(data.dims) >= 2 and all([dim in data.dims for dim in ['x', 'y']]): # Remove the first and last row as these values extends beyond +-90 latitude # if the dataset contains the y dimmension. # As this is data over open sea these has no values. 
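The coordinate fixes used by this handler are simple element-wise operations; for instance the 0..360 to -180..180 longitude adjustment done in ``_adjust_lon_coord`` can be reproduced on a bare array::

    import numpy as np

    lon = np.array([0.0, 90.0, 180.0, 270.0, 359.75])   # example 0..360 longitudes
    wrapped = ((lon + 180.0) % 360.0) - 180.0
    # -> [  0.    90.  -180.   -90.    -0.25]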
data = data.where((data.y > -90.0) & (data.y < 90.0), drop=True) elif len(data.dims) == 1 and 'y' in data.dims: data = data.where((data.y > 0) & (data.y < len(data.y) - 1), drop=True) return data def _create_area_extent(self, width, height): """Create area extent.""" # Creating a meshgrid, not needed actually, but makes it easy to find extremes _lon = self._adjust_lon_coord(self['lon']) _lon = self._roll_dataset_lon_coord(_lon) latlon = np.meshgrid(_lon, self['lat'][1:self['lat/shape'][0] - 1]) lower_left_x = latlon[0][height - 1][0] - 0.125 lower_left_y = latlon[1][height - 1][0] + 0.125 upper_right_y = latlon[1][1][width - 1] - 0.125 upper_right_x = latlon[0][1][width - 1] + 0.125 return (lower_left_x, lower_left_y, upper_right_x, upper_right_y) def get_area_def(self, dsid): """Define AreaDefintion.""" width = self['lon/shape'][0] height = self['lat/shape'][0] - 2 area_extent = self._create_area_extent(width, height) description = "SMOS L2 Wind Equirectangular Projection" area_id = 'smos_eqc' proj_id = 'equirectangular' proj_dict = {'init': self['/attr/geospatial_bounds_vertical_crs']} area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def satpy-0.34.0/satpy/readers/tropomi_l2.py000066400000000000000000000225631420401153000201610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to TROPOMI L2 Reader. The TROPOspheric Monitoring Instrument (TROPOMI) is the satellite instrument on board the Copernicus Sentinel-5 Precursor satellite. It measures key atmospheric trace gasses, such as ozone, nitrogen oxides, sulfur dioxide, carbon monoxide, methane, and formaldehyde. Level 2 data products are available via the Copernicus Open Access Hub. 
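A minimal usage sketch with Satpy's ``Scene`` (the file path and dataset name
below are illustrative placeholders; the variables actually offered depend on
the product file)::

    from satpy import Scene

    scn = Scene(filenames=['/path/to/S5P_tropomi_l2_product.nc'],
                reader='tropomi_l2')
    scn.load(['nitrogendioxide_tropospheric_column'])
    print(scn['nitrogendioxide_tropospheric_column'])
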
For more information visit the following URL: http://www.tropomi.eu/data-products/level-2-products """ import logging import dask.array as da import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 logger = logging.getLogger(__name__) class TROPOMIL2FileHandler(NetCDF4FileHandler): """File handler for TROPOMI L2 netCDF files.""" @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def platform_shortname(self): """Get platform shortname.""" return self.filename_info['platform_shortname'] @property def sensor(self): """Get sensor.""" res = self['/attr/sensor'] if isinstance(res, np.ndarray): return str(res.astype(str)).lower() return res.lower() @property def sensor_names(self): """Get sensor set.""" return {self.sensor} def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" logger.debug("Available_datasets begin...") # Determine shape of the geolocation data (lat/lon) lat_shape = None for var_name, _val in self.file_content.items(): # Could probably avoid this hardcoding, will think on it if (var_name == 'PRODUCT/latitude'): lat_shape = self[var_name + "/shape"] break handled_variables = set() # update previously configured datasets logger.debug("Starting previously configured variables loop...") # if bounds exists, we can assemble them later bounds_exist = 'latitude_bounds' in self and 'longitude_bounds' in self for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get('file_key', ds_info['name']) # logger.debug("Evaluating previously configured variable: %s", var_name) matches = self.file_type_matches(ds_info['file_type']) # we can confidently say that we can provide this dataset and can # provide more info assembled = var_name in ['assembled_lat_bounds', 'assembled_lon_bounds'] if (matches and var_name in self) or (assembled and bounds_exist): logger.debug("Handling previously configured variable: %s", var_name) if not assembled: # Because assembled variables and bounds use the same file_key, # we need to omit file_key once. handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info yield from self._iterate_over_dataset_contents(handled_variables, lat_shape) def _iterate_over_dataset_contents(self, handled_variables, shape): """Iterate over dataset contents. 
This is where we dynamically add new datasets We will sift through all groups and variables, looking for data matching the geolocation bounds """ for var_name, val in self.file_content.items(): # Only evaluate variables if isinstance(val, netCDF4.Variable): logger.debug("Evaluating new variable: %s", var_name) var_shape = self[var_name + "/shape"] logger.debug("Dims:{}".format(var_shape)) if shape == var_shape[:len(shape)]: logger.debug("Found valid additional dataset: %s", var_name) # Skip anything we have already configured if var_name in handled_variables: logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) last_index_separator = var_name.rindex('/') last_index_separator = last_index_separator + 1 var_name_no_path = var_name[last_index_separator:] logger.debug("Using short name of: %s", var_name_no_path) # Create new ds_info object if var_name_no_path in ['latitude_bounds', 'longitude_bounds']: coordinates = [] else: coordinates = ['longitude', 'latitude'] new_info = { 'name': var_name_no_path, 'file_key': var_name, 'coordinates': coordinates, 'file_type': self.filetype_info['file_type'], } yield True, new_info def get_metadata(self, data, ds_info): """Get metadata.""" metadata = {} metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ 'platform_shortname': self.platform_shortname, 'sensor': self.sensor, 'start_time': self.start_time, 'end_time': self.end_time, }) return metadata def _rename_dims(self, data_arr): """Normalize dimension names with the rest of Satpy.""" dims_dict = {} if 'ground_pixel' in data_arr.dims: dims_dict['ground_pixel'] = 'x' if 'scanline' in data_arr.dims: dims_dict['scanline'] = 'y' return data_arr.rename(dims_dict) def prepare_geo(self, bounds_data): """Prepare lat/lon bounds for pcolormesh. 
lat/lon bounds are ordered in the following way:: 3----2 | | 0----1 Extend longitudes and latitudes with one element to support "pcolormesh":: (X[i+1, j], Y[i+1, j]) (X[i+1, j+1], Y[i+1, j+1]) +--------+ | C[i,j] | +--------+ (X[i, j], Y[i, j]) (X[i, j+1], Y[i, j+1]) """ # Create the left array left = np.vstack([bounds_data[:, :, 0], bounds_data[-1:, :, 3]]) # Create the right array right = np.vstack([bounds_data[:, -1:, 1], bounds_data[-1:, -1:, 2]]) # Stack horizontally dest = np.hstack([left, right]) # Convert to DataArray dask_dest = da.from_array(dest, chunks=CHUNK_SIZE) dest = xr.DataArray(dask_dest, dims=('y_bounds', 'x_bounds'), attrs=bounds_data.attrs ) return dest def get_dataset(self, ds_id, ds_info): """Get dataset.""" logger.debug("Getting data for: %s", ds_id['name']) file_key = ds_info.get('file_key', ds_id['name']) data = self[file_key] data.attrs = self.get_metadata(data, ds_info) fill_value = data.attrs.get('_FillValue', np.float32(np.nan)) data = data.squeeze() # preserve integer data types if possible if np.issubdtype(data.dtype, np.integer): new_fill = fill_value else: new_fill = np.float32(np.nan) data.attrs.pop('_FillValue', None) good_mask = data != fill_value scale_factor = data.attrs.get('scale_factor') add_offset = data.attrs.get('add_offset') if scale_factor is not None: data = data * scale_factor + add_offset data = data.where(good_mask, new_fill) data = self._rename_dims(data) # drop coords whose units are not meters drop_list = ['y', 'x', 'layer', 'vertices'] coords_exist = [coord for coord in drop_list if coord in data.coords] if coords_exist: data = data.drop_vars(coords_exist) if ds_id['name'] in ['assembled_lat_bounds', 'assembled_lon_bounds']: data = self.prepare_geo(data) return data satpy-0.34.0/satpy/readers/utils.py000066400000000000000000000331561420401153000172330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helper functions for satpy readers.""" import bz2 import logging import os import shutil import tempfile import warnings from contextlib import closing from io import BytesIO from shutil import which from subprocess import PIPE, Popen # nosec import numpy as np import pyproj import xarray as xr from pyresample.geometry import AreaDefinition from satpy import CHUNK_SIZE LOGGER = logging.getLogger(__name__) def np2str(value): """Convert an `numpy.string_` to str. 
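    Example (illustrative)::

        >>> import numpy as np
        >>> np2str(np.array(b'MSG4'))
        'MSG4'
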
Args: value (ndarray): scalar or 1-element numpy array to convert Raises: ValueError: if value is array larger than 1-element or it is not of type `numpy.string_` or it is not a numpy array """ if hasattr(value, 'dtype') and \ issubclass(value.dtype.type, (np.str_, np.string_, np.object_)) \ and value.size == 1: value = value.item() if not isinstance(value, str): # python 3 - was scalar numpy array of bytes # otherwise python 2 - scalar numpy array of 'str' value = value.decode() return value else: raise ValueError("Array is not a string type or is larger than 1") def _get_geostationary_height(geos_area): params = geos_area.crs.coordinate_operation.params h_param = [p for p in params if 'satellite height' in p.name.lower()][0] return h_param.value def _get_geostationary_reference_longitude(geos_area): params = geos_area.crs.coordinate_operation.params lon_0_params = [p for p in params if 'longitude of natural origin' in p.name.lower()] if not lon_0_params: return 0 elif len(lon_0_params) != 1: raise ValueError("Not sure how to get reference longitude " "information from AreaDefinition.") return lon_0_params[0].value def _get_geostationary_semi_axes(geos_area): from pyresample.utils import proj4_radius_parameters return proj4_radius_parameters(geos_area.crs) def get_geostationary_angle_extent(geos_area): """Get the max earth (vs space) viewing angles in x and y.""" # TODO: take into account sweep_axis_angle parameter a, b = _get_geostationary_semi_axes(geos_area) h = _get_geostationary_height(geos_area) req = float(a) / 1000 rp = float(b) / 1000 h = float(h) / 1000 + req # compute some constants aeq = 1 - req**2 / (h ** 2) ap_ = 1 - rp**2 / (h ** 2) # generate points around the north hemisphere in satellite projection # make it a bit smaller so that we stay inside the valid area xmax = np.arccos(np.sqrt(aeq)) ymax = np.arccos(np.sqrt(ap_)) return xmax, ymax def get_geostationary_mask(area, chunks=None): """Compute a mask of the earth's shape as seen by a geostationary satellite. Args: area (pyresample.geometry.AreaDefinition) : Corresponding area definition chunks (int or tuple): Chunk size for the 2D array that is generated. Returns: Boolean mask, True inside the earth's shape, False outside. """ # Compute projection coordinates at the earth's limb h = _get_geostationary_height(area) xmax, ymax = get_geostationary_angle_extent(area) xmax *= h ymax *= h # Compute projection coordinates at the centre of each pixel x, y = area.get_proj_coords(chunks=chunks or CHUNK_SIZE) # Compute mask of the earth's elliptical shape return ((x / xmax) ** 2 + (y / ymax) ** 2) <= 1 def _lonlat_from_geos_angle(x, y, geos_area): """Get lons and lats from x, y in projection coordinates.""" a, b = _get_geostationary_semi_axes(geos_area) h = _get_geostationary_height(geos_area) lon_0 = _get_geostationary_reference_longitude(geos_area) h__ = float(h + a) / 1000 b__ = (a / float(b)) ** 2 sd = np.sqrt((h__ * np.cos(x) * np.cos(y)) ** 2 - (np.cos(y)**2 + b__ * np.sin(y)**2) * (h__**2 - (float(a) / 1000)**2)) # sd = 0 sn = (h__ * np.cos(x) * np.cos(y) - sd) / (np.cos(y)**2 + b__ * np.sin(y)**2) s1 = h__ - sn * np.cos(x) * np.cos(y) s2 = sn * np.sin(x) * np.cos(y) s3 = -sn * np.sin(y) sxy = np.sqrt(s1**2 + s2**2) lons = np.rad2deg(np.arctan2(s2, s1)) + lon_0 lats = np.rad2deg(-np.arctan2(b__ * s3, sxy)) return lons, lats def get_geostationary_bounding_box(geos_area, nb_points=50): """Get the bbox in lon/lats of the valid pixels inside *geos_area*. Args: geos_area: The geostationary area to analyse. 
nb_points: Number of points on the polygon """ xmax, ymax = get_geostationary_angle_extent(geos_area) h = _get_geostationary_height(geos_area) # generate points around the north hemisphere in satellite projection # make it a bit smaller so that we stay inside the valid area x = np.cos(np.linspace(-np.pi, 0, nb_points // 2)) * (xmax - 0.001) y = -np.sin(np.linspace(-np.pi, 0, nb_points // 2)) * (ymax - 0.001) # clip the projection coordinates to fit the area extent of geos_area ll_x, ll_y, ur_x, ur_y = (np.array(geos_area.area_extent) / float(h)) x = np.clip(np.concatenate([x, x[::-1]]), min(ll_x, ur_x), max(ll_x, ur_x)) y = np.clip(np.concatenate([y, -y]), min(ll_y, ur_y), max(ll_y, ur_y)) return _lonlat_from_geos_angle(x, y, geos_area) def get_sub_area(area, xslice, yslice): """Apply slices to the area_extent and size of the area.""" new_area_extent = ((area.pixel_upper_left[0] + (xslice.start - 0.5) * area.pixel_size_x), (area.pixel_upper_left[1] - (yslice.stop - 0.5) * area.pixel_size_y), (area.pixel_upper_left[0] + (xslice.stop - 0.5) * area.pixel_size_x), (area.pixel_upper_left[1] - (yslice.start - 0.5) * area.pixel_size_y)) return AreaDefinition(area.area_id, area.name, area.proj_id, area.crs, xslice.stop - xslice.start, yslice.stop - yslice.start, new_area_extent) def unzip_file(filename): """Unzip the file if file is bzipped = ending with 'bz2'.""" if filename.endswith('bz2'): fdn, tmpfilepath = tempfile.mkstemp() LOGGER.info("Using temp file for BZ2 decompression: %s", tmpfilepath) # try pbzip2 pbzip = which('pbzip2') # Run external pbzip2 if pbzip is not None: n_thr = os.environ.get('OMP_NUM_THREADS') if n_thr: runner = [pbzip, '-dc', '-p'+str(n_thr), filename] else: runner = [pbzip, '-dc', filename] p = Popen(runner, stdout=PIPE, stderr=PIPE) # nosec stdout = BytesIO(p.communicate()[0]) status = p.returncode if status != 0: raise IOError("pbzip2 error '%s', failed, status=%d" % (filename, status)) with closing(os.fdopen(fdn, 'wb')) as ofpt: try: stdout.seek(0) shutil.copyfileobj(stdout, ofpt) except IOError: import traceback traceback.print_exc() LOGGER.info("Failed to read bzipped file %s", str(filename)) os.remove(tmpfilepath) raise return tmpfilepath # Otherwise, fall back to the original method bz2file = bz2.BZ2File(filename) with closing(os.fdopen(fdn, 'wb')) as ofpt: try: ofpt.write(bz2file.read()) except IOError: import traceback traceback.print_exc() LOGGER.info("Failed to read bzipped file %s", str(filename)) os.remove(tmpfilepath) return None return tmpfilepath return None class unzip_context(): """Context manager for uncompressing a .bz2 file on the fly. Uses `unzip_file`. Removes the uncompressed file on exit of the context manager. Returns: the filename of the uncompressed file or of the original file if it was not compressed. """ def __init__(self, filename): """Keep original filename.""" self.input_filename = filename def __enter__(self): """Uncompress file if necessary and return the relevant filename for the file handler.""" unzipped = unzip_file(self.input_filename) if unzipped is not None: self.unzipped_filename = unzipped return unzipped else: self.unzipped_filename = None return self.input_filename def __exit__(self, exc_type, exc_value, traceback): """Remove temporary file.""" if self.unzipped_filename is not None: os.remove(self.unzipped_filename) def bbox(img): """Find the bounding box around nonzero elements in the given array. Copied from https://stackoverflow.com/a/31402351/5703449 . 
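    Example (illustrative): for a 3x3 array whose only nonzero element is at
    row 1, column 1, ``bbox`` returns ``(1, 1, 1, 1)``.
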
Returns: rowmin, rowmax, colmin, colmax """ rows = np.any(img, axis=1) cols = np.any(img, axis=0) rmin, rmax = np.where(rows)[0][[0, -1]] cmin, cmax = np.where(cols)[0][[0, -1]] return rmin, rmax, cmin, cmax def get_earth_radius(lon, lat, a, b): """Compute radius of the earth ellipsoid at the given longitude and latitude. Args: lon: Geodetic longitude (degrees) lat: Geodetic latitude (degrees) a: Semi-major axis of the ellipsoid (meters) b: Semi-minor axis of the ellipsoid (meters) Returns: Earth Radius (meters) """ geocent = pyproj.Proj(proj='geocent', a=a, b=b, units='m') latlong = pyproj.Proj(proj='latlong', a=a, b=b, units='m') x, y, z = pyproj.transform(latlong, geocent, lon, lat, 0.) return np.sqrt(x**2 + y**2 + z**2) def reduce_mda(mda, max_size=100): """Recursively remove arrays with more than `max_size` elements from the given metadata dictionary.""" reduced = {} for key, val in mda.items(): if isinstance(val, dict): reduced[key] = reduce_mda(val, max_size) elif not (isinstance(val, np.ndarray) and val.size > max_size): reduced[key] = val return reduced def get_user_calibration_factors(band_name, correction_dict): """Retrieve radiance correction factors from user-supplied dict.""" if band_name in correction_dict: try: slope = correction_dict[band_name]['slope'] offset = correction_dict[band_name]['offset'] except KeyError: raise KeyError("Incorrect correction factor dictionary. You must " "supply 'slope' and 'offset' keys.") else: # If coefficients not present, warn user and use slope=1, offset=0 warnings.warn("WARNING: You have selected radiance correction but " " have not supplied coefficients for channel " + band_name) return 1., 0. return slope, offset def apply_rad_correction(data, slope, offset): """Apply GSICS-like correction factors to radiance data.""" data = (data - offset) / slope return data def get_array_date(scn_data, utc_date=None): """Get start time from a channel data array.""" if utc_date is None: try: utc_date = scn_data.attrs['start_time'] except KeyError: try: utc_date = scn_data.attrs['scheduled_time'] except KeyError: raise KeyError('Scene has no start_time ' 'or scheduled_time attribute.') return utc_date def apply_earthsun_distance_correction(reflectance, utc_date=None): """Correct reflectance data to account for changing Earth-Sun distance.""" from pyorbital.astronomy import sun_earth_distance_correction utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) reflectance.attrs['sun_earth_distance_correction_applied'] = True reflectance.attrs['sun_earth_distance_correction_factor'] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance * sun_earth_dist * sun_earth_dist return reflectance def remove_earthsun_distance_correction(reflectance, utc_date=None): """Remove the sun-earth distance correction.""" from pyorbital.astronomy import sun_earth_distance_correction utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) reflectance.attrs['sun_earth_distance_correction_applied'] = False reflectance.attrs['sun_earth_distance_correction_factor'] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance / (sun_earth_dist * sun_earth_dist) return reflectance satpy-0.34.0/satpy/readers/vaisala_gld360.py000066400000000000000000000062161420401153000205670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Vaisala Global Lightning Dataset 360 reader. Vaisala Global Lightning Dataset GLD360 is data as a service that provides real-time lightning data for accurate and early detection and tracking of severe weather. The data provided is generated by a Vaisala owned and operated world-wide lightning detection sensor network. References: - [GLD360] https://www.vaisala.com/en/products/data-subscriptions-and-reports/data-sets/gld360 """ import logging import dask.array as da import pandas as pd import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class VaisalaGLD360TextFileHandler(BaseFileHandler): """ASCII reader for Vaisala GDL360 data.""" def __init__(self, filename, filename_info, filetype_info): """Initialize VaisalaGLD360TextFileHandler.""" super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info) names = ['gld360_date', 'gld360_time', 'latitude', 'longitude', 'power', 'unit'] types = ['str', 'str', 'float', 'float', 'float', 'str'] dtypes = dict(zip(names, types)) # Combine 'date' and 'time' into a datetime object parse_dates = {'time': ['gld360_date', 'gld360_time']} self.data = pd.read_csv(filename, delim_whitespace=True, header=None, names=names, dtype=dtypes, parse_dates=parse_dates) @property def start_time(self): """Get start time.""" return self.data['time'].iloc[0] @property def end_time(self): """Get end time.""" return self.data['time'].iloc[-1] def get_dataset(self, dataset_id, dataset_info): """Load a dataset.""" xarr = xr.DataArray(da.from_array(self.data[dataset_id['name']], chunks=CHUNK_SIZE), dims=["y"]) # Add time, longitude, and latitude as non-dimensional y-coordinates xarr['time'] = ('y', self.data['time']) xarr['longitude'] = ('y', self.data['longitude']) xarr['latitude'] = ('y', self.data['latitude']) if dataset_id['name'] == 'power': # Check that units in the file match the unit specified in the # reader yaml-file if not (self.data.unit == dataset_info['units']).all(): raise ValueError('Inconsistent units found in file!') xarr.attrs.update(dataset_info) return xarr satpy-0.34.0/satpy/readers/vii_base_nc.py000066400000000000000000000225441420401153000203330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""EUMETSAT EPS-SG Visible/Infrared Imager (VII) readers base class.""" import logging from datetime import datetime from geotiepoints.viiinterpolator import tie_points_geo_interpolation, tie_points_interpolation from satpy.readers.netcdf_utils import NetCDF4FileHandler from satpy.readers.vii_utils import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR logger = logging.getLogger(__name__) class ViiNCBaseFileHandler(NetCDF4FileHandler): """Base reader class for VII products in netCDF format. Args: filename (str): File to read filename_info (dict): Dictionary with filename information filetype_info (dict): Dictionary with filetype information orthorect (bool): activates the orthorectification correction where available """ def __init__(self, filename, filename_info, filetype_info, orthorect=False): """Prepare the class for dataset reading.""" super().__init__(filename, filename_info, filetype_info, auto_maskandscale=True) # Saves the orthorectification flag self.orthorect = orthorect and filetype_info.get('orthorect', True) # Saves the interpolation flag self.interpolate = filetype_info.get('interpolate', True) try: longitude = self[filetype_info['cached_longitude']] latitude = self[filetype_info['cached_latitude']] if self.interpolate: self.longitude, self.latitude = self._perform_geo_interpolation(longitude, latitude) else: self.longitude, self.latitude = longitude, latitude except KeyError: logger.warning("Cached longitude and/or latitude datasets are not correctly defined in YAML file") self.longitude, self.latitude = None, None def _standardize_dims(self, variable): """Standardize dims to y, x.""" if 'num_pixels' in variable.dims: variable = variable.rename({'num_pixels': 'x', 'num_lines': 'y'}) if variable.dims[0] == 'x': variable = variable.transpose('y', 'x') return variable def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" var_key = dataset_info['file_key'] logger.debug('Reading in file to get dataset with key %s.', var_key) if var_key == 'cached_longitude' and self.longitude is not None: variable = self.longitude.copy() elif var_key == 'cached_latitude' and self.latitude is not None: variable = self.latitude.copy() else: try: variable = self[var_key] except KeyError: logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) return None # If the dataset is marked for interpolation, perform the interpolation from tie points to pixels if dataset_info.get('interpolate', False) and self.interpolate: variable = self._perform_interpolation(variable) # Perform the calibration if required if dataset_info.get('calibration') is not None: variable = self._perform_calibration(variable, dataset_info) # Perform the orthorectification if required if self.orthorect: orthorect_data_name = dataset_info.get('orthorect_data', None) if orthorect_data_name is not None: variable = self._perform_orthorectification(variable, orthorect_data_name) # If the dataset contains a longitude, change it to the interval [0., 360.) as natively in the product # since the unwrapping performed during the interpolation might have created values outside this range if dataset_info.get('standard_name', None) == 'longitude': variable %= 360. 
# Manage the attributes of the dataset variable.attrs.setdefault('units', None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) variable = self._standardize_dims(variable) return variable @staticmethod def _perform_interpolation(variable): """Perform the interpolation from tie points to pixel points. Args: variable: xarray DataArray containing the dataset to interpolate. Returns: DataArray: array containing the interpolate values, all the original metadata and the updated dimension names. """ interpolated_values = tie_points_interpolation( [variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR )[0] new_variable = interpolated_values.rename( num_tie_points_act='num_pixels', num_tie_points_alt='num_lines' ) new_variable.name = variable.name new_variable.attrs = variable.attrs return new_variable @staticmethod def _perform_geo_interpolation(longitude, latitude): """Perform the interpolation of geographic coodinates from tie points to pixel points. Args: longitude: xarray DataArray containing the longitude dataset to interpolate. latitude: xarray DataArray containing the longitude dataset to interpolate. Returns: tuple of arrays containing the interpolate values, all the original metadata and the updated dimension names. """ interpolated_longitude, interpolated_latitude = tie_points_geo_interpolation( longitude, latitude, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR ) new_longitude = interpolated_longitude.rename( num_tie_points_act='num_pixels', num_tie_points_alt='num_lines' ) new_longitude.name = longitude.name new_longitude.attrs = longitude.attrs new_latitude = interpolated_latitude.rename( num_tie_points_act='num_pixels', num_tie_points_alt='num_lines' ) new_latitude.name = latitude.name new_latitude.attrs = latitude.attrs return new_longitude, new_latitude def _perform_orthorectification(self, variable, orthorect_data_name): """Perform the orthorectification.""" raise NotImplementedError def _perform_calibration(self, variable, dataset_info): """Perform the calibration.""" raise NotImplementedError def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets.""" attributes = { 'filename': self.filename, 'start_time': self.start_time, 'end_time': self.end_time, 'spacecraft_name': self.spacecraft_name, 'ssp_lon': self.ssp_lon, 'sensor': self.sensor, 'filename_start_time': self.filename_info['sensing_start_time'], 'filename_end_time': self.filename_info['sensing_end_time'], 'platform_name': self.spacecraft_name, } # Add a "quality_group" item to the dictionary with all the variables and attributes # which are found in the 'quality' group of the VII product quality_group = self['quality'] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group where possible try: quality_dict[key] = quality_group[key].values except ValueError: quality_dict[key] = None # Add the attributes of the quality group quality_dict.update(quality_group.attrs) attributes['quality_group'] = quality_dict return attributes @property def start_time(self): """Get observation start time.""" try: start_time = datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y%m%d%H%M%S.%f') except ValueError: start_time = datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y-%m-%d %H:%M:%S.%f') return start_time @property def end_time(self): """Get observation end time.""" try: end_time = datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y%m%d%H%M%S.%f') except ValueError: end_time = 
datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y-%m-%d %H:%M:%S.%f') return end_time @property def spacecraft_name(self): """Return spacecraft name.""" return self['/attr/spacecraft'] @property def sensor(self): """Return sensor.""" return self['/attr/instrument'] @property def ssp_lon(self): """Return subsatellite point longitude.""" # This parameter is not applicable to VII return None satpy-0.34.0/satpy/readers/vii_l1b_nc.py000066400000000000000000000144421420401153000200750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """EUMETSAT EPS-SG Visible/Infrared Imager (VII) Level 1B products reader. The ``vii_l1b_nc`` reader reads and calibrates EPS-SG VII L1b image data in netCDF format. The format is explained in the `EPS-SG VII Level 1B Product Format Specification`_. .. _EPS-SG VII Level 1B Product Format Specification: https://www.eumetsat.int/media/44393 """ import logging import numpy as np from satpy.readers.vii_base_nc import ViiNCBaseFileHandler from satpy.readers.vii_utils import C1, C2, MEAN_EARTH_RADIUS logger = logging.getLogger(__name__) class ViiL1bNCFileHandler(ViiNCBaseFileHandler): """Reader class for VII L1B products in netCDF format.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Read the calibration data and prepare the class for dataset reading.""" super().__init__(filename, filename_info, filetype_info, **kwargs) # Read the variables which are required for the calibration self._bt_conversion_a = self['data/calibration_data/bt_conversion_a'].values self._bt_conversion_b = self['data/calibration_data/bt_conversion_b'].values self._channel_cw_thermal = self['data/calibration_data/channel_cw_thermal'].values self._integrated_solar_irradiance = self['data/calibration_data/Band_averaged_solar_irradiance'].values # Computes the angle factor for reflectance calibration as inverse of cosine of solar zenith angle # (the values in the product file are on tie points and in degrees, # therefore interpolation and conversion to radians are required) solar_zenith_angle = self['data/measurement_data/solar_zenith'] solar_zenith_angle_on_pixels = self._perform_interpolation(solar_zenith_angle) solar_zenith_angle_on_pixels_radians = np.radians(solar_zenith_angle_on_pixels) self.angle_factor = 1.0 / (np.cos(solar_zenith_angle_on_pixels_radians)) def _perform_calibration(self, variable, dataset_info): """Perform the calibration. Args: variable: xarray DataArray containing the dataset to calibrate. dataset_info: dictionary of information about the dataset. Returns: DataArray: array containing the calibrated values and all the original metadata. 
""" calibration_name = dataset_info['calibration'] if calibration_name == 'brightness_temperature': # Extract the values of calibration coefficients for the current channel chan_index = dataset_info['chan_thermal_index'] cw = self._channel_cw_thermal[chan_index] * 1e-3 a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] # Perform the calibration calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs elif calibration_name == 'reflectance': scale = 1/(dataset_info['wavelength'][2] - dataset_info['wavelength'][0]) # Extract the values of calibration coefficients for the current channel chan_index = dataset_info['chan_solar_index'] isi = scale * self._integrated_solar_irradiance[chan_index] # Perform the calibration calibrated_variable = self._calibrate_refl(variable, self.angle_factor, isi) calibrated_variable.attrs = variable.attrs elif calibration_name == 'radiance': calibrated_variable = variable else: raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name'])) return calibrated_variable def _perform_orthorectification(self, variable, orthorect_data_name): """Perform the orthorectification. Args: variable: xarray DataArray containing the dataset to correct for orthorectification. orthorect_data_name: name of the orthorectification correction data in the product. Returns: DataArray: array containing the corrected values and all the original metadata. """ try: orthorect_data = self[orthorect_data_name] # Convert the orthorectification delta values from meters to degrees # based on the simplified formula using mean Earth radius variable += np.degrees(orthorect_data / MEAN_EARTH_RADIUS) except KeyError: logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) return variable @staticmethod def _calibrate_bt(radiance, cw, a, b): """Perform the calibration to brightness temperature. Args: radiance: numpy ndarray containing the radiance values. cw: center wavelength [μm]. a: temperature coefficient [-]. b: temperature coefficient [K]. Returns: numpy ndarray: array containing the calibrated brightness temperature values. """ log_expr = np.log(1.0 + C1 / ((cw ** 5) * radiance)) bt_values = b + (a * C2 / (cw * log_expr)) return bt_values @staticmethod def _calibrate_refl(radiance, angle_factor, isi): """Perform the calibration to reflectance. Args: radiance: numpy ndarray containing the radiance values. angle_factor: numpy ndarray containing the inverse of cosine of solar zenith angle [-]. isi: integrated solar irradiance [W/(m2 * μm)]. Returns: numpy ndarray: array containing the calibrated reflectance values. """ refl_values = (np.pi / isi) * angle_factor * radiance return refl_values satpy-0.34.0/satpy/readers/vii_l2_nc.py000066400000000000000000000032771420401153000177400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """EUMETSAT EPS-SG Visible/Infrared Imager (VII) Level 2 products reader.""" import logging from satpy.readers.vii_base_nc import ViiNCBaseFileHandler logger = logging.getLogger(__name__) class ViiL2NCFileHandler(ViiNCBaseFileHandler): """Reader class for VII L2 products in netCDF format.""" def _perform_orthorectification(self, variable, orthorect_data_name): """Perform the orthorectification. Args: variable: xarray DataArray containing the dataset to correct for orthorectification. orthorect_data_name: name of the orthorectification correction data in the product. Returns: DataArray: array containing the corrected values and all the original metadata. """ try: orthorect_data = self[orthorect_data_name] variable += orthorect_data except KeyError: logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) return variable satpy-0.34.0/satpy/readers/vii_utils.py000066400000000000000000000022221420401153000200700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """Utilities for the management of VII products.""" # PLANCK COEFFICIENTS FOR CALIBRATION AS DEFINED BY EUMETSAT C1 = 1.191062e+8 # [W/m2·sr-1·µm4] C2 = 1.4387863e+4 # [K·µm] # CONSTANTS DEFINING THE TIE POINTS TIE_POINTS_FACTOR = 8 # Sub-sampling factor of tie points wrt pixel points SCAN_ALT_TIE_POINTS = 4 # Number of tie points along the satellite track for each scan # MEAN EARTH RADIUS AS DEFINED BY IUGG MEAN_EARTH_RADIUS = 6371008.7714 # [m] satpy-0.34.0/satpy/readers/viirs_compact.py000066400000000000000000000455701420401153000207400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Compact viirs format. This is a reader for the Compact VIIRS format shipped on Eumetcast for the VIIRS SDR. The format is compressed in multiple ways, notably by shipping only tie-points for geographical data. The interpolation of this data is done using dask operations, so it should be relatively performant. For more information on this format, the reader can refer to the `Compact VIIRS SDR Product Format User Guide` that can be found on this EARS_ page. .. 
_EARS: https://www.eumetsat.int/media/45988 """ import logging from contextlib import suppress from datetime import datetime, timedelta import dask.array as da import h5py import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy.utils import angle2xyz, lonlat2xyz, xyz2angle, xyz2lonlat _channels_dict = {"M01": "M1", "M02": "M2", "M03": "M3", "M04": "M4", "M05": "M5", "M06": "M6", "M07": "M7", "M08": "M8", "M09": "M9", "M10": "M10", "M11": "M11", "M12": "M12", "M13": "M13", "M14": "M14", "M15": "M15", "M16": "M16", "DNB": "DNB"} logger = logging.getLogger(__name__) c = 299792458 # m.s-1 h = 6.6260755e-34 # m2kg.s-1 k = 1.380658e-23 # m2kg.s-2.K-1 short_names = {'NPP': 'Suomi-NPP', 'J01': 'NOAA-20', 'J02': 'NOAA-21'} class VIIRSCompactFileHandler(BaseFileHandler): """A file handler class for VIIRS compact format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(VIIRSCompactFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") self.finfo = filename_info self.lons = None self.lats = None if filetype_info['file_type'] == 'compact_m': self.ch_type = 'MOD' elif filetype_info['file_type'] == 'compact_dnb': self.ch_type = 'DNB' else: raise IOError('Compact Viirs file type not recognized.') geo_data = self.h5f["Data_Products"]["VIIRS-%s-GEO" % self.ch_type]["VIIRS-%s-GEO_Gran_0" % self.ch_type] self.min_lat = geo_data.attrs['South_Bounding_Coordinate'].item() self.max_lat = geo_data.attrs['North_Bounding_Coordinate'].item() self.min_lon = geo_data.attrs['West_Bounding_Coordinate'].item() self.max_lon = geo_data.attrs['East_Bounding_Coordinate'].item() self.switch_to_cart = ((abs(self.max_lon - self.min_lon) > 90) or (max(abs(self.min_lat), abs(self.max_lat)) > 60)) self.scans = self.h5f["All_Data"]["NumberOfScans"][0] self.geography = self.h5f["All_Data"]['VIIRS-%s-GEO_All' % self.ch_type] for key in self.h5f["All_Data"].keys(): if key.startswith("VIIRS") and key.endswith("SDR_All"): channel = key.split('-')[1] break # This supposes there is only one tiepoint zone in the track direction. 
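        # Note: the per-channel attributes read below (tie point zone sizes and
        # pixel offsets) feed the tie-point-to-pixel expansion performed later
        # in `expansion_coefs`.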
channel_path = f"All_Data/VIIRS-{channel}-SDR_All" self.scan_size = self.h5f[channel_path].attrs["TiePointZoneSizeTrack"].item() self.track_offset = self.h5f[channel_path].attrs["PixelOffsetTrack"][()] self.scan_offset = self.h5f[channel_path].attrs["PixelOffsetScan"][()] try: self.group_locations = self.geography["TiePointZoneGroupLocationScanCompact"][()] except KeyError: self.group_locations = [0] self.tpz_sizes = da.from_array(self.h5f[channel_path].attrs["TiePointZoneSizeScan"], chunks=1) if len(self.tpz_sizes.shape) == 2: if self.tpz_sizes.shape[1] != 1: raise NotImplementedError("Can't handle 2 dimensional tiepoint zones.") self.tpz_sizes = self.tpz_sizes.squeeze(1) self.nb_tiepoint_zones = self.geography["NumberOfTiePointZonesScan"][()] self.c_align = da.from_array(self.geography["AlignmentCoefficient"], chunks=tuple(self.nb_tiepoint_zones)) self.c_exp = da.from_array(self.geography["ExpansionCoefficient"], chunks=tuple(self.nb_tiepoint_zones)) self.nb_tiepoint_zones = da.from_array(self.nb_tiepoint_zones, chunks=1) self._expansion_coefs = None self.cache = {} self.mda = {} short_name = np2str(self.h5f.attrs['Platform_Short_Name']) self.mda['platform_name'] = short_names.get(short_name, short_name) self.mda['sensor'] = 'viirs' def __del__(self): """Close file handlers when we are done.""" with suppress(OSError): self.h5f.close() def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading %s.', key['name']) if key['name'] in _channels_dict: m_data = self.read_dataset(key, info) else: m_data = self.read_geo(key, info) m_data.attrs.update(info) m_data.attrs['rows_per_scan'] = self.scan_size return m_data def get_bounding_box(self): """Get the bounding box of the data.""" for key in self.h5f["Data_Products"].keys(): if key.startswith("VIIRS") and key.endswith("GEO"): lats = self.h5f["Data_Products"][key][key + '_Gran_0'].attrs['G-Ring_Latitude'][()] lons = self.h5f["Data_Products"][key][key + '_Gran_0'].attrs['G-Ring_Longitude'][()] break else: raise KeyError('Cannot find bounding coordinates!') return lons.ravel(), lats.ravel() @property def start_time(self): """Get the start time.""" return self.finfo['start_time'] @property def end_time(self): """Get the end time.""" end_time = datetime.combine(self.start_time.date(), self.finfo['end_time'].time()) if end_time < self.start_time: end_time += timedelta(days=1) return end_time def read_geo(self, key, info): """Read angles.""" pairs = {('satellite_azimuth_angle', 'satellite_zenith_angle'): ("SatelliteAzimuthAngle", "SatelliteZenithAngle"), ('solar_azimuth_angle', 'solar_zenith_angle'): ("SolarAzimuthAngle", "SolarZenithAngle"), ('dnb_solar_azimuth_angle', 'dnb_solar_zenith_angle'): ("SolarAzimuthAngle", "SolarZenithAngle"), ('dnb_lunar_azimuth_angle', 'dnb_lunar_zenith_angle'): ("LunarAzimuthAngle", "LunarZenithAngle"), } if self.lons is None or self.lats is None: self.lons, self.lats = self.navigate() for pair, fkeys in pairs.items(): if key['name'] in pair: if (self.cache.get(pair[0]) is None or self.cache.get(pair[1]) is None): angles = self.angles(*fkeys) self.cache[pair[0]], self.cache[pair[1]] = angles if key['name'] == pair[0]: return xr.DataArray(self.cache[pair[0]], name=key['name'], attrs=self.mda, dims=('y', 'x')) else: return xr.DataArray(self.cache[pair[1]], name=key['name'], attrs=self.mda, dims=('y', 'x')) if info.get('standard_name') in ['latitude', 'longitude']: mda = self.mda.copy() mda.update(info) if info['standard_name'] == 'longitude': return xr.DataArray(self.lons, attrs=mda, dims=('y', 'x')) 
else: return xr.DataArray(self.lats, attrs=mda, dims=('y', 'x')) if key['name'] == 'dnb_moon_illumination_fraction': mda = self.mda.copy() mda.update(info) return xr.DataArray(da.from_array(self.geography["MoonIllumFraction"]), attrs=info) def read_dataset(self, dataset_key, info): """Read a dataset.""" h5f = self.h5f channel = _channels_dict[dataset_key['name']] chan_dict = dict([(key.split("-")[1], key) for key in h5f["All_Data"].keys() if key.startswith("VIIRS")]) h5rads = h5f["All_Data"][chan_dict[channel]]["Radiance"] chunks = h5rads.chunks or CHUNK_SIZE rads = xr.DataArray(da.from_array(h5rads, chunks=chunks), name=dataset_key['name'], dims=['y', 'x']).astype(np.float32) h5attrs = h5rads.attrs scans = h5f["All_Data"]["NumberOfScans"][0] rads = rads[:scans * 16, :] rads = rads.where(rads <= 65526) try: rads = xr.where(rads <= h5attrs['Threshold'], rads * h5attrs['RadianceScaleLow'] + h5attrs['RadianceOffsetLow'], rads * h5attrs['RadianceScaleHigh'] + h5attrs['RadianceOffsetHigh']) except (KeyError, AttributeError): logger.info("Missing attribute for scaling of %s.", channel) pass unit = "W m-2 sr-1 μm-1" if dataset_key['calibration'] == 'counts': raise NotImplementedError("Can't get counts from this data") if dataset_key['calibration'] in ['reflectance', 'brightness_temperature']: # do calibrate try: # First guess: VIS or NIR data a_vis = h5attrs['EquivalentWidth'] b_vis = h5attrs['IntegratedSolarIrradiance'] dse = h5attrs['EarthSunDistanceNormalised'] rads *= 100 * np.pi * a_vis / b_vis * (dse**2) unit = "%" except KeyError: # Maybe it's IR data? try: a_ir = h5attrs['BandCorrectionCoefficientA'] b_ir = h5attrs['BandCorrectionCoefficientB'] lambda_c = h5attrs['CentralWaveLength'] rads *= 1e6 rads = (h * c) / (k * lambda_c * np.log(1 + (2 * h * c ** 2) / ((lambda_c ** 5) * rads))) rads *= a_ir rads += b_ir unit = "K" except KeyError: logger.warning("Calibration failed.") elif dataset_key['calibration'] != 'radiance': raise ValueError("Calibration parameter should be radiance, " "reflectance or brightness_temperature") rads = rads.clip(min=0) rads.attrs = self.mda rads.attrs['units'] = unit return rads def expand_angle_and_nav(self, arrays): """Expand angle and navigation datasets.""" res = [] for array in arrays: res.append(da.map_blocks(expand, array[:, :, np.newaxis], self.expansion_coefs, scans=self.scans, scan_size=self.scan_size, dtype=array.dtype, drop_axis=2, chunks=self.expansion_coefs.chunks[:-1])) return res @property def expansion_coefs(self): """Compute the expansion coefficients.""" if self._expansion_coefs is not None: return self._expansion_coefs v_track = (np.arange(self.scans * self.scan_size) % self.scan_size + self.track_offset) / self.scan_size self.tpz_sizes = self.tpz_sizes.persist() self.nb_tiepoint_zones = self.nb_tiepoint_zones.persist() col_chunks = (self.tpz_sizes * self.nb_tiepoint_zones).compute() self._expansion_coefs = da.map_blocks(get_coefs, self.c_align, self.c_exp, self.tpz_sizes, self.nb_tiepoint_zones, v_track=v_track, scans=self.scans, scan_size=self.scan_size, scan_offset=self.scan_offset, dtype=np.float64, new_axis=[0, 2], chunks=(self.scans * self.scan_size, tuple(col_chunks), 4)) return self._expansion_coefs def navigate(self): """Generate the navigation datasets.""" chunks = self._get_geographical_chunks() lon = da.from_array(self.geography["Longitude"], chunks=chunks) lat = da.from_array(self.geography["Latitude"], chunks=chunks) if self.switch_to_cart: arrays = lonlat2xyz(lon, lat) else: arrays = (lon, lat) expanded = 
self.expand_angle_and_nav(arrays) if self.switch_to_cart: return xyz2lonlat(*expanded) return expanded def _get_geographical_chunks(self): shape = self.geography['Longitude'].shape horizontal_chunks = (self.nb_tiepoint_zones + 1).compute() chunks = (shape[0], tuple(horizontal_chunks)) return chunks def angles(self, azi_name, zen_name): """Generate the angle datasets.""" chunks = self._get_geographical_chunks() azi = self.geography[azi_name] zen = self.geography[zen_name] switch_to_cart = ((np.max(azi) - np.min(azi) > 5) or (np.min(zen) < 10) or (max(abs(self.min_lat), abs(self.max_lat)) > 80)) azi = da.from_array(azi, chunks=chunks) zen = da.from_array(zen, chunks=chunks) if switch_to_cart: arrays = convert_from_angles(azi, zen) else: arrays = (azi, zen) expanded = self.expand_angle_and_nav(arrays) if switch_to_cart: return convert_to_angles(*expanded) return expanded def convert_from_angles(azi, zen): """Convert the angles to cartesian coordinates.""" x, y, z, = angle2xyz(azi, zen) # Conversion to ECEF is recommended by the provider, but no significant # difference has been seen. # x, y, z = (-np.sin(lon) * x + np.cos(lon) * y, # -np.sin(lat) * np.cos(lon) * x - np.sin(lat) * np.sin(lon) * y + np.cos(lat) * z, # np.cos(lat) * np.cos(lon) * x + np.cos(lat) * np.sin(lon) * y + np.sin(lat) * z) return x, y, z def convert_to_angles(x, y, z): """Convert the cartesian coordinates to angles.""" # Conversion to ECEF is recommended by the provider, but no significant # difference has been seen. # x, y, z = (-np.sin(lon) * x - np.sin(lat) * np.cos(lon) * y + np.cos(lat) * np.cos(lon) * z, # np.cos(lon) * x - np.sin(lat) * np.sin(lon) * y + np.cos(lat) * np.sin(lon) * z, # np.cos(lat) * y + np.sin(lat) * z) azi, zen = xyz2angle(x, y, z, acos=True) return azi, zen def get_coefs(c_align, c_exp, tpz_size, nb_tpz, v_track, scans, scan_size, scan_offset): """Compute the coeffs in numpy domain.""" nties = nb_tpz.item() tpz_size = tpz_size.item() v_scan = (np.arange(nties * tpz_size) % tpz_size + scan_offset) / tpz_size s_scan, s_track = np.meshgrid(v_scan, v_track) s_track = s_track.reshape(scans, scan_size, nties, tpz_size) s_scan = s_scan.reshape(scans, scan_size, nties, tpz_size) c_align = c_align[np.newaxis, np.newaxis, :, np.newaxis] c_exp = c_exp[np.newaxis, np.newaxis, :, np.newaxis] a_scan = s_scan + s_scan * (1 - s_scan) * c_exp + s_track * ( 1 - s_track) * c_align a_track = s_track coef_a = (1 - a_track) * (1 - a_scan) coef_b = (1 - a_track) * a_scan coef_d = a_track * (1 - a_scan) coef_c = a_track * a_scan res = np.stack([coef_a, coef_b, coef_c, coef_d], axis=4).reshape(scans * scan_size, -1, 4) return res def expand(data, coefs, scans, scan_size): """Perform the expansion in numpy domain.""" data = data.reshape(data.shape[:-1]) coefs = coefs.reshape(scans, scan_size, data.shape[1] - 1, -1, 4) coef_a = coefs[:, :, :, :, 0] coef_b = coefs[:, :, :, :, 1] coef_c = coefs[:, :, :, :, 2] coef_d = coefs[:, :, :, :, 3] corner_coefficients = (coef_a, coef_b, coef_c, coef_d) fdata = _interpolate_data(data, corner_coefficients, scans) return fdata.reshape(scans * scan_size, -1) def _interpolate_data(data, corner_coefficients, scans): """Interpolate the data using the provided coefficients.""" coef_a, coef_b, coef_c, coef_d = corner_coefficients data_a = data[:scans * 2:2, np.newaxis, :-1, np.newaxis] data_b = data[:scans * 2:2, np.newaxis, 1:, np.newaxis] data_c = data[1:scans * 2:2, np.newaxis, 1:, np.newaxis] data_d = data[1:scans * 2:2, np.newaxis, :-1, np.newaxis] fdata = (coef_a * data_a + coef_b * 
data_b + coef_d * data_d + coef_c * data_c) return fdata def expand_arrays(arrays, scans, c_align, c_exp, scan_size=16, tpz_size=16, nties=200, track_offset=0.5, scan_offset=0.5): """Expand *data* according to alignment and expansion.""" nties = nties.item() tpz_size = tpz_size.item() s_scan, s_track = da.meshgrid(da.arange(nties * tpz_size), da.arange(scans * scan_size)) s_track = (s_track.reshape(scans, scan_size, nties, tpz_size) % scan_size + track_offset) / scan_size s_scan = (s_scan.reshape(scans, scan_size, nties, tpz_size) % tpz_size + scan_offset) / tpz_size a_scan = s_scan + s_scan * (1 - s_scan) * c_exp + s_track * ( 1 - s_track) * c_align a_track = s_track expanded = [] coef_a = (1 - a_track) * (1 - a_scan) coef_b = (1 - a_track) * a_scan coef_d = a_track * (1 - a_scan) coef_c = a_track * a_scan corner_coefficients = (coef_a, coef_b, coef_c, coef_d) for data in arrays: fdata = _interpolate_data(data, corner_coefficients, scans) expanded.append(fdata.reshape(scans * scan_size, nties * tpz_size)) return expanded satpy-0.34.0/satpy/readers/viirs_edr_active_fires.py000066400000000000000000000126321420401153000226000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """VIIRS Active Fires reader. This module implements readers for VIIRS Active Fires NetCDF and ASCII files. """ import dask.dataframe as dd import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.netcdf_utils import NetCDF4FileHandler # map platform attributes to Oscar standard name PLATFORM_MAP = { "NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21" } class VIIRSActiveFiresFileHandler(NetCDF4FileHandler): """NetCDF4 reader for VIIRS Active Fires.""" def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None): """Open and perform initial investigation of NetCDF file.""" super(VIIRSActiveFiresFileHandler, self).__init__( filename, filename_info, filetype_info, auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs) self.prefix = filetype_info.get('variable_prefix') def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray. 
Args: dsid: Dataset ID param2: Dataset Information Returns: Dask DataArray: Data """ key = dsinfo.get('file_key', dsid['name']).format(variable_prefix=self.prefix) data = self[key] # rename "phoney dims" data = data.rename(dict(zip(data.dims, ['y', 'x']))) # handle attributes from YAML for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] if isinstance(data.attrs.get('flag_meanings'), str): data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') # use more common CF standard units if data.attrs.get('units') == 'kelvins': data.attrs['units'] = 'K' data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") data.attrs["sensor"] = "VIIRS" return data @property def start_time(self): """Get first date/time when observations were recorded.""" return self.filename_info['start_time'] @property def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info.get('end_time', self.start_time) @property def sensor_name(self): """Name of sensor for this file.""" return self["sensor"] @property def platform_name(self): """Name of platform/satellite for this file.""" return self["platform_name"] class VIIRSActiveFiresTextFileHandler(BaseFileHandler): """ASCII reader for VIIRS Active Fires.""" def __init__(self, filename, filename_info, filetype_info): """Make sure filepath is valid and then reads data into a Dask DataFrame. Args: filename: Filename filename_info: Filename information filetype_info: Filetype information """ skip_rows = filetype_info.get('skip_rows', 15) columns = filetype_info['columns'] self.file_content = dd.read_csv(filename, skiprows=skip_rows, header=None, names=columns) super(VIIRSActiveFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.platform_name = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray.""" ds = self[dsid['name']].to_dask_array(lengths=True) data = xr.DataArray(ds, dims=("y",), attrs={"platform_name": self.platform_name, "sensor": "VIIRS"}) for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] if isinstance(data.attrs.get('flag_meanings'), str): data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') return data @property def start_time(self): """Get first date/time when observations were recorded.""" return self.filename_info['start_time'] @property def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info.get('end_time', self.start_time) def __getitem__(self, key): """Get file content for 'key'.""" return self.file_content[key] def __contains__(self, item): """Check if variable is in current file.""" return item in self.file_content satpy-0.34.0/satpy/readers/viirs_edr_flood.py000066400000000000000000000065211420401153000212400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to VIIRS flood product.""" import numpy as np from pyresample import geometry from satpy.readers.hdf4_utils import HDF4FileHandler class VIIRSEDRFlood(HDF4FileHandler): """VIIRS EDR Flood-product handler for HDF4 files.""" @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def sensor_name(self): """Get sensor name.""" sensor = self['/attr/SensorIdentifyCode'] if isinstance(sensor, np.ndarray): return str(sensor.astype(str)).lower() return sensor.lower() @property def platform_name(self): """Get platform name.""" platform_name = self['/attr/Satellitename'] if isinstance(platform_name, np.ndarray): return str(platform_name.astype(str)).lower() return platform_name.lower() def get_metadata(self, data, ds_info): """Get metadata.""" metadata = {} metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ 'sensor': self.sensor_name, 'platform_name': self.platform_name, 'start_time': self.start_time, 'end_time': self.end_time, }) return metadata def get_dataset(self, ds_id, ds_info): """Get dataset.""" data = self[ds_id['name']] data.attrs = self.get_metadata(data, ds_info) fill = data.attrs.pop('_Fillvalue') offset = data.attrs.get('add_offset') scale_factor = data.attrs.get('scale_factor') data = data.where(data != fill) if scale_factor is not None and offset is not None: data *= scale_factor data += offset return data def get_area_def(self, ds_id): """Get area definition.""" data = self[ds_id['name']] proj_dict = { 'proj': 'latlong', 'datum': 'WGS84', 'ellps': 'WGS84', 'no_defs': True } area_extent = [data.attrs.get('ProjectionMinLongitude'), data.attrs.get('ProjectionMinLatitude'), data.attrs.get('ProjectionMaxLongitude'), data.attrs.get('ProjectionMaxLatitude')] area = geometry.AreaDefinition( 'viirs_flood_area', 'name_of_proj', 'id_of_proj', proj_dict, int(self.filename_info['dim0']), int(self.filename_info['dim1']), np.asarray(area_extent) ) return area satpy-0.34.0/satpy/readers/viirs_l1b.py000066400000000000000000000240321420401153000177560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Interface to VIIRS L1B format.""" import logging from datetime import datetime import numpy as np from satpy.readers.netcdf_utils import NetCDF4FileHandler LOG = logging.getLogger(__name__) class VIIRSL1BFileHandler(NetCDF4FileHandler): """VIIRS L1B File Reader.""" def _parse_datetime(self, datestr): """Parse datetime.""" return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") @property def start_orbit_number(self): """Get start orbit number.""" try: return int(self['/attr/orbit_number']) except KeyError: return int(self['/attr/OrbitNumber']) @property def end_orbit_number(self): """Get end orbit number.""" try: return int(self['/attr/orbit_number']) except KeyError: return int(self['/attr/OrbitNumber']) @property def platform_name(self): """Get platform name.""" try: res = self.get('/attr/platform', self.filename_info['platform_shortname']) except KeyError: res = 'Unknown' return { 'JPSS-1': 'NOAA-20', 'NP': 'Suomi-NPP', 'J1': 'NOAA-20', 'J2': 'NOAA-21', 'JPSS-2': 'NOAA-21', }.get(res, res) @property def sensor_name(self): """Get sensor name.""" res = self['/attr/instrument'] if isinstance(res, np.ndarray): return str(res.astype(str)) else: return res def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" if factors is None or factors[0] is None: factors = [1, 0] if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors factors = np.array(factors) if file_units == "W cm-2 sr-1" and output_units == "W m-2 sr-1": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors[::2] = np.where(factors[::2] != -999, factors[::2] * 10000.0, -999) factors[1::2] = np.where(factors[1::2] != -999, factors[1::2] * 10000.0, -999) return factors elif file_units == "1" and output_units == "%": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors[::2] = np.where(factors[::2] != -999, factors[::2] * 100.0, -999) factors[1::2] = np.where(factors[1::2] != -999, factors[1::2] * 100.0, -999) return factors else: return factors def get_shape(self, ds_id, ds_info): """Get shape.""" var_path = ds_info.get('file_key', 'observation_data/{}'.format(ds_id['name'])) return self.get(var_path + '/shape', 1) @property def start_time(self): """Get start time.""" return self._parse_datetime(self['/attr/time_coverage_start']) @property def end_time(self): """Get end time.""" return self._parse_datetime(self['/attr/time_coverage_end']) def _get_dataset_file_units(self, dataset_id, ds_info, var_path): file_units = ds_info.get('file_units') if file_units is None: file_units = self.get(var_path + '/attr/units') # they were almost completely CF compliant... 
if file_units == "none": file_units = "1" if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': rad_units_path = var_path + '/attr/radiance_units' if rad_units_path in self: if file_units is None: file_units = self[var_path + '/attr/radiance_units'] if file_units == 'Watts/meter^2/steradian/micrometer': file_units = 'W m-2 um-1 sr-1' elif ds_info.get('units') == '%' and file_units is None: # v1.1 and above of level 1 processing removed 'units' attribute # for all reflectance channels file_units = "1" return file_units def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': rad_units_path = var_path + '/attr/radiance_units' if rad_units_path in self: # we are getting a reflectance band but we want the radiance values # special scaling parameters scale_factor = self[var_path + '/attr/radiance_scale_factor'] scale_offset = self[var_path + '/attr/radiance_add_offset'] else: # we are getting a btemp band but we want the radiance values # these are stored directly in the primary variable scale_factor = self[var_path + '/attr/scale_factor'] scale_offset = self[var_path + '/attr/add_offset'] valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] elif ds_info.get('units') == '%': # normal reflectance valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] scale_factor = self[var_path + '/attr/scale_factor'] scale_offset = self[var_path + '/attr/add_offset'] elif ds_info.get('units') == 'K': # normal brightness temperature # use a special LUT to get the actual values lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') # we get the BT values from a look up table using the scaled radiance integers valid_min = self[lut_var_path + '/attr/valid_min'] valid_max = self[lut_var_path + '/attr/valid_max'] scale_factor = scale_offset = None else: valid_min = self.get(var_path + '/attr/valid_min') valid_max = self.get(var_path + '/attr/valid_max') scale_factor = self.get(var_path + '/attr/scale_factor') scale_offset = self.get(var_path + '/attr/add_offset') return valid_min, valid_max, scale_factor, scale_offset def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id['name'])) shape = self.get_shape(dataset_id, ds_info) file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) # Get extra metadata if '/dimension/number_of_scans' in self: rows_per_scan = int(shape[0] / self['/dimension/number_of_scans']) ds_info.setdefault('rows_per_scan', rows_per_scan) i = getattr(self[var_path], 'attrs', {}) i.update(ds_info) i.update(dataset_id.to_dict()) i.update({ "shape": shape, "units": ds_info.get("units", file_units), "file_units": file_units, "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) i.update(dataset_id.to_dict()) return i def get_dataset(self, dataset_id, ds_info): """Get dataset.""" var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id['name'])) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path) if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': data = self[var_path] elif ds_info.get('units') == '%': data = 
self[var_path] elif ds_info.get('units') == 'K': # normal brightness temperature # use a special LUT to get the actual values lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') data = self[var_path] # we get the BT values from a look up table using the scaled radiance integers index_arr = data.data.astype(int) coords = data.coords data.data = self[lut_var_path].data[index_arr.ravel()].reshape(data.shape) data = data.assign_coords(**coords) else: data = self[var_path] data.attrs.update(metadata) if valid_min is not None and valid_max is not None: data = data.where((data >= valid_min) & (data <= valid_max)) if data.attrs.get('units') in ['%', 'K', '1', 'W m-2 um-1 sr-1'] and \ 'flag_meanings' in data.attrs: # flag meanings don't mean anything anymore for these variables # these aren't category products data.attrs.pop('flag_meanings', None) data.attrs.pop('flag_values', None) factors = (scale_factor, scale_offset) factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data *= factors[0] data += factors[1] # rename dimensions to correspond to satpy's 'y' and 'x' standard if 'number_of_lines' in data.dims: data = data.rename({'number_of_lines': 'y', 'number_of_pixels': 'x'}) return data satpy-0.34.0/satpy/readers/viirs_sdr.py000066400000000000000000000665501420401153000201030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to VIIRS SDR format. This reader implements the support of VIIRS SDR files as produced by CSPP and CLASS. It is comprised of two parts: - A subclass of the YAMLFileReader class to allow handling all the files - A filehandler class to implement the actual reading Format documentation: - http://npp.gsfc.nasa.gov/science/sciencedocuments/082012/474-00001-03_CDFCBVolIII_RevC.pdf """ import logging import os.path from contextlib import suppress from datetime import datetime, timedelta from glob import glob import dask.array as da import numpy as np import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.readers.yaml_reader import FileYAMLReader NO_DATE = datetime(1958, 1, 1) EPSILON_TIME = timedelta(days=2) LOG = logging.getLogger(__name__) def _get_invalid_info(granule_data): """Get a detailed report of the missing data. 
N/A: not applicable MISS: required value missing at time of processing OBPT: onboard pixel trim (overlapping/bow-tie pixel removed during SDR processing) OGPT: on-ground pixel trim (overlapping/bow-tie pixel removed during EDR processing) ERR: error occurred during processing / non-convergence ELINT: ellipsoid intersect failed / instrument line-of-sight does not intersect the Earth’s surface VDNE: value does not exist / processing algorithm did not execute SOUB: scaled out-of-bounds / solution not within allowed range """ msg = None if issubclass(granule_data.dtype.type, np.integer): msg = ("na:" + str((granule_data == 65535).sum()) + " miss:" + str((granule_data == 65534).sum()) + " obpt:" + str((granule_data == 65533).sum()) + " ogpt:" + str((granule_data == 65532).sum()) + " err:" + str((granule_data == 65531).sum()) + " elint:" + str((granule_data == 65530).sum()) + " vdne:" + str((granule_data == 65529).sum()) + " soub:" + str((granule_data == 65528).sum())) elif issubclass(granule_data.dtype.type, np.floating): msg = ("na:" + str((granule_data == -999.9).sum()) + " miss:" + str((granule_data == -999.8).sum()) + " obpt:" + str((granule_data == -999.7).sum()) + " ogpt:" + str((granule_data == -999.6).sum()) + " err:" + str((granule_data == -999.5).sum()) + " elint:" + str((granule_data == -999.4).sum()) + " vdne:" + str((granule_data == -999.3).sum()) + " soub:" + str((granule_data == -999.2).sum())) return msg DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO', 'SVDNB': 'VIIRS-DNB-SDR', 'GITCO': 'VIIRS-IMG-GEO-TC', 'GIMGO': 'VIIRS-IMG-GEO', 'SVI01': 'VIIRS-I1-SDR', 'SVI02': 'VIIRS-I2-SDR', 'SVI03': 'VIIRS-I3-SDR', 'SVI04': 'VIIRS-I4-SDR', 'SVI05': 'VIIRS-I5-SDR', 'GMTCO': 'VIIRS-MOD-GEO-TC', 'GMODO': 'VIIRS-MOD-GEO', 'SVM01': 'VIIRS-M1-SDR', 'SVM02': 'VIIRS-M2-SDR', 'SVM03': 'VIIRS-M3-SDR', 'SVM04': 'VIIRS-M4-SDR', 'SVM05': 'VIIRS-M5-SDR', 'SVM06': 'VIIRS-M6-SDR', 'SVM07': 'VIIRS-M7-SDR', 'SVM08': 'VIIRS-M8-SDR', 'SVM09': 'VIIRS-M9-SDR', 'SVM10': 'VIIRS-M10-SDR', 'SVM11': 'VIIRS-M11-SDR', 'SVM12': 'VIIRS-M12-SDR', 'SVM13': 'VIIRS-M13-SDR', 'SVM14': 'VIIRS-M14-SDR', 'SVM15': 'VIIRS-M15-SDR', 'SVM16': 'VIIRS-M16-SDR', 'IVCDB': 'VIIRS-DualGain-Cal-IP' } class VIIRSSDRFileHandler(HDF5FileHandler): """VIIRS HDF5 File Reader.""" def __init__(self, filename, filename_info, filetype_info, use_tc=None, **kwargs): """Initialize file handler.""" self.datasets = filename_info['datasets'].split('-') self.use_tc = use_tc super(VIIRSSDRFileHandler, self).__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, item): """Get item.""" if '*' in item: # this is an aggregated field that can't easily be loaded, need to # join things together idx = 0 base_item = item item = base_item.replace('*', str(idx)) result = [] while True: try: res = super(VIIRSSDRFileHandler, self).__getitem__(item) result.append(res) except KeyError: # no more granule keys LOG.debug("Aggregated granule stopping on '%s'", item) break idx += 1 item = base_item.replace('*', str(idx)) return result else: return super(VIIRSSDRFileHandler, self).__getitem__(item) def _parse_datetime(self, datestr, timestr): try: datetime_str = datestr + timestr except TypeError: datetime_str = str(datestr.astype(str)) + str(timestr.astype(str)) time_val = datetime.strptime(datetime_str, '%Y%m%d%H%M%S.%fZ') if abs(time_val - NO_DATE) < EPSILON_TIME: # catch rare case when SDR files have incorrect date raise ValueError("Datetime invalid {}".format(time_val)) return time_val @property def start_time(self): """Get start time.""" dataset_group 
= DATASET_KEYS[self.datasets[0]] default_start_date = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningDate' default_start_time = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningTime' date_var_path = self.filetype_info.get('start_date', default_start_date).format(dataset_group=dataset_group) time_var_path = self.filetype_info.get('start_time', default_start_time).format(dataset_group=dataset_group) return self._parse_datetime(self[date_var_path], self[time_var_path]) @property def end_time(self): """Get end time.""" dataset_group = DATASET_KEYS[self.datasets[0]] default_end_date = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingDate' default_end_time = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingTime' date_var_path = self.filetype_info.get('end_date', default_end_date).format(dataset_group=dataset_group) time_var_path = self.filetype_info.get('end_time', default_end_time).format(dataset_group=dataset_group) return self._parse_datetime(self[date_var_path], self[time_var_path]) @property def start_orbit_number(self): """Get start orbit number.""" dataset_group = DATASET_KEYS[self.datasets[0]] default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningOrbitNumber' start_orbit_path = self.filetype_info.get('start_orbit', default).format(dataset_group=dataset_group) return int(self[start_orbit_path]) @property def end_orbit_number(self): """Get end orbit number.""" dataset_group = DATASET_KEYS[self.datasets[0]] default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingOrbitNumber' end_orbit_path = self.filetype_info.get('end_orbit', default).format(dataset_group=dataset_group) return int(self[end_orbit_path]) @property def platform_name(self): """Get platform name.""" default = '/attr/Platform_Short_Name' platform_path = self.filetype_info.get( 'platform_name', default).format(**self.filetype_info) platform_dict = {'NPP': 'Suomi-NPP', 'JPSS-1': 'NOAA-20', 'J01': 'NOAA-20', 'JPSS-2': 'NOAA-21', 'J02': 'NOAA-21'} return platform_dict.get(self[platform_path], self[platform_path]) @property def sensor_name(self): """Get sensor name.""" dataset_group = DATASET_KEYS[self.datasets[0]] default = 'Data_Products/{dataset_group}/attr/Instrument_Short_Name' sensor_path = self.filetype_info.get( 'sensor_name', default).format(dataset_group=dataset_group) return self[sensor_path].lower() def get_file_units(self, dataset_id, ds_info): """Get file units from metadata.""" file_units = ds_info.get("file_units") if file_units is None: LOG.debug("Unknown units for file key '%s'", dataset_id) return file_units def scale_swath_data(self, data, scaling_factors, dataset_group): """Scale swath data using scaling factors and offsets. Multi-granule (a.k.a. aggregated) files will have more than the usual two values. 
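        The factors arrive as a flat sequence of ``(scale, offset)`` pairs, one
        pair per granule, and each pair is applied only to the rows of its own
        granule. A small illustration (the numbers are invented, not taken from
        a real file)::

            # a two-granule aggregate carries two (scale, offset) pairs
            scaling_factors = [0.002, 0.0, 0.0021, 0.0]
            # internally reshaped to [[0.002, 0.0], [0.0021, 0.0]] and applied
            # block-wise to the rows of granule 1 and granule 2 respectively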
""" rows_per_gran = self._get_rows_per_granule(dataset_group) factors = self._mask_and_reshape_factors(scaling_factors) data = self._map_and_apply_factors(data, factors, rows_per_gran) return data @staticmethod def _mask_and_reshape_factors(factors): factors = factors.where(factors > -999, np.float32(np.nan)) return factors.data.reshape((-1, 2)).rechunk((1, 2)) # make it so map_blocks happens per factor @staticmethod def _map_and_apply_factors(data, factors, rows_per_gran): # The user may have requested a different chunking scheme, but we need # per granule chunking right now so factor chunks map 1:1 to data chunks old_chunks = data.chunks dask_data = data.data.rechunk((tuple(rows_per_gran), data.data.chunks[1])) dask_data = da.map_blocks(_apply_factors, dask_data, factors, chunks=dask_data.chunks, dtype=data.dtype, meta=np.array([[]], dtype=data.dtype)) data = xr.DataArray(dask_data.rechunk(old_chunks), dims=data.dims, coords=data.coords, attrs=data.attrs) return data @staticmethod def _scale_factors_for_units(factors, file_units, output_units): if file_units == "W cm-2 sr-1" and output_units == "W m-2 sr-1": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors = factors * 10000. elif file_units == "1" and output_units == "%": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors = factors * 100. else: raise ValueError("Don't know how to convert '{}' to '{}'".format( file_units, output_units)) return factors @staticmethod def _get_valid_scaling_factors(factors): if factors is None: factors = np.array([1, 0], dtype=np.float32) factors = xr.DataArray(da.from_array(factors, chunks=1)) else: factors = factors.where(factors != -999., np.float32(np.nan)) return factors def _adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors .""" if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors factors = self._get_valid_scaling_factors(factors) return self._scale_factors_for_units(factors, file_units, output_units) def _get_scaling_factors(self, file_units, output_units, factor_var_path): """Get file scaling factors and scale according to expected units.""" factors = self.get(factor_var_path) factors = self._adjust_scaling_factors(factors, file_units, output_units) return factors def _generate_file_key(self, ds_id, ds_info, factors=False): var_path = ds_info.get('file_key', 'All_Data/{dataset_group}_All/{calibration}') calibration = { 'radiance': 'Radiance', 'reflectance': 'Reflectance', 'brightness_temperature': 'BrightnessTemperature', }.get(ds_id.get('calibration')) var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info['dataset_group']]) if ds_id['name'] in ['dnb_longitude', 'dnb_latitude']: if self.use_tc is True: return var_path + '_TC' if self.use_tc is None and var_path + '_TC' in self.file_content: return var_path + '_TC' return var_path @staticmethod def expand_single_values(var, scans): """Expand single valued variable to full scan lengths.""" if scans.size == 1: return var else: expanded = np.repeat(var, scans) expanded.attrs = var.attrs expanded.rename({expanded.dims[0]: 'y'}) return expanded def _scan_size(self, dataset_group_name): """Get how many rows of data constitute one scanline.""" if 'I' in dataset_group_name: scan_size = 32 else: scan_size = 16 return scan_size def concatenate_dataset(self, dataset_group, var_path): """Concatenate dataset.""" scan_size = 
self._scan_size(dataset_group) scans = self._get_scans_per_granule(dataset_group) start_scan = 0 data_chunks = [] scans = xr.DataArray(scans) variable = self[var_path] # check if these are single per-granule value if variable.size != scans.size: for gscans in scans.values: data_chunks.append(self[var_path].isel(y=slice(start_scan, start_scan + gscans * scan_size))) start_scan += gscans * scan_size return xr.concat(data_chunks, 'y') else: return self.expand_single_values(variable, scans) def _get_rows_per_granule(self, dataset_group): scan_size = self._scan_size(dataset_group) scans_per_gran = self._get_scans_per_granule(dataset_group) return [scan_size * gran_scans for gran_scans in scans_per_gran] def _get_scans_per_granule(self, dataset_group): number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules' nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans' scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans def mask_fill_values(self, data, ds_info): """Mask fill values.""" is_floating = np.issubdtype(data.dtype, np.floating) if is_floating: # If the data is a float then we mask everything <= -999.0 fill_max = np.float32(ds_info.pop("fill_max_float", -999.0)) return data.where(data > fill_max, np.float32(np.nan)) else: # If the data is an integer then we mask everything >= fill_min_int fill_min = int(ds_info.pop("fill_min_int", 65528)) return data.where(data < fill_min, np.float32(np.nan)) def get_dataset(self, dataset_id, ds_info): """Get the dataset corresponding to *dataset_id*. The size of the return DataArray will be dependent on the number of scans actually sensed, and not necessarily the regular 768 scanlines that the file contains for each granule. To that end, the number of scans for each granule is read from: ``Data_Products/...Gran_x/N_Number_Of_Scans``. 
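        As a rough worked example (the granule scan counts are invented for
        illustration): an aggregated M-band file whose two granules report 48
        and 47 scans yields ``(48 + 47) * 16 = 1520`` rows, since M-band and
        DNB granules carry 16 rows per scan while I-band granules carry 32.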
""" dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] ds_info['dataset_group'] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) factor_var_path = ds_info.get("factors_key", var_path + "Factors") data = self.concatenate_dataset(dataset_group, var_path) data = self.mask_fill_values(data, ds_info) file_units = self.get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) factors = self._get_scaling_factors(file_units, output_units, factor_var_path) if factors is not None: data = self.scale_swath_data(data, factors, dataset_group) else: LOG.debug("No scaling factors found for %s", dataset_id) i = getattr(data, 'attrs', {}) i.update(ds_info) i.update({ "units": output_units, "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, "rows_per_scan": self._scan_size(dataset_group), }) i.update(dataset_id.to_dict()) data.attrs.update(i) return data def get_bounding_box(self): """Get the bounding box of this file.""" from pyproj import Geod geod = Geod(ellps='WGS84') dataset_group = DATASET_KEYS[self.datasets[0]] idx = 0 lons_ring = None lats_ring = None while True: path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/' prefix = path.format(dataset_group=dataset_group, idx=idx) try: lats = self.file_content[prefix + 'G-Ring_Latitude'] lons = self.file_content[prefix + 'G-Ring_Longitude'] if lons_ring is None: lons_ring = lons lats_ring = lats else: prev_lon = lons_ring[0] prev_lat = lats_ring[0] dists = [geod.inv(lon, lat, prev_lon, prev_lat)[2] for lon, lat in zip(lons, lats)] first_idx = np.argmin(dists) if first_idx == 2 and len(lons) == 8: lons_ring = np.hstack((lons[:3], lons_ring[:-2], lons[4:])) lats_ring = np.hstack((lats[:3], lats_ring[:-2], lats[4:])) else: raise NotImplementedError("Don't know how to handle G-Rings of length %d" % len(lons)) except KeyError: break idx += 1 return lons_ring, lats_ring def available_datasets(self, configured_datasets=None): """Generate dataset info and their availablity. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. """ for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info continue dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] if dataset_group: yield True, ds_info elif is_avail is None: yield is_avail, ds_info def split_desired_other(fhs, prime_geo, second_geo): """Split the provided filehandlers *fhs* into desired filehandlers and others.""" desired = [] other = [] for fh in fhs: if prime_geo in fh.datasets: desired.append(fh) elif second_geo in fh.datasets: other.append(fh) return desired, other def _apply_factors(data, factor_set): return data * factor_set[0, 0] + factor_set[0, 1] class VIIRSSDRReader(FileYAMLReader): """Custom file reader for finding VIIRS SDR geolocation at runtime.""" def __init__(self, config_files, use_tc=None, **kwargs): """Initialize file reader and adjust geolocation preferences. Args: config_files (iterable): yaml config files passed to base class use_tc (boolean): If `True` use the terrain corrected files. If `False`, switch to non-TC files. If `None` (default), use TC if available, non-TC otherwise. 
""" super(VIIRSSDRReader, self).__init__(config_files, **kwargs) self.use_tc = use_tc def filter_filenames_by_info(self, filename_items): """Filter out file using metadata from the filenames. This sorts out the different lon and lat datasets depending on TC is desired or not. """ filename_items = list(filename_items) geo_keep = [] geo_del = [] for filename, filename_info in filename_items: datasets = filename_info['datasets'].split('-') if ('GITCO' in datasets) or ('GMTCO' in datasets): if self.use_tc is False: geo_del.append(filename) else: geo_keep.append(filename) elif ('GIMGO' in datasets) or ('GMODO' in datasets): if self.use_tc is True: geo_del.append(filename) else: geo_keep.append(filename) if geo_keep: filename_items = self._remove_geo_datasets_from_files(filename_items, geo_del) return super(VIIRSSDRReader, self).filter_filenames_by_info(filename_items) def _remove_geo_datasets_from_files(self, filename_items, files_to_edit): fdict = dict(filename_items) for to_del in files_to_edit: fdict[to_del]['datasets'] = fdict[to_del]['datasets'].split('-') for dataset in ['GITCO', 'GMTCO', 'GIMGO', 'GMODO']: with suppress(ValueError): fdict[to_del]['datasets'].remove(dataset) if not fdict[to_del]['datasets']: del fdict[to_del] else: fdict[to_del]['datasets'] = "-".join(fdict[to_del]['datasets']) filename_items = fdict.items() return filename_items def _load_filenames_from_geo_ref(self, dsid): """Load filenames from the N_GEO_Ref attribute of a dataset's file.""" file_handlers = self._get_file_handlers(dsid) if not file_handlers: return [] fns = [] for fh in file_handlers: base_dir = os.path.dirname(fh.filename) try: # get the filename and remove the creation time # which is often wrong fn = fh['/attr/N_GEO_Ref'][:46] + '*.h5' fns.extend(glob(os.path.join(base_dir, fn))) # usually is non-terrain corrected file, add the terrain # corrected file too if fn[:5] == 'GIMGO': fn = 'GITCO' + fn[5:] elif fn[:5] == 'GMODO': fn = 'GMTCO' + fn[5:] else: continue fns.extend(glob(os.path.join(base_dir, fn))) except KeyError: LOG.debug("Could not load geo-reference information from {}".format(fh.filename)) return fns def _get_primary_secondary_geo_groups(self, ds_info): """Find out which geolocation files are needed.""" if ds_info['dataset_groups'][0].startswith('GM'): if self.use_tc is False: prime_geo = 'GMODO' second_geo = 'GMTCO' else: prime_geo = 'GMTCO' second_geo = 'GMODO' elif ds_info['dataset_groups'][0].startswith('GI'): if self.use_tc is False: prime_geo = 'GIMGO' second_geo = 'GITCO' else: prime_geo = 'GITCO' second_geo = 'GIMGO' else: raise ValueError('Unknown dataset group %s' % ds_info['dataset_groups'][0]) return prime_geo, second_geo def get_right_geo_fhs(self, dsid, fhs): """Find the right geographical file handlers for given dataset ID *dsid*.""" ds_info = self.all_ids[dsid] prime_geo, second_geo = self._get_primary_secondary_geo_groups(ds_info) desired, other = split_desired_other(fhs, prime_geo, second_geo) if desired: try: ds_info['dataset_groups'].remove(second_geo) except ValueError: pass return desired else: return other def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] fhs = [fh for fh in self.file_handlers['generic_file'] if set(fh.datasets) & set(ds_info['dataset_groups'])] if not fhs: LOG.warning("Required file type '%s' not found or loaded for " "'%s'", ds_info['file_type'], dsid['name']) else: if len(set(ds_info['dataset_groups']) & set(['GITCO', 'GIMGO', 'GMTCO', 'GMODO'])) > 1: fhs = 
self.get_right_geo_fhs(dsid, fhs) return fhs def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for `dsid`. Wraps the base class method in order to load geolocation files from the geo reference attribute in the datasets file. """ coords = super(VIIRSSDRReader, self)._get_coordinates_for_dataset_key(dsid) for c_id in coords: c_info = self.all_ids[c_id] # c_info['dataset_groups'] should be a list of 2 elements self._get_file_handlers(c_id) prime_geo, second_geo = self._geo_dataset_groups(c_info) if prime_geo is None: continue # check the dataset file for the geolocation filename geo_filenames = self._load_filenames_from_geo_ref(dsid) self._create_new_geo_file_handlers(geo_filenames) self._remove_not_loaded_geo_dataset_group(c_info['dataset_groups'], prime_geo, second_geo) return coords def _geo_dataset_groups(self, c_info): if len(c_info['dataset_groups']) == 1: # filtering already done return None, None try: prime_geo, second_geo = self._get_primary_secondary_geo_groups(c_info) return prime_geo, second_geo except ValueError: # DNB return None, None def _create_new_geo_file_handlers(self, geo_filenames): existing_filenames = set([fh.filename for fh in self.file_handlers['generic_file']]) geo_filenames = set(geo_filenames) - existing_filenames self.create_filehandlers(geo_filenames) def _remove_not_loaded_geo_dataset_group(self, c_dataset_groups, prime_geo, second_geo): all_fhs = self.file_handlers['generic_file'] desired, other = split_desired_other(all_fhs, prime_geo, second_geo) group_to_remove = second_geo if desired else prime_geo c_dataset_groups.remove(group_to_remove) satpy-0.34.0/satpy/readers/virr_l1b.py000066400000000000000000000152371420401153000176130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to VIRR (Visible and Infra-Red Radiometer) level 1b format. The file format is HDF5. Important attributes: - Latitude - Longitude - SolarZenith - EV_Emissive - EV_RefSB - Emissive_Radiance_Offsets - Emissive_Radiance_Scales - RefSB_Cal_Coefficients - RefSB_Effective_Wavelength - Emmisive_Centroid_Wave_Number Supported satellites: - FY-3B and FY-3C. For more information: - https://www.wmo-sat.info/oscar/instruments/view/607. 
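A minimal usage sketch (the search path below is a placeholder)::

    from glob import glob
    from satpy import Scene

    scn = Scene(reader='virr_l1b', filenames=glob('/path/to/virr/*.HDF'))
    print(scn.available_dataset_names())
    scn.load(scn.available_dataset_names())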
""" import logging from datetime import datetime import dask.array as da import numpy as np from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp from satpy.readers.hdf5_utils import HDF5FileHandler LOG = logging.getLogger(__name__) # PROVIDED BY NIGEL ATKINSON - 2013 # FY3B_REF_COEFFS = [ # 0.12640, -1.43200, #channel1# # 0.13530, -1.62360, #channel2# # 0.09193, -2.48207, #channel6# # 0.07480, -0.90980, #channel7# # 0.07590, -0.91080, #channel8# # 0.07460, -0.89520, #channel9# # 0.06300, -0.76280] #channel10# # CMA - 2015 - http://www.nsmc.org.cn/en/NSMC/Contents/100089.html FY3B_REF_COEFFS = [ 0.1264, -1.4320, 0.1353, -1.6236, 0.0919, -2.4821, 0.0938, -1.1494, 0.0857, -1.0280, 0.0803, -0.9636, 0.0630, -0.7628] class VIRR_L1B(HDF5FileHandler): """VIRR Level 1b reader.""" def __init__(self, filename, filename_info, filetype_info): """Open file and perform initial setup.""" super(VIRR_L1B, self).__init__(filename, filename_info, filetype_info) LOG.debug('day/night flag for {0}: {1}'.format(filename, self['/attr/Day Or Night Flag'])) self.geolocation_prefix = filetype_info['geolocation_prefix'] self.platform_id = filename_info['platform_id'] self.l1b_prefix = 'Data/' self.wave_number = 'Emissive_Centroid_Wave_Number' # Else filename_info['platform_id'] == FY3C. if filename_info['platform_id'] == 'FY3B': self.l1b_prefix = '' self.wave_number = 'Emmisive_Centroid_Wave_Number' def get_dataset(self, dataset_id, ds_info): """Create DataArray from file content for `dataset_id`.""" file_key = self.geolocation_prefix + ds_info.get('file_key', dataset_id['name']) if self.platform_id == 'FY3B': file_key = file_key.replace('Data/', '') data = self[file_key] band_index = ds_info.get('band_index') valid_range = data.attrs.pop('valid_range', None) if isinstance(valid_range, np.ndarray): valid_range = valid_range.tolist() if band_index is not None: data = data[band_index] if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) if 'Emissive' in file_key: self._calibrate_emissive(data, band_index) elif 'RefSB' in file_key: data = self._calibrate_reflective(data, band_index) else: slope = self._correct_slope(self[file_key + '/attr/Slope']) intercept = self[file_key + '/attr/Intercept'] if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) data = data * slope + intercept new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))} data = data.rename(new_dims) # use lowercase sensor name to be consistent with the rest of satpy data.attrs.update({'platform_name': self['/attr/Satellite Name'], 'sensor': self['/attr/Sensor Identification Code'].lower()}) data.attrs.update(ds_info) units = self.get(file_key + '/attr/units') if units is not None and str(units).lower() != 'none': data.attrs.update({'units': self.get(file_key + '/attr/units')}) elif data.attrs.get('calibration') == 'reflectance': data.attrs.update({'units': '%'}) else: data.attrs.update({'units': '1'}) return data def _calibrate_reflective(self, data, band_index): if self.platform_id == 'FY3B': coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1) else: coeffs = self['/attr/RefSB_Cal_Coefficients'] slope = self._correct_slope(coeffs[0::2]) intercept = coeffs[1::2] data = data * slope[band_index] + intercept[band_index] return data def _calibrate_emissive(self, data, band_index): slope = self._correct_slope(self[self.l1b_prefix + 'Emissive_Radiance_Scales']. 
data[:, band_index][:, np.newaxis]) intercept = self[self.l1b_prefix + 'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis] # Converts cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = self['/attr/' + self.wave_number][band_index] * 100 bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays data.data = da.from_array(bt_data, chunks=data.data.chunks) else: # new versions of pyspectral can do dask arrays data.data = bt_data def _correct_slope(self, slope): # 0 slope is invalid. Note: slope can be a scalar or array. return da.where(slope == 0, 1, slope) @property def start_time(self): """Get starting observation time.""" start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z' return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get ending observation time.""" end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') satpy-0.34.0/satpy/readers/xmlformat.py000066400000000000000000000134541420401153000201030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reads a format from an xml file to create dtypes and scaling factor arrays.""" from __future__ import annotations from xml.etree.ElementTree import ElementTree import numpy as np VARIABLES: dict[str, str] = {} TYPEC = {"boolean": ">i1", "integer2": ">i2", "integer4": ">i4", "uinteger2": ">u2", "uinteger4": ">u4", } def process_delimiter(elt, ascii=False): """Process a 'delimiter' tag.""" del elt, ascii def process_field(elt, ascii=False): """Process a 'field' tag.""" # NOTE: if there is a variable defined in this field and it is different # from the default, we could change the value and restart. 
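    # Illustrative example (the element below is made up, not taken from a
    # real format description): a field such as
    #     <field name="SCENE_RADIANCES" type="integer2" scaling-factor="10^2"/>
    # maps to the big-endian dtype '>i2' with scale 10 / 10e2 = 0.01, i.e. the
    # stored counts are divided by 100 when the scales are applied.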
scale = np.uint8(1) if elt.get("type") == "bitfield" and not ascii: current_type = ">u" + str(int(elt.get("length")) // 8) scale = np.dtype(current_type).type(1) elif (elt.get("length") is not None): if ascii: add = 33 else: add = 0 current_type = "S" + str(int(elt.get("length")) + add) else: current_type = TYPEC[elt.get("type")] try: scale = (10 / float(elt.get("scaling-factor", "10").replace("^", "e"))) except ValueError: scale = (10 / np.array( elt.get("scaling-factor").replace("^", "e").split(","), dtype=np.float64)) return ((elt.get("name"), current_type, scale)) def process_array(elt, ascii=False): """Process an 'array' tag.""" del ascii chld = list(elt) if len(chld) > 1: raise ValueError() chld = chld[0] try: name, current_type, scale = CASES[chld.tag](chld) size = None except ValueError: name, current_type, size, scale = CASES[chld.tag](chld) del name myname = elt.get("name") or elt.get("label") if elt.get("length").startswith("$"): length = int(VARIABLES[elt.get("length")[1:]]) else: length = int(elt.get("length")) if size is not None: return (myname, current_type, (length, ) + size, scale) else: return (myname, current_type, (length, ), scale) CASES = {"delimiter": process_delimiter, "field": process_field, "array": process_array, } def to_dtype(val): """Parse *val* to return a dtype.""" return np.dtype([i[:-1] for i in val]) def to_scaled_dtype(val): """Parse *val* to return a dtype.""" res = [] for i in val: if i[1].startswith("S"): res.append((i[0], i[1]) + i[2:-1]) else: try: res.append((i[0], i[-1].dtype) + i[2:-1]) except AttributeError: res.append((i[0], type(i[-1])) + i[2:-1]) return np.dtype(res) def to_scales(val): """Parse *val* to return an array of scale factors.""" res = [] for i in val: if len(i) == 3: res.append((i[0], type(i[2]))) else: try: res.append((i[0], i[3].dtype, i[2])) except AttributeError: res.append((i[0], type(i[3]), i[2])) dtype = np.dtype(res) scales = np.zeros((1, ), dtype=dtype) for i in val: try: scales[i[0]] = i[-1] except ValueError: scales[i[0]] = np.repeat(np.array(i[-1]), i[2][1]).reshape(i[2]) return scales def parse_format(xml_file): """Parse the xml file to create types, scaling factor types, and scales.""" tree = ElementTree() tree.parse(xml_file) for param in tree.find("parameters"): VARIABLES[param.get("name")] = param.get("value") types_scales = {} for prod in tree.find("product"): ascii = (prod.tag in ["mphr", "sphr"]) res = [] for i in prod: lres = CASES[i.tag](i, ascii) if lres is not None: res.append(lres) types_scales[(prod.tag, int(prod.get("subclass")))] = res types = {} stypes = {} scales = {} for key, val in types_scales.items(): types[key] = to_dtype(val) stypes[key] = to_scaled_dtype(val) scales[key] = to_scales(val) return types, stypes, scales def _apply_scales(array, scales, dtype): """Apply scales to the array.""" new_array = np.empty(array.shape, dtype) for i in array.dtype.names: try: new_array[i] = array[i] * scales[i] except TypeError: if np.all(scales[i] == 1): new_array[i] = array[i] else: raise return new_array class XMLFormat(object): """XMLFormat object.""" def __init__(self, filename): """Init the format reader.""" self.types, self.stypes, self.scales = parse_format(filename) self.translator = {} for key, val in self.types.items(): self.translator[val] = (self.scales[key], self.stypes[key]) def dtype(self, key): """Get the dtype for the format object.""" return self.types[key] def apply_scales(self, array): """Apply scales to *array*.""" return _apply_scales(array, *self.translator[array.dtype]) if __name__ 
== '__main__': pass satpy-0.34.0/satpy/readers/yaml_reader.py000066400000000000000000001607371420401153000203650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019, 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Base classes and utilities for all readers configured by YAML files.""" import glob import itertools import logging import os import warnings from abc import ABCMeta, abstractmethod from collections import OrderedDict, deque from contextlib import suppress from fnmatch import fnmatch from weakref import WeakValueDictionary import numpy as np import xarray as xr import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader # type: ignore from pyresample.boundary import AreaDefBoundary, Boundary from pyresample.geometry import AreaDefinition, StackedAreaDefinition, SwathDefinition from trollsift.parser import globify, parse from satpy import DatasetDict from satpy.aux_download import DataDownloadMixin from satpy.dataset import DataID, DataQuery, get_key from satpy.dataset.dataid import default_co_keys_config, default_id_keys_config, get_keys_from_config from satpy.resample import add_crs_xy_coords, get_area_def from satpy.utils import recursive_dict_update logger = logging.getLogger(__name__) def listify_string(something): """Take *something* and make it a list. *something* is either a list of strings or a string, in which case the function returns a list containing the string. If *something* is None, an empty list is returned. """ if isinstance(something, str): return [something] if something is not None: return list(something) return list() def _get_filebase(path, pattern): """Get the end of *path* of same length as *pattern*.""" # convert any `/` on Windows to `\\` path = os.path.normpath(path) # A pattern can include directories tail_len = len(pattern.split(os.path.sep)) return os.path.join(*str(path).split(os.path.sep)[-tail_len:]) def _match_filenames(filenames, pattern): """Get the filenames matching *pattern*.""" matching = set() glob_pat = globify(pattern) for filename in filenames: if fnmatch(_get_filebase(filename, pattern), glob_pat): matching.add(filename) return matching def _verify_reader_info_assign_config_files(config, config_files): try: reader_info = config['reader'] except KeyError: raise KeyError( "Malformed config file {}: missing reader 'reader'".format( config_files)) else: reader_info['config_files'] = config_files def load_yaml_configs(*config_files, loader=UnsafeLoader): """Merge a series of YAML reader configuration files. Args: *config_files (str): One or more pathnames to YAML-based reader configuration files that will be merged to create a single configuration. loader: Yaml loader object to load the YAML with. Defaults to `UnsafeLoader`. 
Returns: dict Dictionary representing the entire YAML configuration with the addition of `config['reader']['config_files']` (the list of YAML pathnames that were merged). """ config = {} logger.debug('Reading %s', str(config_files)) for config_file in config_files: with open(config_file, 'r', encoding='utf-8') as fd: config = recursive_dict_update(config, yaml.load(fd, Loader=loader)) _verify_reader_info_assign_config_files(config, config_files) return config class AbstractYAMLReader(metaclass=ABCMeta): """Base class for all readers that use YAML configuration files. This class should only be used in rare cases. Its child class `FileYAMLReader` should be used in most cases. """ def __init__(self, config_dict): """Load information from YAML configuration file about how to read data files.""" if isinstance(config_dict, str): raise ValueError("Passing config files to create a Reader is " "deprecated. Use ReaderClass.from_config_files " "instead.") self.config = config_dict self.info = self.config['reader'] self.name = self.info['name'] self.file_patterns = [] for file_type, filetype_info in self.config['file_types'].items(): filetype_info.setdefault('file_type', file_type) # correct separator if needed file_patterns = [os.path.join(*pattern.split('/')) for pattern in filetype_info['file_patterns']] filetype_info['file_patterns'] = file_patterns self.file_patterns.extend(file_patterns) if 'sensors' in self.info and not isinstance(self.info['sensors'], (list, tuple)): self.info['sensors'] = [self.info['sensors']] self.datasets = self.config.get('datasets', {}) self._id_keys = self.info.get('data_identification_keys', default_id_keys_config) self._co_keys = self.info.get('coord_identification_keys', default_co_keys_config) self.info['filenames'] = [] self.all_ids = {} self.load_ds_ids_from_config() @classmethod def from_config_files(cls, *config_files, **reader_kwargs): """Create a reader instance from one or more YAML configuration files.""" config_dict = load_yaml_configs(*config_files) return config_dict['reader']['reader'](config_dict, **reader_kwargs) @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" return self.info['sensors'] or [] @property def all_dataset_ids(self): """Get DataIDs of all datasets known to this reader.""" return self.all_ids.keys() @property def all_dataset_names(self): """Get names of all datasets known to this reader.""" # remove the duplicates from various calibration and resolutions return set(ds_id['name'] for ds_id in self.all_dataset_ids) @property def available_dataset_ids(self): """Get DataIDs that are loadable by this reader.""" logger.warning( "Available datasets are unknown, returning all datasets...") return self.all_dataset_ids @property def available_dataset_names(self): """Get names of datasets that are loadable by this reader.""" return (ds_id['name'] for ds_id in self.available_dataset_ids) @property @abstractmethod def start_time(self): """Start time of the reader.""" @property @abstractmethod def end_time(self): """End time of the reader.""" @abstractmethod def filter_selected_filenames(self, filenames): """Filter provided filenames by parameters in reader configuration. Returns: iterable of usable files """ @abstractmethod def load(self, dataset_keys): """Load *dataset_keys*.""" def supports_sensor(self, sensor): """Check if *sensor* is supported. Returns True is *sensor* is None. 
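        For example, ``reader.supports_sensor('viirs')`` is True only when
        ``'viirs'`` is listed under ``sensors`` in the reader's YAML metadata,
        while ``reader.supports_sensor(None)`` is always True.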
""" if sensor and not (set(self.info.get("sensors")) & set(listify_string(sensor))): return False return True def select_files_from_directory( self, directory=None, fs=None): """Find files for this reader in *directory*. If directory is None or '', look in the current directory. Searches the local file system by default. Can search on a remote filesystem by passing an instance of a suitable implementation of ``fsspec.spec.AbstractFileSystem``. Args: directory (Optional[str]): Path to search. fs (Optional[FileSystem]): fsspec FileSystem implementation to use. Defaults to None, using local file system. Returns: list of strings describing matching files """ filenames = set() if directory is None: directory = '' # all the glob patterns that we are going to look at all_globs = {os.path.join(directory, globify(pattern)) for pattern in self.file_patterns} # custom filesystem or not if fs is None: matcher = glob.iglob else: matcher = fs.glob # get all files matching these patterns for glob_pat in all_globs: filenames.update(matcher(glob_pat)) return filenames def select_files_from_pathnames(self, filenames): """Select the files from *filenames* this reader can handle.""" selected_filenames = [] filenames = set(filenames) # make a copy of the inputs for pattern in self.file_patterns: matching = _match_filenames(filenames, pattern) filenames -= matching for fname in matching: if fname not in selected_filenames: selected_filenames.append(fname) if len(selected_filenames) == 0: logger.warning("No filenames found for reader: %s", self.name) return selected_filenames def get_dataset_key(self, key, **kwargs): """Get the fully qualified `DataID` matching `key`. See `satpy.readers.get_key` for more information about kwargs. """ return get_key(key, self.all_ids.keys(), **kwargs) def load_ds_ids_from_config(self): """Get the dataset ids from the config.""" ids = [] for dataset in self.datasets.values(): # xarray doesn't like concatenating attributes that are lists # https://github.com/pydata/xarray/issues/2060 if 'coordinates' in dataset and \ isinstance(dataset['coordinates'], list): dataset['coordinates'] = tuple(dataset['coordinates']) id_keys = get_keys_from_config(self._id_keys, dataset) # Build each permutation/product of the dataset id_kwargs = self._build_id_permutations(dataset, id_keys) for id_params in itertools.product(*id_kwargs): dsid = DataID(id_keys, **dict(zip(id_keys, id_params))) ids.append(dsid) # create dataset infos specifically for this permutation ds_info = dataset.copy() for key in dsid.keys(): if isinstance(ds_info.get(key), dict): with suppress(KeyError): # KeyError is suppressed in case the key does not represent interesting metadata, # eg a custom type ds_info.update(ds_info[key][dsid.get(key)]) # this is important for wavelength which was converted # to a tuple ds_info[key] = dsid.get(key) self.all_ids[dsid] = ds_info return ids def _build_id_permutations(self, dataset, id_keys): """Build each permutation/product of the dataset.""" id_kwargs = [] for key, idval in id_keys.items(): val = dataset.get(key, idval.get('default') if idval is not None else None) val_type = None if idval is not None: val_type = idval.get('type') if val_type is not None and issubclass(val_type, tuple): # special case: wavelength can be [min, nominal, max] # but is still considered 1 option id_kwargs.append((val,)) elif isinstance(val, (list, tuple, set)): # this key has multiple choices # (ex. 
250 meter, 500 meter, 1000 meter resolutions) id_kwargs.append(val) elif isinstance(val, dict): id_kwargs.append(val.keys()) else: # this key only has one choice so make it a one # item iterable id_kwargs.append((val,)) return id_kwargs class FileYAMLReader(AbstractYAMLReader, DataDownloadMixin): """Primary reader base class that is configured by a YAML file. This class uses the idea of per-file "file handler" objects to read file contents and determine what is available in the file. This differs from the base :class:`AbstractYAMLReader` which does not depend on individual file handler objects. In almost all cases this class should be used over its base class and can be used as a reader by itself and requires no subclassing. """ # WeakValueDictionary objects must be created at the class level or else # dask will not be able to serialize them on a distributed environment _coords_cache: WeakValueDictionary = WeakValueDictionary() def __init__(self, config_dict, filter_parameters=None, filter_filenames=True, **kwargs): """Set up initial internal storage for loading file data.""" super(FileYAMLReader, self).__init__(config_dict) self.file_handlers = {} self.available_ids = {} self.filter_filenames = self.info.get('filter_filenames', filter_filenames) self.filter_parameters = filter_parameters or {} self.register_data_files() @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" if not self.file_handlers: return self.info['sensors'] file_handlers = (handlers[0] for handlers in self.file_handlers.values()) sensor_names = set() for fh in file_handlers: try: sensor_names.update(fh.sensor_names) except NotImplementedError: continue if not sensor_names: return self.info['sensors'] return sorted(sensor_names) @property def available_dataset_ids(self): """Get DataIDs that are loadable by this reader.""" return self.available_ids.keys() @property def start_time(self): """Start time of the earlier file used by this reader.""" if not self.file_handlers: raise RuntimeError("Start time unknown until files are selected") return min(x[0].start_time for x in self.file_handlers.values()) @property def end_time(self): """End time of the latest file used by this reader.""" if not self.file_handlers: raise RuntimeError("End time unknown until files are selected") return max(x[-1].end_time for x in self.file_handlers.values()) @staticmethod def check_file_covers_area(file_handler, check_area): """Check if the file covers the current area. If the file doesn't provide any bounding box information or 'area' was not provided in `filter_parameters`, the check returns True. """ try: gbb = Boundary(*file_handler.get_bounding_box()) except NotImplementedError as err: logger.debug("Bounding box computation not implemented: %s", str(err)) else: abb = AreaDefBoundary(get_area_def(check_area), frequency=1000) intersection = gbb.contour_poly.intersection(abb.contour_poly) if not intersection: return False return True def find_required_filehandlers(self, requirements, filename_info): """Find the necessary file handlers for the given requirements. We assume here requirements are available. Raises: KeyError, if no handler for the given requirements is available. RuntimeError, if there is a handler for the given requirements, but it doesn't match the filename info. 
""" req_fh = [] filename_info = set(filename_info.items()) if requirements: for requirement in requirements: for fhd in self.file_handlers[requirement]: if set(fhd.filename_info.items()).issubset(filename_info): req_fh.append(fhd) break else: raise RuntimeError("No matching requirement file of type " "{}".format(requirement)) # break everything and continue to next # filetype! return req_fh def sorted_filetype_items(self): """Sort the instance's filetypes in using order.""" processed_types = [] file_type_items = deque(self.config['file_types'].items()) while len(file_type_items): filetype, filetype_info = file_type_items.popleft() requirements = filetype_info.get('requires') if requirements is not None: # requirements have not been processed yet -> wait missing = [req for req in requirements if req not in processed_types] if missing: file_type_items.append((filetype, filetype_info)) continue processed_types.append(filetype) yield filetype, filetype_info @staticmethod def filename_items_for_filetype(filenames, filetype_info): """Iterate over the filenames matching *filetype_info*.""" if not isinstance(filenames, set): # we perform set operations later on to improve performance filenames = set(filenames) for pattern in filetype_info['file_patterns']: matched_files = set() matches = _match_filenames(filenames, pattern) for filename in matches: try: filename_info = parse( pattern, _get_filebase(filename, pattern)) except ValueError: logger.debug("Can't parse %s with %s.", filename, pattern) continue matched_files.add(filename) yield filename, filename_info filenames -= matched_files def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=None): """Generate new filehandler instances.""" requirements = filetype_info.get('requires') filetype_cls = filetype_info['file_reader'] if fh_kwargs is None: fh_kwargs = {} for filename, filename_info in filename_items: try: req_fh = self.find_required_filehandlers(requirements, filename_info) except KeyError as req: msg = "No handler for reading requirement {} for {}".format( req, filename) warnings.warn(msg) continue except RuntimeError as err: warnings.warn(str(err) + ' for {}'.format(filename)) continue yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs) def time_matches(self, fstart, fend): """Check that a file's start and end time mtach filter_parameters of this reader.""" start_time = self.filter_parameters.get('start_time') end_time = self.filter_parameters.get('end_time') fend = fend or fstart if start_time and fend and fend < start_time: return False if end_time and fstart and fstart > end_time: return False return True def metadata_matches(self, sample_dict, file_handler=None): """Check that file metadata matches filter_parameters of this reader.""" # special handling of start/end times if not self.time_matches( sample_dict.get('start_time'), sample_dict.get('end_time')): return False for key, val in self.filter_parameters.items(): if key != 'area' and key not in sample_dict: continue if key in ['start_time', 'end_time']: continue elif key == 'area' and file_handler: if not self.check_file_covers_area(file_handler, val): logger.info('Filtering out %s based on area', file_handler.filename) break elif key in sample_dict and val != sample_dict[key]: # don't use this file break else: # all the metadata keys are equal return True return False def filter_filenames_by_info(self, filename_items): """Filter out file using metadata from the filenames. Currently only uses start and end time. 
If only start time is available from the filename, keep all the filename that have a start time before the requested end time. """ for filename, filename_info in filename_items: fend = filename_info.get('end_time') fstart = filename_info.setdefault('start_time', fend) if fend and fend < fstart: # correct for filenames with 1 date and 2 times fend = fend.replace(year=fstart.year, month=fstart.month, day=fstart.day) filename_info['end_time'] = fend if self.metadata_matches(filename_info): yield filename, filename_info def filter_fh_by_metadata(self, filehandlers): """Filter out filehandlers using provide filter parameters.""" for filehandler in filehandlers: filehandler.metadata['start_time'] = filehandler.start_time filehandler.metadata['end_time'] = filehandler.end_time if self.metadata_matches(filehandler.metadata, filehandler): yield filehandler def filter_selected_filenames(self, filenames): """Filter provided files based on metadata in the filename.""" if not isinstance(filenames, set): # we perform set operations later on to improve performance filenames = set(filenames) for _, filetype_info in self.sorted_filetype_items(): filename_iter = self.filename_items_for_filetype(filenames, filetype_info) if self.filter_filenames: filename_iter = self.filter_filenames_by_info(filename_iter) for fn, _ in filename_iter: yield fn def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=None): """Create filehandlers for a given filetype.""" filename_iter = self.filename_items_for_filetype(filenames, filetype_info) if self.filter_filenames: # preliminary filter of filenames based on start/end time # to reduce the number of files to open filename_iter = self.filter_filenames_by_info(filename_iter) filehandler_iter = self._new_filehandler_instances(filetype_info, filename_iter, fh_kwargs=fh_kwargs) filtered_iter = self.filter_fh_by_metadata(filehandler_iter) return list(filtered_iter) def create_filehandlers(self, filenames, fh_kwargs=None): """Organize the filenames into file types and create file handlers.""" filenames = list(OrderedDict.fromkeys(filenames)) logger.debug("Assigning to %s: %s", self.info['name'], filenames) self.info.setdefault('filenames', []).extend(filenames) filename_set = set(filenames) created_fhs = {} # load files that we know about by creating the file handlers for filetype, filetype_info in self.sorted_filetype_items(): filehandlers = self._new_filehandlers_for_filetype(filetype_info, filename_set, fh_kwargs=fh_kwargs) if filehandlers: created_fhs[filetype] = filehandlers self.file_handlers[filetype] = sorted( self.file_handlers.get(filetype, []) + filehandlers, key=lambda fhd: (fhd.start_time, fhd.filename)) # load any additional dataset IDs determined dynamically from the file # and update any missing metadata that only the file knows self.update_ds_ids_from_file_handlers() return created_fhs def _file_handlers_available_datasets(self): """Generate a series of available dataset information. This is done by chaining file handler's :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` together. See that method's documentation for more information. Returns: Generator of (bool, dict) where the boolean tells whether the current dataset is available from any of the file handlers. The boolean can also be None in the case where no loaded file handler is configured to load the dataset. The dictionary is the metadata provided either by the YAML configuration files or by the file handler itself if it is a new dataset. 
The file handler may have also supplemented or modified the information. """ # flatten all file handlers in to one list flat_fhs = (fh for fhs in self.file_handlers.values() for fh in fhs) id_values = list(self.all_ids.values()) configured_datasets = ((None, ds_info) for ds_info in id_values) for fh in flat_fhs: # chain the 'available_datasets' methods together by calling the # current file handler's method with the previous ones result configured_datasets = fh.available_datasets(configured_datasets=configured_datasets) return configured_datasets def update_ds_ids_from_file_handlers(self): """Add or modify available dataset information. Each file handler is consulted on whether or not it can load the dataset with the provided information dictionary. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for more information. """ avail_datasets = self._file_handlers_available_datasets() new_ids = {} for is_avail, ds_info in avail_datasets: # especially from the yaml config coordinates = ds_info.get('coordinates') if isinstance(coordinates, list): # xarray doesn't like concatenating attributes that are # lists: https://github.com/pydata/xarray/issues/2060 ds_info['coordinates'] = tuple(ds_info['coordinates']) ds_info.setdefault('modifiers', tuple()) # default to no mods # Create DataID for this dataset ds_id = DataID(self._id_keys, **ds_info) # all datasets new_ids[ds_id] = ds_info # available datasets # False == we have the file type but it doesn't have this dataset # None == we don't have the file type object to ask if is_avail: self.available_ids[ds_id] = ds_info self.all_ids = new_ids @staticmethod def _load_dataset(dsid, ds_info, file_handlers, dim='y', **kwargs): """Load only a piece of the dataset.""" slice_list = [] failure = True for fh in file_handlers: try: projectable = fh.get_dataset(dsid, ds_info) if projectable is not None: slice_list.append(projectable) failure = False except KeyError: logger.warning("Failed to load {} from {}".format(dsid, fh), exc_info=True) if failure: raise KeyError( "Could not load {} from any provided files".format(dsid)) if dim not in slice_list[0].dims: return slice_list[0] res = xr.concat(slice_list, dim=dim) combined_info = file_handlers[0].combine_info( [p.attrs for p in slice_list]) res.attrs = combined_info return res def _load_dataset_data(self, file_handlers, dsid, **kwargs): ds_info = self.all_ids[dsid] proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs) # FIXME: areas could be concatenated here # Update the metadata proj.attrs['start_time'] = file_handlers[0].start_time proj.attrs['end_time'] = file_handlers[-1].end_time proj.attrs['reader'] = self.name return proj def _preferred_filetype(self, filetypes): """Get the preferred filetype out of the *filetypes* list. At the moment, it just returns the first filetype that has been loaded. 
""" if not isinstance(filetypes, list): filetypes = [filetypes] # look through the file types and use the first one that we have loaded for filetype in filetypes: if filetype in self.file_handlers: return filetype return None def _load_area_def(self, dsid, file_handlers, **kwargs): """Load the area definition of *dsid*.""" return _load_area_def(dsid, file_handlers) def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for *dsid*.""" ds_info = self.all_ids[dsid] cids = [] for cinfo in ds_info.get('coordinates', []): if not isinstance(cinfo, dict): cinfo = {'name': cinfo} for key in self._co_keys: if key == 'name': continue if key in ds_info: if ds_info[key] is not None: cinfo[key] = ds_info[key] cid = DataQuery.from_dict(cinfo) cids.append(self.get_dataset_key(cid)) return cids def _get_coordinates_for_dataset_keys(self, dsids): """Get all coordinates.""" coordinates = {} for dsid in dsids: cids = self._get_coordinates_for_dataset_key(dsid) coordinates.setdefault(dsid, []).extend(cids) return coordinates def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] filetype = self._preferred_filetype(ds_info['file_type']) if filetype is None: logger.warning("Required file type '%s' not found or loaded for " "'%s'", ds_info['file_type'], dsid['name']) else: return self.file_handlers[filetype] def _make_area_from_coords(self, coords): """Create an appropriate area with the given *coords*.""" if len(coords) == 2: lons, lats = self._get_lons_lats_from_coords(coords) sdef = self._make_swath_definition_from_lons_lats(lons, lats) return sdef if len(coords) != 0: raise NameError("Don't know what to do with coordinates " + str( coords)) def _get_lons_lats_from_coords(self, coords): """Get lons and lats from the coords list.""" lons, lats = None, None for coord in coords: if coord.attrs.get('standard_name') == 'longitude': lons = coord elif coord.attrs.get('standard_name') == 'latitude': lats = coord if lons is None or lats is None: raise ValueError('Missing longitude or latitude coordinate: ' + str(coords)) return lons, lats def _make_swath_definition_from_lons_lats(self, lons, lats): """Make a swath definition instance from lons and lats.""" key = None try: key = (lons.data.name, lats.data.name) sdef = FileYAMLReader._coords_cache.get(key) except AttributeError: sdef = None if sdef is None: sdef = SwathDefinition(lons, lats) sensor_str = '_'.join(self.info['sensors']) shape_str = '_'.join(map(str, lons.shape)) sdef.name = "{}_{}_{}_{}".format(sensor_str, shape_str, lons.attrs.get('name', lons.name), lats.attrs.get('name', lats.name)) if key is not None: FileYAMLReader._coords_cache[key] = sdef return sdef def _load_dataset_area(self, dsid, file_handlers, coords, **kwargs): """Get the area for *dsid*.""" try: return self._load_area_def(dsid, file_handlers, **kwargs) except NotImplementedError: if any(x is None for x in coords): logger.warning( "Failed to load coordinates for '{}'".format(dsid)) return None area = self._make_area_from_coords(coords) if area is None: logger.debug("No coordinates found for %s", str(dsid)) return area def _load_dataset_with_area(self, dsid, coords, **kwargs): """Load *dsid* and its area if available.""" file_handlers = self._get_file_handlers(dsid) if not file_handlers: return try: ds = self._load_dataset_data(file_handlers, dsid, **kwargs) except (KeyError, ValueError) as err: logger.exception("Could not load dataset '%s': %s", dsid, str(err)) return None coords = 
self._assign_coords_from_dataarray(coords, ds) area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs) if area is not None: ds.attrs['area'] = area ds = add_crs_xy_coords(ds, area) return ds @staticmethod def _assign_coords_from_dataarray(coords, ds): """Assign coords from the *ds* dataarray if needed.""" if not coords: coords = [] for coord in ds.coords.values(): if coord.attrs.get('standard_name') in ['longitude', 'latitude']: coords.append(coord) return coords def _load_ancillary_variables(self, datasets, **kwargs): """Load the ancillary variables of `datasets`.""" all_av_ids = self._gather_ancillary_variables_ids(datasets) loadable_av_ids = [av_id for av_id in all_av_ids if av_id not in datasets] if not all_av_ids: return if loadable_av_ids: self.load(loadable_av_ids, previous_datasets=datasets, **kwargs) for dataset in datasets.values(): new_vars = [] for av_id in dataset.attrs.get('ancillary_variables', []): if isinstance(av_id, DataID): new_vars.append(datasets[av_id]) else: new_vars.append(av_id) dataset.attrs['ancillary_variables'] = new_vars def _gather_ancillary_variables_ids(self, datasets): """Gather ancillary variables' ids. This adds/modifies the dataset's `ancillary_variables` attr. """ all_av_ids = set() for dataset in datasets.values(): ancillary_variables = dataset.attrs.get('ancillary_variables', []) if not isinstance(ancillary_variables, (list, tuple, set)): ancillary_variables = ancillary_variables.split(' ') av_ids = [] for key in ancillary_variables: try: av_ids.append(self.get_dataset_key(key)) except KeyError: logger.warning("Can't load ancillary dataset %s", str(key)) all_av_ids |= set(av_ids) dataset.attrs['ancillary_variables'] = av_ids return all_av_ids def get_dataset_key(self, key, available_only=False, **kwargs): """Get the fully qualified `DataID` matching `key`. This will first search through available DataIDs, datasets that should be possible to load, and fallback to "known" datasets, those that are configured but aren't loadable from the provided files. Providing ``available_only=True`` will stop this fallback behavior and raise a ``KeyError`` exception if no available dataset is found. Args: key (str, float, DataID, DataQuery): Key to search for in this reader. available_only (bool): Search only loadable datasets for the provided key. Loadable datasets are always searched first, but if ``available_only=False`` (default) then all known datasets will be searched. kwargs: See :func:`satpy.readers.get_key` for more information about kwargs. Returns: Best matching DataID to the provided ``key``. Raises: KeyError: if no key match is found. """ try: return get_key(key, self.available_dataset_ids, **kwargs) except KeyError: if available_only: raise return get_key(key, self.all_dataset_ids, **kwargs) def load(self, dataset_keys, previous_datasets=None, **kwargs): """Load `dataset_keys`. If `previous_datasets` is provided, do not reload those. 
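        A sketch of direct use (normally :meth:`Scene.load` calls this for you;
        the dataset name below is purely illustrative)::

            datasets = reader.load(['IR_108'])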
""" all_datasets = previous_datasets or DatasetDict() datasets = DatasetDict() # Include coordinates in the list of datasets to load dsids = [self.get_dataset_key(ds_key) for ds_key in dataset_keys] coordinates = self._get_coordinates_for_dataset_keys(dsids) all_dsids = list(set().union(*coordinates.values())) + dsids for dsid in all_dsids: if dsid in all_datasets: continue coords = [all_datasets.get(cid, None) for cid in coordinates.get(dsid, [])] ds = self._load_dataset_with_area(dsid, coords, **kwargs) if ds is not None: all_datasets[dsid] = ds if dsid in dsids: datasets[dsid] = ds self._load_ancillary_variables(all_datasets, **kwargs) return datasets def _load_area_def(dsid, file_handlers): """Load the area definition of *dsid*.""" area_defs = [fh.get_area_def(dsid) for fh in file_handlers] area_defs = [area_def for area_def in area_defs if area_def is not None] final_area = StackedAreaDefinition(*area_defs) return final_area.squeeze() def _set_orientation(dataset, upper_right_corner): """Set the orientation of geostationary datasets. Allows to flip geostationary imagery when loading the datasets. Example call: scn.load(['VIS008'], upper_right_corner='NE') Args: dataset: Dataset to be flipped. upper_right_corner (str): Direction of the upper right corner of the image after flipping. Possible options are 'NW', 'NE', 'SW', 'SE', or 'native'. The common upright image orientation corresponds to 'NE'. Defaults to 'native' (no flipping is applied). """ # do some checks and early returns if upper_right_corner == 'native': logger.debug("Requested orientation for Dataset {} is 'native' (default). " "No flipping is applied.".format(dataset.attrs.get('name'))) return dataset if upper_right_corner not in ['NW', 'NE', 'SE', 'SW', 'native']: raise ValueError("Target orientation for Dataset {} not recognized. 
" "Kwarg upper_right_corner should be " "'NW', 'NE', 'SW', 'SE' or 'native'.".format(dataset.attrs.get('name', 'unknown_name'))) if 'area' not in dataset.attrs: logger.info("Dataset {} is missing the area attribute " "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) return dataset projection_type = _get_projection_type(dataset.attrs['area']) accepted_geos_proj_types = ['Geostationary Satellite (Sweep Y)', 'Geostationary Satellite (Sweep X)'] if projection_type not in accepted_geos_proj_types: logger.info("Dataset {} is not in one of the known geostationary projections {} " "and cannot be flipped.".format(dataset.attrs.get('name', 'unknown_name'), accepted_geos_proj_types)) return dataset target_eastright, target_northup = _get_target_scene_orientation(upper_right_corner) area_extents_to_update = _get_dataset_area_extents_array(dataset.attrs['area']) current_eastright, current_northup = _get_current_scene_orientation(area_extents_to_update) if target_northup == current_northup and target_eastright == current_eastright: logger.info("Dataset {} is already in the target orientation " "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) return dataset if target_northup != current_northup: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, 'upsidedown') if target_eastright != current_eastright: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, 'leftright') dataset.attrs['area'] = _get_new_flipped_area_definition(dataset.attrs['area'], area_extents_to_update, flip_areadef_stacking=target_northup != current_northup) return dataset def _get_projection_type(dataset_area_attr): """Get the projection type from the crs coordinate operation method name.""" if isinstance(dataset_area_attr, StackedAreaDefinition): # assumes all AreaDefinitions in a tackedAreaDefinition have the same projection area_crs = dataset_area_attr.defs[0].crs else: area_crs = dataset_area_attr.crs return area_crs.coordinate_operation.method_name def _get_target_scene_orientation(upper_right_corner): """Get the target scene orientation from the target upper_right_corner. 'NE' corresponds to target_eastright and target_northup being True. 
""" target_northup = upper_right_corner in ['NW', 'NE'] target_eastright = upper_right_corner in ['NE', 'SE'] return target_eastright, target_northup def _get_dataset_area_extents_array(dataset_area_attr): """Get dataset area extents in a numpy array for further flipping.""" if isinstance(dataset_area_attr, StackedAreaDefinition): # array of area extents if the Area is a StackedAreaDefinition area_extents_to_update = np.asarray([list(area_def.area_extent) for area_def in dataset_area_attr.defs]) else: # array with a single item if Area is in one piece area_extents_to_update = np.asarray([list(dataset_area_attr.area_extent)]) return area_extents_to_update def _get_current_scene_orientation(area_extents_to_update): """Get the current scene orientation from the area_extents.""" # assumes all AreaDefinitions inside a StackedAreaDefinition have the same orientation current_northup = area_extents_to_update[0, 3] - area_extents_to_update[0, 1] > 0 current_eastright = area_extents_to_update[0, 2] - area_extents_to_update[0, 0] > 0 return current_eastright, current_northup def _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, flip_direction): """Flip the data and area extents array for a dataset.""" logger.info("Flipping Dataset {} {}.".format(dataset.attrs.get('name', 'unknown_name'), flip_direction)) if flip_direction == 'upsidedown': dataset = dataset[::-1, :] area_extents_to_update[:, [1, 3]] = area_extents_to_update[:, [3, 1]] elif flip_direction == 'leftright': dataset = dataset[:, ::-1] area_extents_to_update[:, [0, 2]] = area_extents_to_update[:, [2, 0]] else: raise ValueError("Flip direction not recognized. Should be either 'upsidedown' or 'leftright'.") return dataset, area_extents_to_update def _get_new_flipped_area_definition(dataset_area_attr, area_extents_to_update, flip_areadef_stacking): """Get a new area definition with updated area_extents for flipped geostationary datasets.""" if len(area_extents_to_update) == 1: # just update the area extents using the AreaDefinition copy method new_area_def = dataset_area_attr.copy(area_extent=area_extents_to_update[0]) else: # update the stacked AreaDefinitions singularly new_area_defs_to_stack = [] for n_area_def, area_def in enumerate(dataset_area_attr.defs): new_area_defs_to_stack.append(area_def.copy(area_extent=area_extents_to_update[n_area_def])) # flip the order of stacking if the area is upside down if flip_areadef_stacking: new_area_defs_to_stack = new_area_defs_to_stack[::-1] # regenerate the StackedAreaDefinition new_area_def = StackedAreaDefinition(*new_area_defs_to_stack) return new_area_def class GEOFlippableFileYAMLReader(FileYAMLReader): """Reader for flippable geostationary data.""" def _load_dataset_with_area(self, dsid, coords, upper_right_corner='native', **kwargs): ds = super(GEOFlippableFileYAMLReader, self)._load_dataset_with_area(dsid, coords, **kwargs) if ds is not None: ds = _set_orientation(ds, upper_right_corner) return ds class GEOSegmentYAMLReader(GEOFlippableFileYAMLReader): """Reader for segmented geostationary data. This reader pads the data to full geostationary disk if necessary. This reader uses an optional ``pad_data`` keyword argument that can be passed to :meth:`Scene.load` to control if padding is done (True by default). Passing `pad_data=False` will return data unpadded. When using this class in a reader's YAML configuration, segmented file types (files that may have multiple segments) should specify an extra ``expected_segments`` piece of file_type metadata. 
This tells this reader how many total segments it should expect when padding data. Alternatively, the file patterns for a file type can include a ``total_segments`` field which will be used if ``expected_segments`` is not defined. This will default to 1 segment. """ def create_filehandlers(self, filenames, fh_kwargs=None): """Create file handler objects and determine expected segments for each.""" created_fhs = super(GEOSegmentYAMLReader, self).create_filehandlers( filenames, fh_kwargs=fh_kwargs) # add "expected_segments" information for fhs in created_fhs.values(): for fh in fhs: # check the filename for total_segments parameter as a fallback ts = fh.filename_info.get('total_segments', 1) # if the YAML has segments explicitly specified then use that fh.filetype_info.setdefault('expected_segments', ts) # add segment key-values for FCI filehandlers if 'segment' not in fh.filename_info: fh.filename_info['segment'] = fh.filename_info.get('count_in_repeat_cycle', 1) return created_fhs @staticmethod def _load_dataset(dsid, ds_info, file_handlers, dim='y', pad_data=True): """Load only a piece of the dataset.""" if not pad_data: return FileYAMLReader._load_dataset(dsid, ds_info, file_handlers) counter, expected_segments, slice_list, failure, projectable = \ _find_missing_segments(file_handlers, ds_info, dsid) if projectable is None or failure: raise KeyError( "Could not load {} from any provided files".format(dsid)) padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 'fci_l1c_fdhsi' empty_segment = xr.full_like(projectable, np.nan) for i, sli in enumerate(slice_list): if sli is None: if padding_fci_scene: slice_list[i] = _get_empty_segment_with_height(empty_segment, _get_FCI_L1c_FDHSI_chunk_height( empty_segment.shape[1], i + 1), dim=dim) else: slice_list[i] = empty_segment while expected_segments > counter: if padding_fci_scene: slice_list.append(_get_empty_segment_with_height(empty_segment, _get_FCI_L1c_FDHSI_chunk_height( empty_segment.shape[1], counter + 1), dim=dim)) else: slice_list.append(empty_segment) counter += 1 if dim not in slice_list[0].dims: return slice_list[0] res = xr.concat(slice_list, dim=dim) combined_info = file_handlers[0].combine_info( [p.attrs for p in slice_list]) res.attrs = combined_info return res def _load_area_def(self, dsid, file_handlers, pad_data=True): """Load the area definition of *dsid* with padding.""" if not pad_data: return _load_area_def(dsid, file_handlers) return _load_area_def_with_padding(dsid, file_handlers) def _load_area_def_with_padding(dsid, file_handlers): """Load the area definition of *dsid* with padding.""" # Pad missing segments between the first available and expected area_defs = _pad_later_segments_area(file_handlers, dsid) # Add missing start segments area_defs = _pad_earlier_segments_area(file_handlers, dsid, area_defs) # Stack the area definitions area_def = _stack_area_defs(area_defs) return area_def def _stack_area_defs(area_def_dict): """Stack given dict of area definitions and return a StackedAreaDefinition.""" area_defs = [area_def_dict[area_def] for area_def in sorted(area_def_dict.keys()) if area_def is not None] area_def = StackedAreaDefinition(*area_defs) area_def = area_def.squeeze() return area_def def _pad_later_segments_area(file_handlers, dsid): """Pad area definitions for missing segments that are later in sequence than the first available.""" seg_size = None expected_segments = file_handlers[0].filetype_info['expected_segments'] available_segments = [int(fh.filename_info.get('segment', 1)) for fh in 
file_handlers] area_defs = {} padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 'fci_l1c_fdhsi' for segment in range(available_segments[0], expected_segments + 1): try: idx = available_segments.index(segment) fh = file_handlers[idx] area = fh.get_area_def(dsid) except ValueError: logger.debug("Padding to full disk with segment nr. %d", segment) new_height_proj_coord, new_height_px = _get_new_areadef_heights(area, seg_size, segment, padding_fci_scene) new_ll_y = area.area_extent[1] + new_height_proj_coord new_ur_y = area.area_extent[1] fill_extent = (area.area_extent[0], new_ll_y, area.area_extent[2], new_ur_y) area = AreaDefinition('fill', 'fill', 'fill', area.crs, seg_size[1], new_height_px, fill_extent) area_defs[segment] = area seg_size = area.shape return area_defs def _pad_earlier_segments_area(file_handlers, dsid, area_defs): """Pad area definitions for missing segments that are earlier in sequence than the first available.""" available_segments = [int(fh.filename_info.get('segment', 1)) for fh in file_handlers] area = file_handlers[0].get_area_def(dsid) seg_size = area.shape padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 'fci_l1c_fdhsi' for segment in range(available_segments[0] - 1, 0, -1): logger.debug("Padding segment %d to full disk.", segment) new_height_proj_coord, new_height_px = _get_new_areadef_heights(area, seg_size, segment, padding_fci_scene) new_ll_y = area.area_extent[3] new_ur_y = area.area_extent[3] - new_height_proj_coord fill_extent = (area.area_extent[0], new_ll_y, area.area_extent[2], new_ur_y) area = AreaDefinition('fill', 'fill', 'fill', area.crs, seg_size[1], new_height_px, fill_extent) area_defs[segment] = area seg_size = area.shape return area_defs def _find_missing_segments(file_handlers, ds_info, dsid): """Find missing segments.""" slice_list = [] failure = True counter = 1 expected_segments = 1 # get list of file handlers in segment order # (ex. first segment, second segment, etc) handlers = sorted(file_handlers, key=lambda x: x.filename_info.get('segment', 1)) projectable = None for fh in handlers: if fh.filetype_info['file_type'] in ds_info['file_type']: expected_segments = fh.filetype_info['expected_segments'] while int(fh.filename_info.get('segment', 1)) > counter: slice_list.append(None) counter += 1 try: projectable = fh.get_dataset(dsid, ds_info) if projectable is not None: slice_list.append(projectable) failure = False counter += 1 except KeyError: logger.warning("Failed to load %s from %s", str(dsid), str(fh), exc_info=True) # The last segment is missing? 
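    # (if so, pad the list with a trailing None; GEOSegmentYAMLReader._load_dataset
    # later replaces None entries with appropriately sized empty segments)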
if len(slice_list) < expected_segments: slice_list.append(None) return counter, expected_segments, slice_list, failure, projectable def _get_new_areadef_heights(previous_area, previous_seg_size, segment_n, padding_fci_scene): """Get the area definition heights in projection coordinates and pixels for the new padded segment.""" if padding_fci_scene: # retrieve the chunk/segment pixel height new_height_px = _get_FCI_L1c_FDHSI_chunk_height(previous_seg_size[1], segment_n) # scale the previous vertical area extent using the new pixel height new_height_proj_coord = (previous_area.area_extent[1] - previous_area.area_extent[3]) * new_height_px / \ previous_seg_size[0] else: # all other cases have constant segment size, so reuse the previous segment heights new_height_px = previous_seg_size[0] new_height_proj_coord = previous_area.area_extent[1] - previous_area.area_extent[3] return new_height_proj_coord, new_height_px def _get_empty_segment_with_height(empty_segment, new_height, dim): """Get a new empty segment with the specified height.""" if empty_segment.shape[0] > new_height: # if current empty segment is too tall, slice the DataArray return empty_segment[:new_height, :] if empty_segment.shape[0] < new_height: # if current empty segment is too short, concatenate a slice of the DataArray return xr.concat([empty_segment, empty_segment[:new_height - empty_segment.shape[0], :]], dim=dim) return empty_segment def _get_FCI_L1c_FDHSI_chunk_height(chunk_width, chunk_n): """Get the height in pixels of a FCI L1c FDHSI chunk given the chunk width and number (starting from 1).""" if chunk_width == 11136: # 1km resolution case if chunk_n in [3, 5, 8, 10, 13, 15, 18, 20, 23, 25, 28, 30, 33, 35, 38, 40]: chunk_height = 279 else: chunk_height = 278 elif chunk_width == 5568: # 2km resolution case if chunk_n in [5, 10, 15, 20, 25, 30, 35, 40]: chunk_height = 140 else: chunk_height = 139 else: raise ValueError("FCI L1c FDHSI chunk width {} not recognized. Must be either 5568 or 11136.".format( chunk_width)) return chunk_height satpy-0.34.0/satpy/resample.py000066400000000000000000001601751420401153000162600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Resampling in Satpy. Satpy provides multiple resampling algorithms for resampling geolocated data to uniform projected grids. The easiest way to perform resampling in Satpy is through the :class:`~satpy.scene.Scene` object's :meth:`~satpy.scene.Scene.resample` method. Additional utility functions are also available to assist in resampling data. Below is more information on resampling with Satpy as well as links to the relevant API documentation for available keyword arguments. Resampling algorithms --------------------- .. 
csv-table:: Available Resampling Algorithms :header-rows: 1 :align: center "Resampler", "Description", "Related" "nearest", "Nearest Neighbor", :class:`~satpy.resample.KDTreeResampler` "ewa", "Elliptical Weighted Averaging", :class:`~pyresample.ewa.DaskEWAResampler` "ewa_legacy", "Elliptical Weighted Averaging (Legacy)", :class:`~pyresample.ewa.LegacyDaskEWAResampler` "native", "Native", :class:`~satpy.resample.NativeResampler` "bilinear", "Bilinear", :class:`~satpy.resample.BilinearResampler` "bucket_avg", "Average Bucket Resampling", :class:`~satpy.resample.BucketAvg` "bucket_sum", "Sum Bucket Resampling", :class:`~satpy.resample.BucketSum` "bucket_count", "Count Bucket Resampling", :class:`~satpy.resample.BucketCount` "bucket_fraction", "Fraction Bucket Resampling", :class:`~satpy.resample.BucketFraction` "gradient_search", "Gradient Search Resampling", :class:`~pyresample.gradient.GradientSearchResampler` The resampling algorithm used can be specified with the ``resampler`` keyword argument and defaults to ``nearest``: .. code-block:: python >>> scn = Scene(...) >>> euro_scn = scn.resample('euro4', resampler='nearest') .. warning:: Some resampling algorithms expect certain forms of data. For example, the EWA resampling expects polar-orbiting swath data and prefers if the data can be broken in to "scan lines". See the API documentation for a specific algorithm for more information. Resampling for comparison and composites ---------------------------------------- While all the resamplers can be used to put datasets of different resolutions on to a common area, the 'native' resampler is designed to match datasets to one resolution in the dataset's original projection. This is extremely useful when generating composites between bands of different resolutions. .. code-block:: python >>> new_scn = scn.resample(resampler='native') By default this resamples to the :meth:`highest resolution area ` (smallest footprint per pixel) shared between the loaded datasets. You can easily specify the lowest resolution area: .. code-block:: python >>> new_scn = scn.resample(scn.coarsest_area(), resampler='native') Providing an area that is neither the minimum or maximum resolution area may work, but behavior is currently undefined. Caching for geostationary data ------------------------------ Satpy will do its best to reuse calculations performed to resample datasets, but it can only do this for the current processing and will lose this information when the process/script ends. Some resampling algorithms, like ``nearest`` and ``bilinear``, can benefit by caching intermediate data on disk in the directory specified by `cache_dir` and using it next time. This is most beneficial with geostationary satellite data where the locations of the source data and the target pixels don't change over time. >>> new_scn = scn.resample('euro4', cache_dir='/path/to/cache_dir') See the documentation for specific algorithms to see availability and limitations of caching for that algorithm. Create custom area definition ----------------------------- See :class:`pyresample.geometry.AreaDefinition` for information on creating areas that can be passed to the resample method:: >>> from pyresample.geometry import AreaDefinition >>> my_area = AreaDefinition(...) >>> local_scene = scn.resample(my_area) Create dynamic area definition ------------------------------ See :class:`pyresample.geometry.DynamicAreaDefinition` for more information. Examples coming soon... 
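In the meantime, a minimal sketch (the area name, projection parameters and
resolution below are only illustrative placeholders)::

    >>> from pyresample.geometry import DynamicAreaDefinition
    >>> my_dynamic_area = DynamicAreaDefinition(
    ...     area_id='my_dynamic_area',
    ...     projection={'proj': 'laea', 'lat_0': 60.0, 'lon_0': 10.0},
    ...     resolution=1000)
    >>> local_scene = scn.resample(my_dynamic_area)

The extent of a dynamic area is computed ("frozen") from the data being
resampled when :meth:`~satpy.scene.Scene.resample` is called.
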
Store area definitions ---------------------- Area definitions can be saved to a custom YAML file (see `pyresample's writing to disk `_) and loaded using pyresample's utility methods (`pyresample's loading from disk `_):: >>> from pyresample import load_area >>> my_area = load_area('my_areas.yaml', 'my_area') Or using :func:`satpy.resample.get_area_def`, which will search through all ``areas.yaml`` files in your ``SATPY_CONFIG_PATH``:: >>> from satpy.resample import get_area_def >>> area_eurol = get_area_def("eurol") For examples of area definitions, see the file ``etc/areas.yaml`` that is included with Satpy and where all the area definitions shipped with Satpy are defined. """ import hashlib import json import os import warnings from logging import getLogger from weakref import WeakValueDictionary import dask import dask.array as da import numpy as np import pyresample import xarray as xr import zarr from packaging import version from pyresample.ewa import fornav, ll2cr from pyresample.geometry import SwathDefinition try: from pyresample.resampler import BaseResampler as PRBaseResampler except ImportError: PRBaseResampler = None try: from pyresample.gradient import GradientSearchResampler except ImportError: GradientSearchResampler = None try: from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler except ImportError: DaskEWAResampler = LegacyDaskEWAResampler = None from satpy import CHUNK_SIZE from satpy._config import config_search_paths, get_config_path LOG = getLogger(__name__) CACHE_SIZE = 10 NN_COORDINATES = {'valid_input_index': ('y1', 'x1'), 'valid_output_index': ('y2', 'x2'), 'index_array': ('y2', 'x2', 'z2')} BIL_COORDINATES = {'bilinear_s': ('x1', ), 'bilinear_t': ('x1', ), 'slices_x': ('x1', 'n'), 'slices_y': ('x1', 'n'), 'mask_slices': ('x1', 'n'), 'out_coords_x': ('x2', ), 'out_coords_y': ('y2', )} resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() PR_USE_SKIPNA = version.parse(pyresample.__version__) > version.parse("1.17.0") def hash_dict(the_dict, the_hash=None): """Calculate a hash for a dictionary.""" if the_hash is None: the_hash = hashlib.sha1() # nosec the_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8')) return the_hash def get_area_file(): """Find area file(s) to use. The files are to be named `areas.yaml` or `areas.def`. """ paths = config_search_paths('areas.yaml') if paths: return paths else: return get_config_path('areas.def') def get_area_def(area_name): """Get the definition of *area_name* from file. The file is defined to use is to be placed in the $SATPY_CONFIG_PATH directory, and its name is defined in satpy's configuration file. """ try: from pyresample import parse_area_file except ImportError: from pyresample.utils import parse_area_file return parse_area_file(get_area_file(), area_name)[0] def add_xy_coords(data_arr, area, crs=None): """Assign x/y coordinates to DataArray from provided area. If 'x' and 'y' coordinates already exist then they will not be added. Args: data_arr (xarray.DataArray): data object to add x/y coordinates to area (pyresample.geometry.AreaDefinition): area providing the coordinate data. crs (pyproj.crs.CRS or None): CRS providing additional information about the area's coordinate reference system if available. Requires pyproj 2.0+. 
Returns (xarray.DataArray): Updated DataArray object """ if 'x' in data_arr.coords and 'y' in data_arr.coords: # x/y coords already provided return data_arr if 'x' not in data_arr.dims or 'y' not in data_arr.dims: # no defined x and y dimensions return data_arr if not hasattr(area, 'get_proj_vectors'): return data_arr x, y = area.get_proj_vectors() # convert to DataArrays y_attrs = {} x_attrs = {} if crs is not None: units = crs.axis_info[0].unit_name # fix udunits/CF standard units units = units.replace('metre', 'meter') if units == 'degree': y_attrs['units'] = 'degrees_north' x_attrs['units'] = 'degrees_east' else: y_attrs['units'] = units x_attrs['units'] = units y = xr.DataArray(y, dims=('y',), attrs=y_attrs) x = xr.DataArray(x, dims=('x',), attrs=x_attrs) return data_arr.assign_coords(y=y, x=x) def add_crs_xy_coords(data_arr, area): """Add :class:`pyproj.crs.CRS` and x/y or lons/lats to coordinates. For SwathDefinition or GridDefinition areas this will add a `crs` coordinate and coordinates for the 2D arrays of `lons` and `lats`. For AreaDefinition areas this will add a `crs` coordinate and the 1-dimensional `x` and `y` coordinate variables. Args: data_arr (xarray.DataArray): DataArray to add the 'crs' coordinate. area (pyresample.geometry.AreaDefinition): Area to get CRS information from. """ # add CRS object if pyproj 2.0+ try: from pyproj import CRS except ImportError: LOG.debug("Could not add 'crs' coordinate with pyproj<2.0") crs = None else: # default lat/lon projection latlon_proj = "+proj=latlong +datum=WGS84 +ellps=WGS84" # otherwise get it from the area definition if hasattr(area, 'crs'): crs = area.crs else: proj_str = getattr(area, 'proj_str', latlon_proj) crs = CRS.from_string(proj_str) data_arr = data_arr.assign_coords(crs=crs) # Add x/y coordinates if possible if isinstance(area, SwathDefinition): # add lon/lat arrays for swath definitions # SwathDefinitions created by Satpy should be assigning DataArray # objects as the lons/lats attributes so use those directly to # maintain original .attrs metadata (instead of converting to dask # array). lons = area.lons lats = area.lats lons.attrs.setdefault('standard_name', 'longitude') lons.attrs.setdefault('long_name', 'longitude') lons.attrs.setdefault('units', 'degrees_east') lats.attrs.setdefault('standard_name', 'latitude') lats.attrs.setdefault('long_name', 'latitude') lats.attrs.setdefault('units', 'degrees_north') # See https://github.com/pydata/xarray/issues/3068 # data_arr = data_arr.assign_coords(longitude=lons, latitude=lats) else: # Gridded data (AreaDefinition/StackedAreaDefinition) data_arr = add_xy_coords(data_arr, area, crs=crs) return data_arr def update_resampled_coords(old_data, new_data, new_area): """Add coordinate information to newly resampled DataArray. Args: old_data (xarray.DataArray): Old data before resampling. new_data (xarray.DataArray): New data after resampling. new_area (pyresample.geometry.BaseDefinition): Area definition for the newly resampled data. 
""" # copy over other non-x/y coordinates # this *MUST* happen before we set 'crs' below otherwise any 'crs' # coordinate in the coordinate variables we are copying will overwrite the # 'crs' coordinate we just assigned to the data ignore_coords = ('y', 'x', 'crs') new_coords = {} for cname, cval in old_data.coords.items(): # we don't want coordinates that depended on the old x/y dimensions has_ignored_dims = any(dim in cval.dims for dim in ignore_coords) if cname in ignore_coords or has_ignored_dims: continue new_coords[cname] = cval new_data = new_data.assign_coords(**new_coords) # add crs, x, and y coordinates new_data = add_crs_xy_coords(new_data, new_area) return new_data class BaseResampler(object): """Base abstract resampler class.""" def __init__(self, source_geo_def, target_geo_def): """Initialize resampler with geolocation information. Args: source_geo_def (SwathDefinition, AreaDefinition): Geolocation definition for the data to be resampled target_geo_def (CoordinateDefinition, AreaDefinition): Geolocation definition for the area to resample data to. """ self.source_geo_def = source_geo_def self.target_geo_def = target_geo_def def get_hash(self, source_geo_def=None, target_geo_def=None, **kwargs): """Get hash for the current resample with the given *kwargs*.""" if source_geo_def is None: source_geo_def = self.source_geo_def if target_geo_def is None: target_geo_def = self.target_geo_def the_hash = source_geo_def.update_hash() target_geo_def.update_hash(the_hash) hash_dict(kwargs, the_hash) return the_hash.hexdigest() def precompute(self, **kwargs): """Do the precomputation. This is an optional step if the subclass wants to implement more complex features like caching or can share some calculations between multiple datasets to be processed. """ return None def compute(self, data, **kwargs): """Do the actual resampling. This must be implemented by subclasses. """ raise NotImplementedError def resample(self, data, cache_dir=None, mask_area=None, **kwargs): """Resample `data` by calling `precompute` and `compute` methods. Only certain resampling classes may use `cache_dir` and the `mask` provided when `mask_area` is True. The return value of calling the `precompute` method is passed as the `cache_id` keyword argument of the `compute` method, but may not be used directly for caching. It is up to the individual resampler subclasses to determine how this is used. Args: data (xarray.DataArray): Data to be resampled cache_dir (str): directory to cache precomputed results (default False, optional) mask_area (bool): Mask geolocation data where data values are invalid. This should be used when data values may affect what neighbors are considered valid. 
Returns (xarray.DataArray): Data resampled to the target area """ # default is to mask areas for SwathDefinitions if mask_area is None and isinstance( self.source_geo_def, SwathDefinition): mask_area = True if mask_area: if isinstance(self.source_geo_def, SwathDefinition): geo_dims = self.source_geo_def.lons.dims else: geo_dims = ('y', 'x') flat_dims = [dim for dim in data.dims if dim not in geo_dims] if np.issubdtype(data.dtype, np.integer): kwargs['mask'] = data == data.attrs.get('_FillValue', np.iinfo(data.dtype.type).max) else: kwargs['mask'] = data.isnull() kwargs['mask'] = kwargs['mask'].all(dim=flat_dims) cache_id = self.precompute(cache_dir=cache_dir, **kwargs) return self.compute(data, cache_id=cache_id, **kwargs) def _create_cache_filename(self, cache_dir, prefix='', fmt='.zarr', **kwargs): """Create filename for the cached resampling parameters.""" hash_str = self.get_hash(**kwargs) return os.path.join(cache_dir, prefix + hash_str + fmt) class KDTreeResampler(BaseResampler): """Resample using a KDTree-based nearest neighbor algorithm. This resampler implements on-disk caching when the `cache_dir` argument is provided to the `resample` method. This should provide significant performance improvements on consecutive resampling of geostationary data. It is not recommended to provide `cache_dir` when the `mask` keyword argument is provided to `precompute` which occurs by default for `SwathDefinition` source areas. Args: cache_dir (str): Long term storage directory for intermediate results. mask (bool): Force resampled data's invalid pixel mask to be used when searching for nearest neighbor pixels. By default this is True for SwathDefinition source areas and False for all other area definition types. radius_of_influence (float): Search radius cut off distance in meters epsilon (float): Allowed uncertainty in meters. Increasing uncertainty reduces execution time. """ def __init__(self, source_geo_def, target_geo_def): """Init KDTreeResampler.""" super(KDTreeResampler, self).__init__(source_geo_def, target_geo_def) self.resampler = None self._index_caches = {} def precompute(self, mask=None, radius_of_influence=None, epsilon=0, cache_dir=None, **kwargs): """Create a KDTree structure and store it for later use. Note: The `mask` keyword should be provided if geolocation may be valid where data points are invalid. """ from pyresample.kd_tree import XArrayResamplerNN del kwargs if mask is not None and cache_dir is not None: LOG.warning("Mask and cache_dir both provided to nearest " "resampler. Cached parameters are affected by " "masked pixels. 
Will not cache results.") cache_dir = None if radius_of_influence is None and not hasattr(self.source_geo_def, 'geocentric_resolution'): radius_of_influence = self._adjust_radius_of_influence(radius_of_influence) kwargs = dict(source_geo_def=self.source_geo_def, target_geo_def=self.target_geo_def, radius_of_influence=radius_of_influence, neighbours=1, epsilon=epsilon) if self.resampler is None: # FIXME: We need to move all of this caching logic to pyresample self.resampler = XArrayResamplerNN(**kwargs) try: self.load_neighbour_info(cache_dir, mask=mask, **kwargs) LOG.debug("Read pre-computed kd-tree parameters") except IOError: LOG.debug("Computing kd-tree parameters") self.resampler.get_neighbour_info(mask=mask) self.save_neighbour_info(cache_dir, mask=mask, **kwargs) def _adjust_radius_of_influence(self, radius_of_influence): """Adjust radius of influence.""" warnings.warn("Upgrade 'pyresample' for a more accurate default 'radius_of_influence'.") try: radius_of_influence = self.source_geo_def.lons.resolution * 3 except AttributeError: try: radius_of_influence = max(abs(self.source_geo_def.pixel_size_x), abs(self.source_geo_def.pixel_size_y)) * 3 except AttributeError: radius_of_influence = 1000 except TypeError: radius_of_influence = 10000 return radius_of_influence def _apply_cached_index(self, val, idx_name, persist=False): """Reassign resampler index attributes.""" if isinstance(val, np.ndarray): val = da.from_array(val, chunks=CHUNK_SIZE) elif persist and isinstance(val, da.Array): val = val.persist() setattr(self.resampler, idx_name, val) return val def _check_numpy_cache(self, cache_dir, mask=None, **kwargs): """Check if there's Numpy cache file and convert it to zarr.""" if cache_dir is None: return fname_np = self._create_cache_filename(cache_dir, prefix='resample_lut-', mask=mask, fmt='.npz', **kwargs) fname_zarr = self._create_cache_filename(cache_dir, prefix='nn_lut-', mask=mask, fmt='.zarr', **kwargs) LOG.debug("Check if %s exists", fname_np) if os.path.exists(fname_np) and not os.path.exists(fname_zarr): import warnings warnings.warn("Using Numpy files as resampling cache is " "deprecated.") LOG.warning("Converting resampling LUT from .npz to .zarr") zarr_out = xr.Dataset() with np.load(fname_np, 'r') as fid: for idx_name, coord in NN_COORDINATES.items(): zarr_out[idx_name] = (coord, fid[idx_name]) # Write indices to Zarr file zarr_out.to_zarr(fname_zarr) LOG.debug("Resampling LUT saved to %s", fname_zarr) def load_neighbour_info(self, cache_dir, mask=None, **kwargs): """Read index arrays from either the in-memory or disk cache.""" mask_name = getattr(mask, 'name', None) cached = {} self._check_numpy_cache(cache_dir, mask=mask_name, **kwargs) for idx_name in NN_COORDINATES: if mask_name in self._index_caches: cached[idx_name] = self._apply_cached_index( self._index_caches[mask_name][idx_name], idx_name) elif cache_dir: try: filename = self._create_cache_filename( cache_dir, prefix='nn_lut-', mask=mask_name, **kwargs) fid = zarr.open(filename, 'r') cache = np.array(fid[idx_name]) if idx_name == 'valid_input_index': # valid input index array needs to be boolean cache = cache.astype(bool) except ValueError: raise IOError cache = self._apply_cached_index(cache, idx_name) cached[idx_name] = cache else: raise IOError self._index_caches[mask_name] = cached def save_neighbour_info(self, cache_dir, mask=None, **kwargs): """Cache resampler's index arrays if there is a cache dir.""" if cache_dir: mask_name = getattr(mask, 'name', None) cache = self._read_resampler_attrs() filename = 
self._create_cache_filename( cache_dir, prefix='nn_lut-', mask=mask_name, **kwargs) LOG.info('Saving kd_tree neighbour info to %s', filename) zarr_out = xr.Dataset() for idx_name, coord in NN_COORDINATES.items(): # update the cache in place with persisted dask arrays cache[idx_name] = self._apply_cached_index(cache[idx_name], idx_name, persist=True) zarr_out[idx_name] = (coord, cache[idx_name]) # Write indices to Zarr file zarr_out.to_zarr(filename) self._index_caches[mask_name] = cache # Delete the kdtree, it's not needed anymore self.resampler.delayed_kdtree = None def _read_resampler_attrs(self): """Read certain attributes from the resampler for caching.""" return {attr_name: getattr(self.resampler, attr_name) for attr_name in NN_COORDINATES} def compute(self, data, weight_funcs=None, fill_value=np.nan, with_uncert=False, **kwargs): """Resample data.""" del kwargs LOG.debug("Resampling %s", str(data.name)) res = self.resampler.get_sample_from_neighbour_info(data, fill_value) return update_resampled_coords(data, res, self.target_geo_def) class _LegacySatpyEWAResampler(BaseResampler): """Resample using an elliptical weighted averaging algorithm. This algorithm does **not** use caching or any externally provided data mask (unlike the 'nearest' resampler). This algorithm works under the assumption that the data is observed one scan line at a time. However, good results can still be achieved for non-scan based data provided `rows_per_scan` is set to the number of rows in the entire swath or by setting it to `None`. Args: rows_per_scan (int, None): Number of data rows for every observed scanline. If None then the entire swath is treated as one large scanline. weight_count (int): number of elements to create in the gaussian weight table. Default is 10000. Must be at least 2 weight_min (float): the minimum value to store in the last position of the weight table. Default is 0.01, which, with a `weight_distance_max` of 1.0 produces a weight of 0.01 at a grid cell distance of 1.0. Must be greater than 0. weight_distance_max (float): distance in grid cell units at which to apply a weight of `weight_min`. Default is 1.0. Must be greater than 0. weight_delta_max (float): maximum distance in grid cells in each grid dimension over which to distribute a single swath cell. Default is 10.0. weight_sum_min (float): minimum weight sum value. Cells whose weight sums are less than `weight_sum_min` are set to the grid fill value. Default is EPSILON. maximum_weight_mode (bool): If False (default), a weighted average of all swath cells that map to a particular grid cell is used. If True, the swath cell having the maximum weight of all swath cells that map to a particular grid cell is used. This option should be used for coded/category data, i.e. snow cover. """ def __init__(self, source_geo_def, target_geo_def): """Init _LegacySatpyEWAResampler.""" warnings.warn("A new version of pyresample is available. Please " "upgrade to get access to a newer 'ewa' and " "'ewa_legacy' resampler.") super(_LegacySatpyEWAResampler, self).__init__(source_geo_def, target_geo_def) self.cache = {} def resample(self, *args, **kwargs): """Run precompute and compute methods. .. note:: This sets the default of 'mask_area' to False since it is not needed in EWA resampling currently. 
""" kwargs.setdefault('mask_area', False) return super(_LegacySatpyEWAResampler, self).resample(*args, **kwargs) def _call_ll2cr(self, lons, lats, target_geo_def, swath_usage=0): """Wrap ll2cr() for handling dask delayed calls better.""" new_src = SwathDefinition(lons, lats) swath_points_in_grid, cols, rows = ll2cr(new_src, target_geo_def) # FIXME: How do we check swath usage/coverage if we only do this # per-block # # Determine if enough of the input swath was used # grid_name = getattr(self.target_geo_def, "name", "N/A") # fraction_in = swath_points_in_grid / float(lons.size) # swath_used = fraction_in > swath_usage # if not swath_used: # LOG.info("Data does not fit in grid %s because it only %f%% of " # "the swath is used" % # (grid_name, fraction_in * 100)) # raise RuntimeError("Data does not fit in grid %s" % (grid_name,)) # else: # LOG.debug("Data fits in grid %s and uses %f%% of the swath", # grid_name, fraction_in * 100) return np.stack([cols, rows], axis=0) def precompute(self, cache_dir=None, swath_usage=0, **kwargs): """Generate row and column arrays and store it for later use.""" if self.cache: # this resampler should be used for one SwathDefinition # no need to recompute ll2cr output again return None if kwargs.get('mask') is not None: LOG.warning("'mask' parameter has no affect during EWA " "resampling") del kwargs source_geo_def = self.source_geo_def target_geo_def = self.target_geo_def if cache_dir: LOG.warning("'cache_dir' is not used by EWA resampling") # Satpy/PyResample don't support dynamic grids out of the box yet lons, lats = source_geo_def.get_lonlats() if isinstance(lons, xr.DataArray): # get dask arrays lons = lons.data lats = lats.data # we are remapping to a static unchanging grid/area with all of # its parameters specified chunks = (2,) + lons.chunks res = da.map_blocks(self._call_ll2cr, lons, lats, target_geo_def, swath_usage, dtype=lons.dtype, chunks=chunks, new_axis=[0]) cols = res[0] rows = res[1] # save the dask arrays in the class instance cache # the on-disk cache will store the numpy arrays self.cache = { "rows": rows, "cols": cols, } return None def _call_fornav(self, cols, rows, target_geo_def, data, grid_coverage=0, **kwargs): """Wrap fornav() to run as a dask delayed.""" num_valid_points, res = fornav(cols, rows, target_geo_def, data, **kwargs) if isinstance(data, tuple): # convert 'res' from tuple of arrays to one array res = np.stack(res) num_valid_points = sum(num_valid_points) grid_covered_ratio = num_valid_points / float(res.size) grid_covered = grid_covered_ratio > grid_coverage if not grid_covered: msg = "EWA resampling only found %f%% of the grid covered " \ "(need %f%%)" % (grid_covered_ratio * 100, grid_coverage * 100) raise RuntimeError(msg) LOG.debug("EWA resampling found %f%% of the grid covered" % (grid_covered_ratio * 100)) return res def compute(self, data, cache_id=None, fill_value=0, weight_count=10000, weight_min=0.01, weight_distance_max=1.0, weight_delta_max=1.0, weight_sum_min=-1.0, maximum_weight_mode=False, grid_coverage=0, **kwargs): """Resample the data according to the precomputed X/Y coordinates.""" rows = self.cache["rows"] cols = self.cache["cols"] # if the data is scan based then check its metadata or the passed # kwargs otherwise assume the entire input swath is one large # "scanline" rows_per_scan = kwargs.get('rows_per_scan', data.attrs.get("rows_per_scan", data.shape[0])) if data.ndim == 3 and 'bands' in data.dims: data_in = tuple(data.sel(bands=band).data for band in data['bands']) elif data.ndim == 2: data_in = 
data.data else: raise ValueError("Unsupported data shape for EWA resampling.") res = dask.delayed(self._call_fornav)( cols, rows, self.target_geo_def, data_in, grid_coverage=grid_coverage, rows_per_scan=rows_per_scan, weight_count=weight_count, weight_min=weight_min, weight_distance_max=weight_distance_max, weight_delta_max=weight_delta_max, weight_sum_min=weight_sum_min, maximum_weight_mode=maximum_weight_mode) if isinstance(data_in, tuple): new_shape = (len(data_in),) + self.target_geo_def.shape else: new_shape = self.target_geo_def.shape data_arr = da.from_delayed(res, new_shape, data.dtype) # from delayed creates one large chunk, break it up a bit if we can data_arr = data_arr.rechunk([CHUNK_SIZE] * data_arr.ndim) if data.ndim == 3 and data.dims[0] == 'bands': dims = ('bands', 'y', 'x') elif data.ndim == 2: dims = ('y', 'x') else: dims = data.dims res = xr.DataArray(data_arr, dims=dims, attrs=data.attrs.copy()) return update_resampled_coords(data, res, self.target_geo_def) class BilinearResampler(BaseResampler): """Resample using bilinear interpolation. This resampler implements on-disk caching when the `cache_dir` argument is provided to the `resample` method. This should provide significant performance improvements on consecutive resampling of geostationary data. Args: cache_dir (str): Long term storage directory for intermediate results. radius_of_influence (float): Search radius cut off distance in meters epsilon (float): Allowed uncertainty in meters. Increasing uncertainty reduces execution time. reduce_data (bool): Reduce the input data to (roughly) match the target area. """ def __init__(self, source_geo_def, target_geo_def): """Init BilinearResampler.""" super(BilinearResampler, self).__init__(source_geo_def, target_geo_def) self.resampler = None def precompute(self, mask=None, radius_of_influence=50000, epsilon=0, reduce_data=True, cache_dir=False, **kwargs): """Create bilinear coefficients and store them for later use.""" try: from pyresample.bilinear import XArrayBilinearResampler except ImportError: from pyresample.bilinear import XArrayResamplerBilinear as XArrayBilinearResampler del kwargs del mask if self.resampler is None: kwargs = dict(source_geo_def=self.source_geo_def, target_geo_def=self.target_geo_def, radius_of_influence=radius_of_influence, neighbours=32, epsilon=epsilon) self.resampler = XArrayBilinearResampler(**kwargs) try: self.load_bil_info(cache_dir, **kwargs) LOG.debug("Loaded bilinear parameters") except IOError: LOG.debug("Computing bilinear parameters") self.resampler.get_bil_info() LOG.debug("Saving bilinear parameters.") self.save_bil_info(cache_dir, **kwargs) def load_bil_info(self, cache_dir, **kwargs): """Load bilinear resampling info from cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, prefix='bil_lut-', **kwargs) try: self.resampler.load_resampling_info(filename) except AttributeError: warnings.warn("Bilinear resampler can't handle caching, " "please upgrade Pyresample to 0.17.0 or newer.") raise IOError else: raise IOError def save_bil_info(self, cache_dir, **kwargs): """Save bilinear resampling info to cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, prefix='bil_lut-', **kwargs) # There are some old caches, move them out of the way if os.path.exists(filename): _move_existing_caches(cache_dir, filename) LOG.info('Saving BIL neighbour info to %s', filename) try: self.resampler.save_resampling_info(filename) except AttributeError: warnings.warn("Bilinear resampler can't handle 
caching, " "please upgrade Pyresample to 0.17.0 or newer.") def compute(self, data, fill_value=None, **kwargs): """Resample the given data using bilinear interpolation.""" del kwargs if fill_value is None: fill_value = data.attrs.get('_FillValue') target_shape = self.target_geo_def.shape res = self.resampler.get_sample_from_bil_info(data, fill_value=fill_value, output_shape=target_shape) return update_resampled_coords(data, res, self.target_geo_def) def _move_existing_caches(cache_dir, filename): """Move existing cache files out of the way.""" import os import shutil old_cache_dir = os.path.join(cache_dir, 'moved_by_satpy') try: os.makedirs(old_cache_dir) except FileExistsError: pass try: shutil.move(filename, old_cache_dir) except shutil.Error: os.remove(os.path.join(old_cache_dir, os.path.basename(filename))) shutil.move(filename, old_cache_dir) LOG.warning("Old cache file was moved to %s", old_cache_dir) def _mean(data, y_size, x_size): rows, cols = data.shape new_shape = (int(rows / y_size), int(y_size), int(cols / x_size), int(x_size)) data_mean = np.nanmean(data.reshape(new_shape), axis=(1, 3)) return data_mean def _repeat_by_factor(data, block_info=None): if block_info is None: return data out_shape = block_info[None]['chunk-shape'] out_data = data for axis, axis_size in enumerate(out_shape): in_size = data.shape[axis] out_data = np.repeat(out_data, int(axis_size / in_size), axis=axis) return out_data class NativeResampler(BaseResampler): """Expand or reduce input datasets to be the same shape. If data is higher resolution (more pixels) than the destination area then data is averaged to match the destination resolution. If data is lower resolution (less pixels) than the destination area then data is repeated to match the destination resolution. This resampler does not perform any caching or masking due to the simplicity of the operations. """ def resample(self, data, cache_dir=None, mask_area=False, **kwargs): """Run NativeResampler.""" # use 'mask_area' with a default of False. It wouldn't do anything. return super(NativeResampler, self).resample(data, cache_dir=cache_dir, mask_area=mask_area, **kwargs) @staticmethod def _aggregate(d, y_size, x_size): """Average every 4 elements (2x2) in a 2D array.""" if d.ndim != 2: # we can't guarantee what blocks we are getting and how # it should be reshaped to do the averaging. 
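# Illustrative usage sketch (not part of the upstream module): bilinear
# resampling supports the same ``cache_dir`` style of on-disk caching
# (bil_lut-* files) when Pyresample 0.17.0 or newer is installed, as the
# warnings above indicate.  Paths are placeholders; 'eurol' is a builtin area.
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob("/data/seviri/*"), reader="seviri_l1b_hrit")
scn.load(["IR_108"])
smooth_scn = scn.resample("eurol", resampler="bilinear",
                          radius_of_influence=50000,
                          cache_dir="/var/cache/satpy")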
raise ValueError("Can't aggregate (reduce) data arrays with " "more than 2 dimensions.") if not (x_size.is_integer() and y_size.is_integer()): raise ValueError("Aggregation factors are not integers") for agg_size, chunks in zip([y_size, x_size], d.chunks): for chunk_size in chunks: if chunk_size % agg_size != 0: raise ValueError("Aggregation requires arrays with " "shapes and chunks divisible by the " "factor") new_chunks = (tuple(int(x / y_size) for x in d.chunks[0]), tuple(int(x / x_size) for x in d.chunks[1])) return da.core.map_blocks(_mean, d, y_size, x_size, meta=np.array((), dtype=d.dtype), dtype=d.dtype, chunks=new_chunks) @staticmethod def _replicate(d_arr, repeats): """Repeat data pixels by the per-axis factors specified.""" # rechunk so new chunks are the same size as old chunks c_size = max(x[0] for x in d_arr.chunks) def _calc_chunks(c, c_size): whole_chunks = [c_size] * int(sum(c) // c_size) remaining = sum(c) - sum(whole_chunks) if remaining: whole_chunks += [remaining] return tuple(whole_chunks) new_chunks = [_calc_chunks(x, int(c_size // repeats[axis])) for axis, x in enumerate(d_arr.chunks)] d_arr = d_arr.rechunk(new_chunks) repeated_chunks = [] for axis, axis_chunks in enumerate(d_arr.chunks): factor = repeats[axis] if not factor.is_integer(): raise ValueError("Expand factor must be a whole number") repeated_chunks.append(tuple(x * int(factor) for x in axis_chunks)) repeated_chunks = tuple(repeated_chunks) d_arr = d_arr.map_blocks(_repeat_by_factor, meta=np.array((), dtype=d_arr.dtype), dtype=d_arr.dtype, chunks=repeated_chunks) return d_arr @classmethod def _expand_reduce(cls, d_arr, repeats): """Expand reduce.""" if not isinstance(d_arr, da.Array): d_arr = da.from_array(d_arr, chunks=CHUNK_SIZE) if all(x == 1 for x in repeats.values()): return d_arr if all(x >= 1 for x in repeats.values()): return cls._replicate(d_arr, repeats) if all(x <= 1 for x in repeats.values()): # reduce y_size = 1. / repeats[0] x_size = 1. / repeats[1] return cls._aggregate(d_arr, y_size, x_size) raise ValueError("Must either expand or reduce in both " "directions") def compute(self, data, expand=True, **kwargs): """Resample data with NativeResampler.""" if isinstance(self.target_geo_def, (list, tuple)): # find the highest/lowest area among the provided test_func = max if expand else min target_geo_def = test_func(self.target_geo_def, key=lambda x: x.shape) else: target_geo_def = self.target_geo_def # convert xarray backed with numpy array to dask array if 'x' not in data.dims or 'y' not in data.dims: if data.ndim not in [2, 3]: raise ValueError("Can only handle 2D or 3D arrays without dimensions.") # assume rows is the second to last axis y_axis = data.ndim - 2 x_axis = data.ndim - 1 else: y_axis = data.dims.index('y') x_axis = data.dims.index('x') out_shape = target_geo_def.shape in_shape = data.shape y_repeats = out_shape[0] / float(in_shape[y_axis]) x_repeats = out_shape[1] / float(in_shape[x_axis]) repeats = {axis_idx: 1.
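# Minimal runnable sketch of the block-averaging idea used by the
# aggregation helpers above: reshape each 2D dask block so that the
# aggregation window becomes its own axis pair and take a NaN-aware mean.
# The array and factors below are synthetic placeholders.
import dask.array as da
import numpy as np

arr = da.random.random((600, 600), chunks=300)
y_size = x_size = 2.0  # aggregation factors; must divide shape and chunks


def _block_mean(block):
    rows, cols = block.shape
    new_shape = (int(rows / y_size), int(y_size),
                 int(cols / x_size), int(x_size))
    return np.nanmean(block.reshape(new_shape), axis=(1, 3))


reduced = arr.map_blocks(_block_mean, chunks=(150, 150), dtype=arr.dtype)
print(reduced.shape)  # (300, 300)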
for axis_idx in range(data.ndim) if axis_idx not in [y_axis, x_axis]} repeats[y_axis] = y_repeats repeats[x_axis] = x_repeats d_arr = self._expand_reduce(data.data, repeats) new_data = xr.DataArray(d_arr, dims=data.dims) return update_resampled_coords(data, new_data, target_geo_def) def _get_arg_to_pass_for_skipna_handling(**kwargs): """Determine if skipna can be passed to the compute functions for the average and sum bucket resampler.""" # FIXME this can be removed once Pyresample 1.18.0 is a Satpy requirement if PR_USE_SKIPNA: if 'mask_all_nan' in kwargs: warnings.warn('Argument mask_all_nan is deprecated. Please use skipna for missing values handling. ' 'Continuing with default skipna=True, if not provided differently.', DeprecationWarning) kwargs.pop('mask_all_nan') else: if 'mask_all_nan' in kwargs: warnings.warn('Argument mask_all_nan is deprecated.' 'Please update Pyresample and use skipna for missing values handling.', DeprecationWarning) kwargs.setdefault('mask_all_nan', False) kwargs.pop('skipna') return kwargs class BucketResamplerBase(BaseResampler): """Base class for bucket resampling which implements averaging.""" def __init__(self, source_geo_def, target_geo_def): """Initialize bucket resampler.""" super(BucketResamplerBase, self).__init__(source_geo_def, target_geo_def) self.resampler = None def precompute(self, **kwargs): """Create X and Y indices and store them for later use.""" from pyresample import bucket LOG.debug("Initializing bucket resampler.") source_lons, source_lats = self.source_geo_def.get_lonlats( chunks=CHUNK_SIZE) self.resampler = bucket.BucketResampler(self.target_geo_def, source_lons, source_lats) def compute(self, data, **kwargs): """Call the resampling.""" raise NotImplementedError("Use the sub-classes") def resample(self, data, **kwargs): """Resample `data` by calling `precompute` and `compute` methods. Args: data (xarray.DataArray): Data to be resampled Returns (xarray.DataArray): Data resampled to the target area """ if not PR_USE_SKIPNA and 'skipna' in kwargs: raise ValueError('You are trying to set the skipna argument but you are using an old version of' ' Pyresample that does not support it.' 'Please update Pyresample to 1.18.0 or higher to be able to use this argument.') self.precompute(**kwargs) attrs = data.attrs.copy() data_arr = data.data if data.ndim == 3 and data.dims[0] == 'bands': dims = ('bands', 'y', 'x') # Both one and two dimensional input data results in 2D output elif data.ndim in (1, 2): dims = ('y', 'x') else: dims = data.dims LOG.debug("Resampling %s", str(data.attrs.get('_satpy_id', 'unknown'))) result = self.compute(data_arr, **kwargs) coords = {} if 'bands' in data.coords: coords['bands'] = data.coords['bands'] # Fractions are returned in a dict elif isinstance(result, dict): coords['categories'] = sorted(result.keys()) dims = ('categories', 'y', 'x') new_result = [] for cat in coords['categories']: new_result.append(result[cat]) result = da.stack(new_result) if result.ndim > len(dims): result = da.squeeze(result) # Adjust some attributes if "BucketFraction" in str(self): attrs['units'] = '' attrs['calibration'] = '' attrs['standard_name'] = 'area_fraction' elif "BucketCount" in str(self): attrs['units'] = '' attrs['calibration'] = '' attrs['standard_name'] = 'number_of_observations' result = xr.DataArray(result, dims=dims, coords=coords, attrs=attrs) return update_resampled_coords(data, result, self.target_geo_def) class BucketAvg(BucketResamplerBase): """Class for averaging bucket resampling. 
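# Minimal runnable sketch of the native resampler described above: it only
# replicates or averages whole pixels, so source and target shapes must
# differ by an integer factor (here 2x2 averaging from 200x200 to 100x100).
# The areas and data are synthetic placeholders.
import numpy as np
import xarray as xr
from pyresample.geometry import AreaDefinition

from satpy.resample import NativeResampler


def _merc_area(rows, cols):
    return AreaDefinition("demo", "demo", "demo", "+proj=merc +datum=WGS84",
                          cols, rows, (-1e6, -1e6, 1e6, 1e6))


src_area = _merc_area(200, 200)
dst_area = _merc_area(100, 100)
data = xr.DataArray(np.random.rand(200, 200), dims=("y", "x"),
                    attrs={"area": src_area, "name": "demo"})
resampler = NativeResampler(src_area, dst_area)
reduced = resampler.resample(data)  # 2x2 block averaging, shape (100, 100)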
Bucket resampling calculates the average of all the values that are closest to each bin and inside the target area. Parameters ---------- fill_value : float (default: np.nan) Fill value to mark missing/invalid values in the input data, as well as in the binned and averaged output data. skipna : boolean (default: True) If True, skips missing values (as marked by NaN or `fill_value`) for the average calculation (similarly to Numpy's `nanmean`). Buckets containing only missing values are set to fill_value. If False, sets the bucket to fill_value if one or more missing values are present in the bucket (similarly to Numpy's `mean`). In both cases, empty buckets are set to `fill_value`. """ def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): """Call the resampling. Args: data (numpy.Array, dask.Array): Data to be resampled fill_value (numpy.nan, int): fill_value. Defaults to numpy.nan skipna (boolean): Skip NA's. Default `True` Returns: dask.Array """ kwargs = _get_arg_to_pass_for_skipna_handling(skipna=skipna, **kwargs) results = [] if data.ndim == 3: for i in range(data.shape[0]): res = self.resampler.get_average(data[i, :, :], fill_value=fill_value, **kwargs) results.append(res) else: res = self.resampler.get_average(data, fill_value=fill_value, **kwargs) results.append(res) return da.stack(results) class BucketSum(BucketResamplerBase): """Class for bucket resampling which implements accumulation (sum). This resampler calculates the cumulative sum of all the values that are closest to each bin and inside the target area. Parameters ---------- fill_value : float (default: np.nan) Fill value for missing data skipna : boolean (default: True) If True, skips NaN values for the sum calculation (similarly to Numpy's `nansum`). Buckets containing only NaN are set to zero. If False, sets the bucket to NaN if one or more NaN values are present in the bucket (similarly to Numpy's `sum`). In both cases, empty buckets are set to 0. """ def compute(self, data, skipna=True, **kwargs): """Call the resampling.""" kwargs = _get_arg_to_pass_for_skipna_handling(skipna=skipna, **kwargs) results = [] if data.ndim == 3: for i in range(data.shape[0]): res = self.resampler.get_sum(data[i, :, :], **kwargs) results.append(res) else: res = self.resampler.get_sum(data, **kwargs) results.append(res) return da.stack(results) class BucketCount(BucketResamplerBase): """Class for bucket resampling which implements hit-counting. This resampler calculates the number of occurrences of the input data closest to each bin and inside the target area. """ def compute(self, data, **kwargs): """Call the resampling.""" results = [] if data.ndim == 3: for _i in range(data.shape[0]): res = self.resampler.get_count() results.append(res) else: res = self.resampler.get_count() results.append(res) return da.stack(results) class BucketFraction(BucketResamplerBase): """Class for bucket resampling to compute category fractions. This resampler calculates the fraction of occurrences of the input data per category.
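# Illustrative usage sketch (not part of the upstream module): bucket
# resampling grids swath data by averaging (or summing, counting, computing
# category fractions) everything that falls into each target cell.  The
# ``skipna``/``fill_value`` keywords are forwarded to the compute methods
# above; ``skipna`` requires Pyresample 1.18.0 or newer as noted in the code.
# Paths and the target area are placeholders.
from glob import glob

import numpy as np

from satpy import Scene

scn = Scene(filenames=glob("/data/viirs/*.h5"), reader="viirs_sdr")
scn.load(["M15"])
gridded_scn = scn.resample("eurol", resampler="bucket_avg",
                           fill_value=np.nan, skipna=True)
# counts per grid cell instead of averages:
# counts_scn = scn.resample("eurol", resampler="bucket_count")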
""" def compute(self, data, fill_value=np.nan, categories=None, **kwargs): """Call the resampling.""" if data.ndim > 2: raise ValueError("BucketFraction not implemented for 3D datasets") result = self.resampler.get_fractions(data, categories=categories, fill_value=fill_value) return result # TODO: move this to pyresample.resampler RESAMPLERS = {"kd_tree": KDTreeResampler, "nearest": KDTreeResampler, "bilinear": BilinearResampler, "native": NativeResampler, "gradient_search": GradientSearchResampler, "bucket_avg": BucketAvg, "bucket_sum": BucketSum, "bucket_count": BucketCount, "bucket_fraction": BucketFraction, } if DaskEWAResampler is not None: RESAMPLERS['ewa'] = DaskEWAResampler RESAMPLERS['ewa_legacy'] = LegacyDaskEWAResampler else: RESAMPLERS['ewa'] = _LegacySatpyEWAResampler # deepcode ignore PythonSameEvalBinaryExpressiontrue: PRBaseResampler is None only on import errors if PRBaseResampler is None: PRBaseResampler = BaseResampler # TODO: move this to pyresample def prepare_resampler(source_area, destination_area, resampler=None, **resample_kwargs): """Instantiate and return a resampler.""" if resampler is None: LOG.info("Using default KDTree resampler") resampler = 'kd_tree' if isinstance(resampler, (BaseResampler, PRBaseResampler)): raise ValueError("Trying to create a resampler when one already " "exists.") if isinstance(resampler, str): resampler_class = RESAMPLERS.get(resampler, None) if resampler_class is None: if resampler == "gradient_search": warnings.warn('Gradient search resampler not available. Maybe missing `shapely`?') raise KeyError("Resampler '%s' not available" % resampler) else: resampler_class = resampler key = (resampler_class, source_area, destination_area, hash_dict(resample_kwargs).hexdigest()) try: resampler_instance = resamplers_cache[key] except KeyError: resampler_instance = resampler_class(source_area, destination_area) resamplers_cache[key] = resampler_instance return key, resampler_instance # TODO: move this to pyresample def resample(source_area, data, destination_area, resampler=None, **kwargs): """Do the resampling.""" if not isinstance(resampler, (BaseResampler, PRBaseResampler)): # we don't use the first argument (cache key) _, resampler_instance = prepare_resampler(source_area, destination_area, resampler) else: resampler_instance = resampler if isinstance(data, list): res = [resampler_instance.resample(ds, **kwargs) for ds in data] else: res = resampler_instance.resample(data, **kwargs) return res def get_fill_value(dataset): """Get the fill value of the *dataset*, defaulting to np.nan.""" if np.issubdtype(dataset.dtype, np.integer): return dataset.attrs.get('_FillValue', np.nan) return np.nan def resample_dataset(dataset, destination_area, **kwargs): """Resample *dataset* and return the resampled version. Args: dataset (xarray.DataArray): Data to be resampled. destination_area: The destination onto which to project the data, either a full blown area definition or a string corresponding to the name of the area as defined in the area file. **kwargs: The extra parameters to pass to the resampler objects. Returns: A resampled DataArray with updated ``.attrs["area"]`` field. The dtype of the array is preserved. 
""" # call the projection stuff here try: source_area = dataset.attrs["area"] except KeyError: LOG.info("Cannot reproject dataset %s, missing area info", dataset.attrs['name']) return dataset fill_value = kwargs.pop('fill_value', get_fill_value(dataset)) new_data = resample(source_area, dataset, destination_area, fill_value=fill_value, **kwargs) new_attrs = new_data.attrs new_data.attrs = dataset.attrs.copy() new_data.attrs.update(new_attrs) new_data.attrs.update(area=destination_area) return new_data satpy-0.34.0/satpy/scene.py000066400000000000000000001775541420401153000155560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Scene object to hold satellite data.""" from __future__ import annotations import logging import os import warnings import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition from xarray import DataArray from satpy.composites import IncompatibleAreas from satpy.composites.config_loader import load_compositor_configs_for_sensors from satpy.dataset import DataID, DataQuery, DatasetDict, combine_metadata, dataset_walker, replace_anc from satpy.dependency_tree import DependencyTree from satpy.node import CompositorNode, MissingDependencies, ReaderNode from satpy.readers import load_readers from satpy.resample import get_area_def, prepare_resampler, resample_dataset from satpy.writers import load_writer LOG = logging.getLogger(__name__) class DelayedGeneration(KeyError): """Mark that a dataset can't be generated without further modification.""" pass class Scene: """The Almighty Scene Class. Example usage:: from satpy import Scene from glob import glob # create readers and open files scn = Scene(filenames=glob('/path/to/files/*'), reader='viirs_sdr') # load datasets from input files scn.load(['I01', 'I02']) # resample from satellite native geolocation to builtin 'eurol' Area new_scn = scn.resample('eurol') # save all resampled datasets to geotiff files in the current directory new_scn.save_datasets() """ def __init__(self, filenames=None, reader=None, filter_parameters=None, reader_kwargs=None): """Initialize Scene with Reader and Compositor objects. To load data `filenames` and preferably `reader` must be specified. If `filenames` is provided without `reader` then the available readers will be searched for a Reader that can support the provided files. This can take a considerable amount of time so it is recommended that `reader` always be provided. Note without `filenames` the Scene is created with no Readers available requiring Datasets to be added manually:: scn = Scene() scn['my_dataset'] = Dataset(my_data_array, **my_info) Args: filenames (iterable or dict): A sequence of files that will be used to load data from. A ``dict`` object should map reader names to a list of filenames for that reader. 
reader (str or list): The name of the reader to use for loading the data or a list of names. filter_parameters (dict): Specify loaded file filtering parameters. Shortcut for `reader_kwargs['filter_parameters']`. reader_kwargs (dict): Keyword arguments to pass to specific reader instances. Either a single dictionary that will be passed onto to all reader instances, or a dictionary mapping reader names to sub-dictionaries to pass different arguments to different reader instances. """ self.attrs = dict() if filter_parameters: if reader_kwargs is None: reader_kwargs = {} else: reader_kwargs = reader_kwargs.copy() reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters) if filenames and isinstance(filenames, str): raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])") self._readers = self._create_reader_instances(filenames=filenames, reader=reader, reader_kwargs=reader_kwargs) self._datasets = DatasetDict() self._wishlist = set() self._dependency_tree = DependencyTree(self._readers) self._resamplers = {} @property def wishlist(self): """Return a copy of the wishlist.""" return self._wishlist.copy() def _ipython_key_completions_(self): return [x['name'] for x in self._datasets.keys()] def _create_reader_instances(self, filenames=None, reader=None, reader_kwargs=None): """Find readers and return their instances.""" return load_readers(filenames=filenames, reader=reader, reader_kwargs=reader_kwargs) @property def sensor_names(self) -> set[str]: """Return sensor names for the data currently contained in this Scene. Sensor information is collected from data contained in the Scene whether loaded from a reader or generated as a composite with :meth:`load` or added manually using ``scn["name"] = data_arr``). Sensor information is also collected from any loaded readers. In some rare cases this may mean that the reader includes sensor information for data that isn't actually loaded or even available. """ contained_sensor_names = self._contained_sensor_names() reader_sensor_names = set([sensor for reader_instance in self._readers.values() for sensor in reader_instance.sensor_names]) return contained_sensor_names | reader_sensor_names def _contained_sensor_names(self) -> set[str]: sensor_names = set() for data_arr in self.values(): if "sensor" not in data_arr.attrs: continue if isinstance(data_arr.attrs["sensor"], str): sensor_names.add(data_arr.attrs["sensor"]) elif isinstance(data_arr.attrs["sensor"], set): sensor_names.update(data_arr.attrs["sensor"]) else: raise TypeError("Unexpected type in sensor collection") return sensor_names @property def start_time(self): """Return the start time of the contained data. If no data is currently contained in the Scene then loaded readers will be consulted. """ start_times = [data_arr.attrs['start_time'] for data_arr in self.values() if 'start_time' in data_arr.attrs] if not start_times: start_times = self._reader_times('start_time') if not start_times: return None return min(start_times) @property def end_time(self): """Return the end time of the file. If no data is currently contained in the Scene then loaded readers will be consulted. If no readers are loaded then the :attr:`Scene.start_time` is returned. 
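# Illustrative usage sketch (not part of the upstream module): ``filenames``
# may map reader names to file lists, while ``reader_kwargs`` and
# ``filter_parameters`` tune each reader.  The reader keyword ('calib_mode')
# and the time filter shown here are examples whose support depends on the
# specific reader; paths and the timestamp are placeholders.
from datetime import datetime
from glob import glob

from satpy import Scene

scn = Scene(filenames={"seviri_l1b_hrit": glob("/data/seviri/*")},
            reader_kwargs={"seviri_l1b_hrit": {"calib_mode": "GSICS"}},
            filter_parameters={"start_time": datetime(2022, 1, 1, 12, 0)})
print(scn.sensor_names)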
""" end_times = [data_arr.attrs['end_time'] for data_arr in self.values() if 'end_time' in data_arr.attrs] if not end_times: end_times = self._reader_times('end_time') if not end_times: return self.start_time return max(end_times) def _reader_times(self, time_prop_name): return [getattr(reader, time_prop_name) for reader in self._readers.values()] @property def missing_datasets(self): """Set of DataIDs that have not been successfully loaded.""" return set(self._wishlist) - set(self._datasets.keys()) def _compare_areas(self, datasets=None, compare_func=max): """Compare areas for the provided datasets. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. This can also be a series of area objects, typically AreaDefinitions. compare_func (callable): `min` or `max` or other function used to compare the dataset's areas. """ if datasets is None: datasets = list(self.values()) areas = self._gather_all_areas(datasets) if isinstance(areas[0], AreaDefinition): first_crs = areas[0].crs if not all(ad.crs == first_crs for ad in areas[1:]): raise ValueError("Can't compare areas with different " "projections.") def key_func(ds): return 1. / abs(ds.pixel_size_x) else: def key_func(ds): return ds.shape # find the highest/lowest area among the provided return compare_func(areas, key=key_func) def _gather_all_areas(self, datasets): """Gather all areas from datasets. They have to be of the same type, and at least one dataset should have an area. """ areas = [] for ds in datasets: if isinstance(ds, BaseDefinition): areas.append(ds) continue elif not isinstance(ds, DataArray): ds = self[ds] area = ds.attrs.get('area') areas.append(area) areas = [x for x in areas if x is not None] if not areas: raise ValueError("No dataset areas available") if not all(isinstance(x, type(areas[0])) for x in areas[1:]): raise ValueError("Can't compare areas of different types") return areas def finest_area(self, datasets=None): """Get highest resolution area for the provided datasets. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. """ return self._compare_areas(datasets=datasets, compare_func=max) def max_area(self, datasets=None): """Get highest resolution area for the provided datasets. Deprecated. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. """ warnings.warn("'max_area' is deprecated, use 'finest_area' instead.", DeprecationWarning) return self.finest_area(datasets=datasets) def coarsest_area(self, datasets=None): """Get lowest resolution area for the provided datasets. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. """ return self._compare_areas(datasets=datasets, compare_func=min) def min_area(self, datasets=None): """Get lowest resolution area for the provided datasets. Deprecated. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. 
""" warnings.warn("'min_area' is deprecated, use 'coarsest_area' instead.", DeprecationWarning) return self.coarsest_area(datasets=datasets) def available_dataset_ids(self, reader_name=None, composites=False): """Get DataIDs of loadable datasets. This can be for all readers loaded by this Scene or just for ``reader_name`` if specified. Available dataset names are determined by what each individual reader can load. This is normally determined by what files are needed to load a dataset and what files have been provided to the scene/reader. Some readers dynamically determine what is available based on the contents of the files provided. By default, only returns non-composite dataset IDs. To include composite dataset IDs, pass ``composites=True``. Args: reader_name (str, optional): Name of reader for which to return dataset IDs. If not passed, return dataset IDs for all readers. composites (bool, optional): If True, return dataset IDs including composites. If False (default), return only non-composite dataset IDs. Returns: list of available dataset IDs """ try: if reader_name: readers = [self._readers[reader_name]] else: readers = self._readers.values() except (AttributeError, KeyError): raise KeyError("No reader '%s' found in scene" % reader_name) available_datasets = sorted([dataset_id for reader in readers for dataset_id in reader.available_dataset_ids]) if composites: available_datasets += sorted(self.available_composite_ids()) return available_datasets def available_dataset_names(self, reader_name=None, composites=False): """Get the list of the names of the available datasets. By default, this only shows names of datasets directly defined in (one of the) readers. Names of composites are not returned unless the argument ``composites=True`` is passed. Args: reader_name (str, optional): Name of reader for which to return dataset IDs. If not passed, return dataset names for all readers. composites (bool, optional): If True, return dataset IDs including composites. If False (default), return only non-composite dataset names. Returns: list of available dataset names """ return sorted(set(x['name'] for x in self.available_dataset_ids( reader_name=reader_name, composites=composites))) def all_dataset_ids(self, reader_name=None, composites=False): """Get IDs of all datasets from loaded readers or `reader_name` if specified. Excludes composites unless ``composites=True`` is passed. Args: reader_name (str, optional): Name of reader for which to return dataset IDs. If not passed, return dataset IDs for all readers. composites (bool, optional): If True, return dataset IDs including composites. If False (default), return only non-composite dataset IDs. Returns: list of all dataset IDs """ try: if reader_name: readers = [self._readers[reader_name]] else: readers = self._readers.values() except (AttributeError, KeyError): raise KeyError("No reader '%s' found in scene" % reader_name) all_datasets = [dataset_id for reader in readers for dataset_id in reader.all_dataset_ids] if composites: all_datasets += self.all_composite_ids() return all_datasets def all_dataset_names(self, reader_name=None, composites=False): """Get all known dataset names configured for the loaded readers. Note that some readers dynamically determine what datasets are known by reading the contents of the files they are provided. This means that the list of datasets returned by this method may change depending on what files are provided even if a product/dataset is a "standard" product for a particular reader. 
Excludes composites unless ``composites=True`` is passed. Args: reader_name (str, optional): Name of reader for which to return dataset IDs. If not passed, return dataset names for all readers. composites (bool, optional): If True, return dataset IDs including composites. If False (default), return only non-composite dataset names. Returns: list of all dataset names """ return sorted(set(x['name'] for x in self.all_dataset_ids( reader_name=reader_name, composites=composites))) def _check_known_composites(self, available_only=False): """Create new dependency tree and check what composites we know about.""" # Note if we get compositors from the dep tree then it will include # modified composites which we don't want sensor_comps, mods = load_compositor_configs_for_sensors(self.sensor_names) # recreate the dependency tree so it doesn't interfere with the user's # wishlist from self._dependency_tree dep_tree = DependencyTree(self._readers, sensor_comps, mods, available_only=available_only) # ignore inline compositor dependencies starting with '_' comps = (comp for comp_dict in sensor_comps.values() for comp in comp_dict.keys() if not comp['name'].startswith('_')) # make sure that these composites are even create-able by these readers all_comps = set(comps) # find_dependencies will update the all_comps set with DataIDs try: dep_tree.populate_with_keys(all_comps) except MissingDependencies: pass available_comps = set(x.name for x in dep_tree.trunk()) # get rid of modified composites that are in the trunk return sorted(available_comps & all_comps) def available_composite_ids(self): """Get IDs of composites that can be generated from the available datasets.""" return self._check_known_composites(available_only=True) def available_composite_names(self): """Names of all configured composites known to this Scene.""" return sorted(set(x['name'] for x in self.available_composite_ids())) def all_composite_ids(self): """Get all IDs for configured composites.""" return self._check_known_composites() def all_composite_names(self): """Get all names for all configured composites.""" return sorted(set(x['name'] for x in self.all_composite_ids())) def all_modifier_names(self): """Get names of configured modifier objects.""" return sorted(self._dependency_tree.modifiers.keys()) def __str__(self): """Generate a nice print out for the scene.""" res = (str(proj) for proj in self._datasets.values()) return "\n".join(res) def __iter__(self): """Iterate over the datasets.""" for x in self._datasets.values(): yield x def iter_by_area(self): """Generate datasets grouped by Area. :return: generator of (area_obj, list of dataset objects) """ datasets_by_area = {} for ds in self: a = ds.attrs.get('area') dsid = DataID.from_dataarray(ds) datasets_by_area.setdefault(a, []).append(dsid) return datasets_by_area.items() def keys(self, **kwargs): """Get DataID keys for the underlying data container.""" return self._datasets.keys(**kwargs) def values(self): """Get values for the underlying data container.""" return self._datasets.values() def _copy_datasets_and_wishlist(self, new_scn, datasets): for ds_id in datasets: # NOTE: Must use `._datasets` or side effects of `__setitem__` # could hurt us with regards to the wishlist new_scn._datasets[ds_id] = self[ds_id] new_scn._wishlist = self._wishlist.copy() def copy(self, datasets=None): """Create a copy of the Scene including dependency information. Args: datasets (list, tuple): `DataID` objects for the datasets to include in the new Scene object. 
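# Illustrative usage sketch (not part of the upstream module): the
# introspection methods above list what the configured readers can load and
# which composites could be generated from it.  The reader name and paths
# are placeholders.
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob("/data/abi/*.nc"), reader="abi_l1b")
print(scn.available_dataset_names())                  # channels in the files
print(scn.available_dataset_names(composites=True))   # plus loadable composites
print(scn.available_composite_names())
print(scn.all_dataset_names(reader_name="abi_l1b"))   # everything configured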
""" new_scn = self.__class__() new_scn.attrs = self.attrs.copy() new_scn._dependency_tree = self._dependency_tree.copy() if datasets is None: datasets = self.keys() self._copy_datasets_and_wishlist(new_scn, datasets) return new_scn @property def all_same_area(self): """All contained data arrays are on the same area.""" all_areas = [x.attrs.get('area', None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0] == x for x in all_areas[1:]) @property def all_same_proj(self): """All contained data array are in the same projection.""" all_areas = [x.attrs.get('area', None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0].crs == x.crs for x in all_areas[1:]) @staticmethod def _slice_area_from_bbox(src_area, dst_area, ll_bbox=None, xy_bbox=None): """Slice the provided area using the bounds provided.""" if ll_bbox is not None: dst_area = AreaDefinition( 'crop_area', 'crop_area', 'crop_latlong', {'proj': 'latlong'}, 100, 100, ll_bbox) elif xy_bbox is not None: dst_area = AreaDefinition( 'crop_area', 'crop_area', 'crop_xy', src_area.crs, src_area.width, src_area.height, xy_bbox) x_slice, y_slice = src_area.get_area_slices(dst_area) return src_area[y_slice, x_slice], y_slice, x_slice def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): """Slice scene in-place for the datasets specified.""" new_datasets = {} datasets = (self[ds_id] for ds_id in dataset_ids) for ds, parent_ds in dataset_walker(datasets): ds_id = DataID.from_dataarray(ds) # handle ancillary variables pres = None if parent_ds is not None: parent_dsid = DataID.from_dataarray(parent_ds) pres = new_datasets[parent_dsid] if ds_id in new_datasets: replace_anc(ds, pres) continue if area_only and ds.attrs.get('area') is None: new_datasets[ds_id] = ds replace_anc(ds, pres) continue if not isinstance(slice_key, dict): # match dimension name to slice object key = dict(zip(ds.dims, slice_key)) else: key = slice_key new_ds = ds.isel(**key) if new_area is not None: new_ds.attrs['area'] = new_area new_datasets[ds_id] = new_ds if parent_ds is None: # don't use `__setitem__` because we don't want this to # affect the existing wishlist/dep tree self._datasets[ds_id] = new_ds else: replace_anc(new_ds, pres) def slice(self, key): """Slice Scene by dataset index. .. note:: DataArrays that do not have an ``area`` attribute will not be sliced. """ if not self.all_same_area: raise RuntimeError("'Scene' has different areas and cannot " "be usefully sliced.") # slice new_scn = self.copy() new_scn._wishlist = self._wishlist for area, dataset_ids in self.iter_by_area(): if area is not None: # assume dimensions for area are y and x one_ds = self[dataset_ids[0]] area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ['y', 'x']) new_area = area[area_key] else: new_area = None new_scn._slice_datasets(dataset_ids, key, new_area) return new_scn def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): """Crop Scene to a specific Area boundary or bounding box. Args: area (AreaDefinition): Area to crop the current Scene to ll_bbox (tuple, list): 4-element tuple where values are in lon/lat degrees. Elements are ``(xmin, ymin, xmax, ymax)`` where X is longitude and Y is latitude. xy_bbox (tuple, list): Same as `ll_bbox` but elements are in projection units. dataset_ids (iterable): DataIDs to include in the returned `Scene`. Defaults to all datasets. 
This method will attempt to intelligently slice the data to preserve relationships between datasets. For example, if we are cropping two DataArrays of 500m and 1000m pixel resolution then this method will assume that exactly 4 pixels of the 500m array cover the same geographic area as a single 1000m pixel. It handles these cases based on the shapes of the input arrays and adjusting slicing indexes accordingly. This method will have trouble handling cases where data arrays seem related but don't cover the same geographic area or if the coarsest resolution data is not related to the other arrays which are related. It can be useful to follow cropping with a call to the native resampler to resolve all datasets to the same resolution and compute any composites that could not be generated previously:: >>> cropped_scn = scn.crop(ll_bbox=(-105., 40., -95., 50.)) >>> remapped_scn = cropped_scn.resample(resampler='native') .. note:: The `resample` method automatically crops input data before resampling to save time/memory. """ if len([x for x in [area, ll_bbox, xy_bbox] if x is not None]) != 1: raise ValueError("One and only one of 'area', 'll_bbox', " "or 'xy_bbox' can be specified.") new_scn = self.copy(datasets=dataset_ids) if not new_scn.all_same_proj and xy_bbox is not None: raise ValueError("Can't crop when dataset_ids are not all on the " "same projection.") # get the lowest resolution area, use it as the base of the slice # this makes sure that the other areas *should* be a consistent factor coarsest_area = new_scn.coarsest_area() if isinstance(area, str): area = get_area_def(area) new_coarsest_area, min_y_slice, min_x_slice = self._slice_area_from_bbox( coarsest_area, area, ll_bbox, xy_bbox) new_target_areas = {} for src_area, dataset_ids in new_scn.iter_by_area(): if src_area is None: for ds_id in dataset_ids: new_scn._datasets[ds_id] = self[ds_id] continue y_factor, y_remainder = np.divmod(float(src_area.shape[0]), coarsest_area.shape[0]) x_factor, x_remainder = np.divmod(float(src_area.shape[1]), coarsest_area.shape[1]) y_factor = int(y_factor) x_factor = int(x_factor) if y_remainder == 0 and x_remainder == 0: y_slice = slice(min_y_slice.start * y_factor, min_y_slice.stop * y_factor) x_slice = slice(min_x_slice.start * x_factor, min_x_slice.stop * x_factor) new_area = src_area[y_slice, x_slice] slice_key = {'y': y_slice, 'x': x_slice} new_scn._slice_datasets(dataset_ids, slice_key, new_area) else: new_target_areas[src_area] = self._slice_area_from_bbox( src_area, area, ll_bbox, xy_bbox ) return new_scn def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', **dim_kwargs): """Create an aggregated version of the Scene. Args: dataset_ids (iterable): DataIDs to include in the returned `Scene`. Defaults to all datasets. func (string): Function to apply on each aggregation window. One of 'mean', 'sum', 'min', 'max', 'median', 'argmin', 'argmax', 'prod', 'std', 'var'. 'mean' is the default. boundary: See :meth:`xarray.DataArray.coarsen`, 'trim' by default. side: See :meth:`xarray.DataArray.coarsen`, 'left' by default. dim_kwargs: the size of the windows to aggregate. Returns: A new aggregated scene See also: xarray.DataArray.coarsen Example: `scn.aggregate(func='min', x=2, y=2)` will apply the `min` function across a window of size 2 pixels. 
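# Minimal runnable sketch of the area comparison helpers that the cropping
# code above relies on: with two synthetic datasets on the same projection,
# ``coarsest_area``/``finest_area`` pick the largest/smallest pixels.  The
# names, areas and data below are placeholders.
import numpy as np
import xarray as xr
from pyresample.geometry import AreaDefinition

from satpy import Scene


def _merc_area(rows, cols):
    return AreaDefinition("demo", "demo", "demo", "+proj=merc +datum=WGS84",
                          cols, rows, (-1e6, -1e6, 1e6, 1e6))


scn = Scene()
scn["coarse"] = xr.DataArray(np.zeros((100, 100)), dims=("y", "x"),
                             attrs={"area": _merc_area(100, 100)})
scn["fine"] = xr.DataArray(np.zeros((200, 200)), dims=("y", "x"),
                           attrs={"area": _merc_area(200, 200)})
assert scn.coarsest_area().shape == (100, 100)
assert scn.finest_area().shape == (200, 200)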
""" new_scn = self.copy(datasets=dataset_ids) for src_area, ds_ids in new_scn.iter_by_area(): if src_area is None: for ds_id in ds_ids: new_scn._datasets[ds_id] = self[ds_id] continue target_area = src_area.aggregate(boundary=boundary, **dim_kwargs) try: resolution = max(target_area.pixel_size_x, target_area.pixel_size_y) except AttributeError: resolution = max(target_area.lats.resolution, target_area.lons.resolution) for ds_id in ds_ids: res = self[ds_id].coarsen(boundary=boundary, side=side, **dim_kwargs) new_scn._datasets[ds_id] = getattr(res, func)() new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy() new_scn._datasets[ds_id].attrs['area'] = target_area new_scn._datasets[ds_id].attrs['resolution'] = resolution return new_scn def get(self, key, default=None): """Return value from DatasetDict with optional default.""" return self._datasets.get(key, default) def __getitem__(self, key): """Get a dataset or create a new 'slice' of the Scene.""" if isinstance(key, tuple): return self.slice(key) return self._datasets[key] def __setitem__(self, key, value): """Add the item to the scene.""" self._datasets[key] = value # this could raise a KeyError but never should in this case ds_id = self._datasets.get_key(key) self._wishlist.add(ds_id) self._dependency_tree.add_leaf(ds_id) def __delitem__(self, key): """Remove the item from the scene.""" k = self._datasets.get_key(key) self._wishlist.discard(k) del self._datasets[k] def __contains__(self, name): """Check if the dataset is in the scene.""" return name in self._datasets def _slice_data(self, source_area, slices, dataset): """Slice the data to reduce it.""" slice_x, slice_y = slices dataset = dataset.isel(x=slice_x, y=slice_y) if ('x', source_area.width) not in dataset.sizes.items(): raise RuntimeError if ('y', source_area.height) not in dataset.sizes.items(): raise RuntimeError dataset.attrs['area'] = source_area return dataset def _resampled_scene(self, new_scn, destination_area, reduce_data=True, **resample_kwargs): """Resample `datasets` to the `destination` area. If data reduction is enabled, some local caching is perfomed in order to avoid recomputation of area intersections. 
""" new_datasets = {} datasets = list(new_scn._datasets.values()) destination_area = self._get_finalized_destination_area(destination_area, new_scn) resamplers = {} reductions = {} for dataset, parent_dataset in dataset_walker(datasets): ds_id = DataID.from_dataarray(dataset) pres = None if parent_dataset is not None: pres = new_datasets[DataID.from_dataarray(parent_dataset)] if ds_id in new_datasets: replace_anc(new_datasets[ds_id], pres) if ds_id in new_scn._datasets: new_scn._datasets[ds_id] = new_datasets[ds_id] continue if dataset.attrs.get('area') is None: if parent_dataset is None: new_scn._datasets[ds_id] = dataset else: replace_anc(dataset, pres) continue LOG.debug("Resampling %s", ds_id) source_area = dataset.attrs['area'] dataset, source_area = self._reduce_data(dataset, source_area, destination_area, reduce_data, reductions, resample_kwargs) self._prepare_resampler(source_area, destination_area, resamplers, resample_kwargs) kwargs = resample_kwargs.copy() kwargs['resampler'] = resamplers[source_area] res = resample_dataset(dataset, destination_area, **kwargs) new_datasets[ds_id] = res if ds_id in new_scn._datasets: new_scn._datasets[ds_id] = res if parent_dataset is not None: replace_anc(res, pres) def _get_finalized_destination_area(self, destination_area, new_scn): if isinstance(destination_area, str): destination_area = get_area_def(destination_area) if hasattr(destination_area, 'freeze'): try: finest_area = new_scn.finest_area() destination_area = destination_area.freeze(finest_area) except ValueError: raise ValueError("No dataset areas available to freeze " "DynamicAreaDefinition.") return destination_area def _prepare_resampler(self, source_area, destination_area, resamplers, resample_kwargs): if source_area not in resamplers: key, resampler = prepare_resampler( source_area, destination_area, **resample_kwargs) resamplers[source_area] = resampler self._resamplers[key] = resampler def _reduce_data(self, dataset, source_area, destination_area, reduce_data, reductions, resample_kwargs): try: if reduce_data: key = source_area try: (slice_x, slice_y), source_area = reductions[key] except KeyError: if resample_kwargs.get('resampler') == 'gradient_search': factor = resample_kwargs.get('shape_divisible_by', 2) else: factor = None try: slice_x, slice_y = source_area.get_area_slices( destination_area, shape_divisible_by=factor) except TypeError: slice_x, slice_y = source_area.get_area_slices( destination_area) source_area = source_area[slice_y, slice_x] reductions[key] = (slice_x, slice_y), source_area dataset = self._slice_data(source_area, (slice_x, slice_y), dataset) else: LOG.debug("Data reduction disabled by the user") except NotImplementedError: LOG.info("Not reducing data before resampling.") return dataset, source_area def resample(self, destination=None, datasets=None, generate=True, unload=True, resampler=None, reduce_data=True, **resample_kwargs): """Resample datasets and return a new scene. Args: destination (AreaDefinition, GridDefinition): area definition to resample to. If not specified then the area returned by `Scene.finest_area()` will be used. datasets (list): Limit datasets to resample to these specified data arrays. By default all currently loaded datasets are resampled. generate (bool): Generate any requested composites that could not be previously due to incompatible areas (default: True). unload (bool): Remove any datasets no longer needed after requested composites have been generated (default: True). resampler (str): Name of resampling method to use. 
By default, this is a nearest neighbor KDTree-based resampling ('nearest'). Other possible values include 'native', 'ewa', etc. See the :mod:`~satpy.resample` documentation for more information. reduce_data (bool): Reduce data by matching the input and output areas and slicing the data arrays (default: True) resample_kwargs: Remaining keyword arguments to pass to individual resampler classes. See the individual resampler class documentation :mod:`here ` for available arguments. """ if destination is None: destination = self.finest_area(datasets) new_scn = self.copy(datasets=datasets) self._resampled_scene(new_scn, destination, resampler=resampler, reduce_data=reduce_data, **resample_kwargs) # regenerate anything from the wishlist that needs it (combining # multiple resolutions, etc.) new_scn.generate_possible_composites(generate, unload) return new_scn def show(self, dataset_id, overlay=None): """Show the *dataset* on screen as an image. Show dataset on screen as an image, possibly with an overlay. Args: dataset_id (DataID, DataQuery or str): Either a DataID, a DataQuery or a string, that refers to a data array that has been previously loaded using Scene.load. overlay (dict, optional): Add an overlay before showing the image. The keys/values for this dictionary are as the arguments for :meth:`~satpy.writers.add_overlay`. The dictionary should contain at least the key ``"coast_dir"``, which should refer to a top-level directory containing shapefiles. See the pycoast_ package documentation for coastline shapefile installation instructions. .. _pycoast: https://pycoast.readthedocs.io/ """ from satpy.utils import in_ipynb from satpy.writers import get_enhanced_image img = get_enhanced_image(self[dataset_id].squeeze(), overlay=overlay) if not in_ipynb(): img.show() return img def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynamic=False): """Convert satpy Scene to geoviews. Args: gvtype (gv plot type): One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points Default to :class:`geoviews.Image`. See Geoviews documentation for details. datasets (list): Limit included products to these datasets kdims (list of str): Key dimensions. See geoviews documentation for more information. vdims : list of str, optional Value dimensions. See geoviews documentation for more information. If not given defaults to first data variable dynamic : boolean, optional, default False Returns: geoviews object Todo: * better handling of projection information in datasets which are to be passed to geoviews """ import geoviews as gv from cartopy import crs # noqa if gvtype is None: gvtype = gv.Image ds = self.to_xarray_dataset(datasets) if vdims is None: # by default select first data variable as display variable vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name if hasattr(ds, "area") and hasattr(ds.area, 'to_cartopy_crs'): dscrs = ds.area.to_cartopy_crs() gvds = gv.Dataset(ds, crs=dscrs) else: gvds = gv.Dataset(ds) if "latitude" in ds.coords: gview = gvds.to(gv.QuadMesh, kdims=["longitude", "latitude"], vdims=vdims, dynamic=dynamic) else: gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, dynamic=dynamic) return gview def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. 
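# Illustrative usage sketch (not part of the upstream module): ``generate``
# and ``unload`` control whether composites that were waiting for matching
# areas are built after resampling and whether their no-longer-needed inputs
# are dropped.  A common case is an ABI 'true_color' composite that can only
# be generated once all channels share a grid; paths are placeholders.
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob("/data/abi/*.nc"), reader="abi_l1b")
scn.load(["true_color"])         # composite waits for matching resolutions
new_scn = scn.resample(resampler="native", generate=True, unload=True)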
Parameters: datasets (list): List of products to include in the :class:`xarray.Dataset` Returns: :class:`xarray.Dataset` """ dataarrays = self._get_dataarrays_from_identifiers(datasets) if len(dataarrays) == 0: return xr.Dataset() ds_dict = {i.attrs['name']: i.rename(i.attrs['name']) for i in dataarrays if i.attrs.get('area') is not None} mdata = combine_metadata(*tuple(i.attrs for i in dataarrays)) if mdata.get('area') is None or not isinstance(mdata['area'], SwathDefinition): # either don't know what the area is or we have an AreaDefinition ds = xr.merge(ds_dict.values()) else: # we have a swath definition and should use lon/lat values lons, lats = mdata['area'].get_lonlats() if not isinstance(lons, DataArray): lons = DataArray(lons, dims=('y', 'x')) lats = DataArray(lats, dims=('y', 'x')) ds = xr.Dataset(ds_dict, coords={"latitude": lats, "longitude": lons}) ds.attrs = mdata return ds def _get_dataarrays_from_identifiers(self, identifiers): if identifiers is not None: dataarrays = [self[ds] for ds in identifiers] else: dataarrays = [self._datasets.get(ds) for ds in self._wishlist] dataarrays = [ds for ds in dataarrays if ds is not None] return dataarrays def images(self): """Generate images for all the datasets from the scene.""" for ds_id, projectable in self._datasets.items(): if ds_id in self._wishlist: yield projectable.to_image() def save_dataset(self, dataset_id, filename=None, writer=None, overlay=None, decorate=None, compute=True, **kwargs): """Save the ``dataset_id`` to file using ``writer``. Args: dataset_id (str or Number or DataID or DataQuery): Identifier for the dataset to save to disk. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. writer (str): Name of writer to use when writing data to disk. Default to ``"geotiff"``. If not provided, but ``filename`` is provided then the filename's extension is used to determine the best writer to use. overlay (dict): See :func:`satpy.writers.add_overlay`. Only valid for "image" writers like `geotiff` or `simple_image`. decorate (dict): See :func:`satpy.writers.add_decorate`. Only valid for "image" writers like `geotiff` or `simple_image`. compute (bool): If `True` (default), compute all of the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. kwargs: Additional writer arguments. See :doc:`../writers` for more information. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a :doc:`dask:delayed` object or running :func:`dask.array.store`. If `compute` is `False` then the returned value is either a :doc:`dask:delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to :func:`dask.array.store`. If target is provided the the caller is responsible for calling `target.close()` if the target has this method. 
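# Illustrative usage sketch (not part of the upstream module): once
# resampled, data can be handed to xarray, shown on screen, or written out.
# The writer is chosen explicitly or from the filename extension, and the
# filename may use attribute-based patterns.  Paths are placeholders and
# 'eurol' is a builtin area.
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob("/data/seviri/*"), reader="seviri_l1b_hrit")
scn.load(["IR_108"])
local_scn = scn.resample("eurol")
ds = local_scn.to_xarray_dataset()      # single xarray.Dataset, shared coords
local_scn.show("IR_108")                # quick-look image on screen
local_scn.save_dataset("IR_108", filename="ir108.png")   # writer from ".png"
local_scn.save_datasets(writer="geotiff", base_dir="/tmp/satpy_out",
                        filename="{name}_{start_time:%Y%m%d_%H%M}.tif")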
""" if writer is None and filename is None: writer = 'geotiff' elif writer is None: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, filename=filename, **kwargs) return writer.save_dataset(self[dataset_id], overlay=overlay, decorate=decorate, compute=compute, **save_kwargs) def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, **kwargs): """Save requested datasets present in a scene to disk using ``writer``. Note that dependency datasets (those loaded solely to create another and not requested explicitly) that may be contained in this Scene will not be saved by default. The default datasets are those explicitly requested through ``.load`` and exist in the Scene currently. Specify dependency datasets using the ``datasets`` keyword argument. Args: writer (str): Name of writer to use when writing data to disk. Default to ``"geotiff"``. If not provided, but ``filename`` is provided then the filename's extension is used to determine the best writer to use. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. datasets (iterable): Limit written products to these datasets. Elements can be string name, a wavelength as a number, a DataID, or DataQuery object. compute (bool): If `True` (default), compute all of the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. kwargs: Additional writer arguments. See :doc:`../writers` for more information. Returns: Value returned depends on `compute` keyword argument. If `compute` is `True` the value is the result of a either a `dask.array.store` operation or a :doc:`dask:delayed` compute, typically this is `None`. If `compute` is `False` then the result is either a :doc:`dask:delayed` object that can be computed with `delayed.compute()` or a two element tuple of sources and targets to be passed to :func:`dask.array.store`. If `targets` is provided then it is the caller's responsibility to close any objects that have a "close" method. """ dataarrays = self._get_dataarrays_from_identifiers(datasets) if not dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " "composite inputs may need to have matching " "dimensions (eg. through resampling).") if writer is None: if filename is None: writer = 'geotiff' else: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, filename=filename, **kwargs) return writer.save_datasets(dataarrays, compute=compute, **save_kwargs) @staticmethod def _get_writer_by_ext(extension): """Find the writer matching the ``extension``. Defaults to "simple_image". Example Mapping: - geotiff: .tif, .tiff - cf: .nc - mitiff: .mitiff - simple_image: .png, .jpeg, .jpg, ... Args: extension (str): Filename extension starting with "." (ex. ".png"). Returns: str: The name of the writer to use for this extension. 
""" mapping = {".tiff": "geotiff", ".tif": "geotiff", ".nc": "cf", ".mitiff": "mitiff"} return mapping.get(extension.lower(), 'simple_image') def _remove_failed_datasets(self, keepables): """Remove the datasets that we couldn't create.""" # copy the set of missing datasets because they won't be valid # after they are removed in the next line missing = self.missing_datasets.copy() keepables = keepables or set() # remove reader datasets that couldn't be loaded so they aren't # attempted again later for n in self.missing_datasets: if n not in keepables: self._wishlist.discard(n) missing_str = ", ".join(str(x) for x in missing) LOG.warning("The following datasets were not created and may require " "resampling to be generated: {}".format(missing_str)) def unload(self, keepables=None): """Unload all unneeded datasets. Datasets are considered unneeded if they weren't directly requested or added to the Scene by the user or they are no longer needed to generate composites that have yet to be generated. Args: keepables (iterable): DataIDs to keep whether they are needed or not. """ to_del = [ds_id for ds_id, projectable in self._datasets.items() if ds_id not in self._wishlist and (not keepables or ds_id not in keepables)] for ds_id in to_del: LOG.debug("Unloading dataset: %r", ds_id) del self._datasets[ds_id] def load(self, wishlist, calibration='*', resolution='*', polarization='*', level='*', generate=True, unload=True, **kwargs): """Read and generate requested datasets. When the `wishlist` contains `DataQuery` objects they can either be fully-specified `DataQuery` objects with every parameter specified or they can not provide certain parameters and the "best" parameter will be chosen. For example, if a dataset is available in multiple resolutions and no resolution is specified in the wishlist's DataQuery then the highest (smallest number) resolution will be chosen. Loaded `DataArray` objects are created and stored in the Scene object. Args: wishlist (iterable): List of names (str), wavelengths (float), DataQuery objects or DataID of the requested datasets to load. See `available_dataset_ids()` for what datasets are available. calibration (list, str): Calibration levels to limit available datasets. This is a shortcut to having to list each DataQuery/DataID in `wishlist`. resolution (list | float): Resolution to limit available datasets. This is a shortcut similar to calibration. polarization (list | str): Polarization ('V', 'H') to limit available datasets. This is a shortcut similar to calibration. level (list | str): Pressure level to limit available datasets. Pressure should be in hPa or mb. If an altitude is used it should be specified in inverse meters (1/m). The units of this parameter ultimately depend on the reader. generate (bool): Generate composites from the loaded datasets (default: True) unload (bool): Unload datasets that were required to generate the requested datasets (composite dependencies) but are no longer needed. 
""" if isinstance(wishlist, str): raise TypeError("'load' expects a list of datasets, got a string.") dataset_keys = set(wishlist) needed_datasets = (self._wishlist | dataset_keys) - set(self._datasets.keys()) query = DataQuery(calibration=calibration, polarization=polarization, resolution=resolution, level=level) self._update_dependency_tree(needed_datasets, query) self._wishlist |= needed_datasets self._read_datasets_from_storage(**kwargs) self.generate_possible_composites(generate, unload) def _update_dependency_tree(self, needed_datasets, query): try: comps, mods = load_compositor_configs_for_sensors(self.sensor_names) self._dependency_tree.update_compositors_and_modifiers(comps, mods) self._dependency_tree.populate_with_keys(needed_datasets, query) except MissingDependencies as err: raise KeyError(str(err)) def _read_datasets_from_storage(self, **kwargs): """Load datasets from the necessary reader. Args: **kwargs: Keyword arguments to pass to the reader's `load` method. Returns: DatasetDict of loaded datasets """ nodes = self._dependency_tree.leaves(limit_nodes_to=self.missing_datasets) return self._read_dataset_nodes_from_storage(nodes, **kwargs) def _read_dataset_nodes_from_storage(self, reader_nodes, **kwargs): """Read the given dataset nodes from storage.""" # Sort requested datasets by reader reader_datasets = self._sort_dataset_nodes_by_reader(reader_nodes) loaded_datasets = self._load_datasets_by_readers(reader_datasets, **kwargs) self._datasets.update(loaded_datasets) return loaded_datasets def _sort_dataset_nodes_by_reader(self, reader_nodes): reader_datasets = {} for node in reader_nodes: ds_id = node.name # if we already have this node loaded or the node was assigned # by the user (node data is None) then don't try to load from a # reader if ds_id in self._datasets or not isinstance(node, ReaderNode): continue reader_name = node.reader_name if reader_name is None: # This shouldn't be possible raise RuntimeError("Dependency tree has a corrupt node.") reader_datasets.setdefault(reader_name, set()).add(ds_id) return reader_datasets def _load_datasets_by_readers(self, reader_datasets, **kwargs): # load all datasets for one reader at a time loaded_datasets = DatasetDict() for reader_name, ds_ids in reader_datasets.items(): reader_instance = self._readers[reader_name] new_datasets = reader_instance.load(ds_ids, **kwargs) loaded_datasets.update(new_datasets) return loaded_datasets def generate_possible_composites(self, generate, unload): """See what we can generate and do it.""" if generate: keepables = self._generate_composites_from_loaded_datasets() else: # don't lose datasets we loaded to try to generate composites keepables = set(self._datasets.keys()) | self._wishlist if self.missing_datasets: self._remove_failed_datasets(keepables) if unload: self.unload(keepables=keepables) def _filter_loaded_datasets_from_trunk_nodes(self, trunk_nodes): loaded_data_ids = self._datasets.keys() for trunk_node in trunk_nodes: if trunk_node.name in loaded_data_ids: continue yield trunk_node def _generate_composites_from_loaded_datasets(self): """Compute all the composites contained in `requirements`.""" trunk_nodes = self._dependency_tree.trunk(limit_nodes_to=self.missing_datasets, limit_children_to=self._datasets.keys()) needed_comp_nodes = set(self._filter_loaded_datasets_from_trunk_nodes(trunk_nodes)) return self._generate_composites_nodes_from_loaded_datasets(needed_comp_nodes) def _generate_composites_nodes_from_loaded_datasets(self, compositor_nodes): """Read (generate) composites.""" 
keepables = set() for node in compositor_nodes: self._generate_composite(node, keepables) return keepables def _generate_composite(self, comp_node: CompositorNode, keepables: set): """Collect all composite prereqs and create the specified composite. Args: comp_node: Composite Node to generate a Dataset for keepables: `set` to update if any datasets are needed when generation is continued later. This can happen if generation is delayed to incompatible areas which would require resampling first. """ if self._datasets.contains(comp_node.name): # already loaded return compositor = comp_node.compositor prereqs = comp_node.required_nodes optional_prereqs = comp_node.optional_nodes try: delayed_prereq = False prereq_datasets = self._get_prereq_datasets( comp_node.name, prereqs, keepables, ) except DelayedGeneration: # if we are missing a required dependency that could be generated # later then we need to wait to return until after we've also # processed the optional dependencies delayed_prereq = True except KeyError: # we are missing a hard requirement that will never be available # there is no need to "keep" optional dependencies return optional_datasets = self._get_prereq_datasets( comp_node.name, optional_prereqs, keepables, skip=True ) # we are missing some prerequisites # in the future we may be able to generate this composite (delayed) # so we need to hold on to successfully loaded prerequisites and # optional prerequisites if delayed_prereq: preservable_datasets = set(self._datasets.keys()) prereq_ids = set(p.name for p in prereqs) opt_prereq_ids = set(p.name for p in optional_prereqs) keepables |= preservable_datasets & (prereq_ids | opt_prereq_ids) return try: composite = compositor(prereq_datasets, optional_datasets=optional_datasets, **comp_node.name.to_dict()) cid = DataID.new_id_from_dataarray(composite) self._datasets[cid] = composite # update the node with the computed DataID if comp_node.name in self._wishlist: self._wishlist.remove(comp_node.name) self._wishlist.add(cid) self._dependency_tree.update_node_name(comp_node, cid) except IncompatibleAreas: LOG.debug("Delaying generation of %s because of incompatible areas", str(compositor.id)) preservable_datasets = set(self._datasets.keys()) prereq_ids = set(p.name for p in prereqs) opt_prereq_ids = set(p.name for p in optional_prereqs) keepables |= preservable_datasets & (prereq_ids | opt_prereq_ids) # even though it wasn't generated keep a list of what # might be needed in other compositors keepables.add(comp_node.name) return def _get_prereq_datasets(self, comp_id, prereq_nodes, keepables, skip=False): """Get a composite's prerequisites, generating them if needed. Args: comp_id (DataID): DataID for the composite whose prerequisites are being collected. prereq_nodes (sequence of Nodes): Prerequisites to collect keepables (set): `set` to update if any prerequisites can't be loaded at this time (see `_generate_composite`). skip (bool): If True, consider prerequisites as optional and only log when they are missing. If False, prerequisites are considered required and will raise an exception and log a warning if they can't be collected. Defaults to False. Raises: KeyError: If required (skip=False) prerequisite can't be collected. 
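            DelayedGeneration: If a required (skip=False) prerequisite can only
                be generated later, e.g. after resampling.

        Returns:
            list: The prerequisite DataArrays that could be collected. Delayed
            prerequisites are skipped (and recorded in ``keepables``) instead
            of being returned.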
""" prereq_datasets = [] delayed_gen = False for prereq_node in prereq_nodes: prereq_id = prereq_node.name if prereq_id not in self._datasets and prereq_id not in keepables \ and isinstance(prereq_node, CompositorNode): self._generate_composite(prereq_node, keepables) # composite generation may have updated the DataID prereq_id = prereq_node.name if prereq_node is self._dependency_tree.empty_node: # empty sentinel node - no need to load it continue elif prereq_id in self._datasets: prereq_datasets.append(self._datasets[prereq_id]) elif isinstance(prereq_node, CompositorNode) and prereq_id in keepables: delayed_gen = True continue elif not skip: LOG.debug("Missing prerequisite for '{}': '{}'".format( comp_id, prereq_id)) raise KeyError("Missing composite prerequisite for" " '{}': '{}'".format(comp_id, prereq_id)) else: LOG.debug("Missing optional prerequisite for {}: {}".format(comp_id, prereq_id)) if delayed_gen: keepables.add(comp_id) keepables.update([x.name for x in prereq_nodes]) LOG.debug("Delaying generation of %s because of dependency's delayed generation: %s", comp_id, prereq_id) if not skip: LOG.debug("Delayed prerequisite for '{}': '{}'".format(comp_id, prereq_id)) raise DelayedGeneration( "Delayed composite prerequisite for " "'{}': '{}'".format(comp_id, prereq_id)) else: LOG.debug("Delayed optional prerequisite for {}: {}".format(comp_id, prereq_id)) return prereq_datasets satpy-0.34.0/satpy/tests/000077500000000000000000000000001420401153000152265ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/__init__.py000066400000000000000000000013671420401153000173460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The tests package.""" satpy-0.34.0/satpy/tests/compositor_tests/000077500000000000000000000000001420401153000206465ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/compositor_tests/__init__.py000066400000000000000000000011661420401153000227630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 - 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. """Tests for compositors.""" satpy-0.34.0/satpy/tests/compositor_tests/test_abi.py000066400000000000000000000052271420401153000230200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for ABI compositors.""" import unittest class TestABIComposites(unittest.TestCase): """Test ABI-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(['abi']) def test_simulated_green(self): """Test creating a fake 'green' band.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.abi import SimulatedGreen rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = SimulatedGreen('green', prerequisites=('C01', 'C02', 'C03'), standard_name='toa_bidirectional_reflectance') c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, dims=('y', 'x'), attrs={'name': 'C01', 'area': area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=('y', 'x'), attrs={'name': 'C02', 'area': area}) c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.35, dims=('y', 'x'), attrs={'name': 'C03', 'area': area}) res = comp((c01, c02, c03)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'green') self.assertEqual(res.attrs['standard_name'], 'toa_bidirectional_reflectance') data = res.compute() np.testing.assert_allclose(data, 0.28025) satpy-0.34.0/satpy/tests/compositor_tests/test_ahi.py000066400000000000000000000047251420401153000230300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for AHI compositors.""" import unittest class TestAHIComposites(unittest.TestCase): """Test AHI-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(['ahi']) def test_corrected_green(self): """Test adjusting the 'green' band.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.ahi import GreenCorrector rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = GreenCorrector('green', prerequisites=(0.51, 0.85), standard_name='toa_bidirectional_reflectance') c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, dims=('y', 'x'), attrs={'name': 'C01', 'area': area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=('y', 'x'), attrs={'name': 'C02', 'area': area}) res = comp((c01, c02)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'green') self.assertEqual(res.attrs['standard_name'], 'toa_bidirectional_reflectance') data = res.compute() np.testing.assert_allclose(data, 0.2575) satpy-0.34.0/satpy/tests/compositor_tests/test_glm.py000066400000000000000000000053621420401153000230440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for GLM compositors.""" class TestGLMComposites: """Test GLM-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(['glm']) def test_highlight_compositor(self): """Test creating a highlight composite.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.glm import HighlightCompositor rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = HighlightCompositor( 'c14_highlight', prerequisites=('flash_extent_density', 'C14'), min_hightlight=0.0, max_hightlight=1.0, ) flash_extent_density = xr.DataArray( da.zeros((rows, cols), chunks=25) + 0.5, dims=('y', 'x'), attrs={'name': 'flash_extent_density', 'area': area}) c14_data = np.repeat(np.arange(cols, dtype=np.float64)[None, :], rows, axis=0) c14 = xr.DataArray(da.from_array(c14_data, chunks=25) + 303.15, dims=('y', 'x'), attrs={ 'name': 'C14', 'area': area, 'standard_name': 'toa_brightness_temperature', }) res = comp((flash_extent_density, c14)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs['name'] == 'c14_highlight' data = res.compute() np.testing.assert_almost_equal(data.values.min(), -0.04) np.testing.assert_almost_equal(data.values.max(), 1.04) satpy-0.34.0/satpy/tests/compositor_tests/test_sar.py000066400000000000000000000062631420401153000230530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for SAR compositors.""" import unittest class TestSARComposites(unittest.TestCase): """Test SAR-specific composites.""" def test_sar_ice(self): """Test creating a the sar_ice composite.""" import dask.array as da import numpy as np import xarray as xr from satpy.composites.sar import SARIce rows = 2 cols = 2 comp = SARIce('sar_ice', prerequisites=('hh', 'hv'), standard_name='sar-ice') hh = xr.DataArray(da.zeros((rows, cols), chunks=25) + 2000, dims=('y', 'x'), attrs={'name': 'hh'}) hv = xr.DataArray(da.zeros((rows, cols), chunks=25) + 1000, dims=('y', 'x'), attrs={'name': 'hv'}) res = comp((hh, hv)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'sar_ice') self.assertEqual(res.attrs['standard_name'], 'sar-ice') data = res.compute() np.testing.assert_allclose(data.sel(bands='R'), 31.58280822) np.testing.assert_allclose(data.sel(bands='G'), 159869.56789876) np.testing.assert_allclose(data.sel(bands='B'), 44.68138191) def test_sar_ice_log(self): """Test creating a the sar_ice_log composite.""" import dask.array as da import numpy as np import xarray as xr from satpy.composites.sar import SARIceLog rows = 2 cols = 2 comp = SARIceLog('sar_ice_log', prerequisites=('hh', 'hv'), standard_name='sar-ice-log') hh = xr.DataArray(da.zeros((rows, cols), chunks=25) - 10, dims=('y', 'x'), attrs={'name': 'hh'}) hv = xr.DataArray(da.zeros((rows, cols), chunks=25) - 20, dims=('y', 'x'), attrs={'name': 'hv'}) res = comp((hh, hv)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'sar_ice_log') self.assertEqual(res.attrs['standard_name'], 'sar-ice-log') data = res.compute() np.testing.assert_allclose(data.sel(bands='R'), -20) np.testing.assert_allclose(data.sel(bands='G'), -4.6) np.testing.assert_allclose(data.sel(bands='B'), -10) satpy-0.34.0/satpy/tests/compositor_tests/test_viirs.py000066400000000000000000000224241420401153000234170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for VIIRS compositors.""" import unittest class TestVIIRSComposites(unittest.TestCase): """Test VIIRS-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(['viirs']) def test_histogram_dnb(self): """Test the 'histogram_dnb' compositor.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.viirs import HistogramDNB rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') dnb = np.zeros((rows, cols)) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) # data changes by row, sza changes by col for testing sza = np.zeros((rows, cols)) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) res = comp((c01, c02)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'histogram_dnb') self.assertEqual(res.attrs['standard_name'], 'equalized_radiance') data = res.compute() unique_values = np.unique(data) np.testing.assert_allclose(unique_values, [0.5994, 0.7992, 0.999], rtol=1e-3) def test_adaptive_dnb(self): """Test the 'adaptive_dnb' compositor.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.viirs import AdaptiveDNB rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = AdaptiveDNB('adaptive_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') dnb = np.zeros((rows, cols)) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) sza = np.zeros((rows, cols)) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) res = comp((c01, c02)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'adaptive_dnb') self.assertEqual(res.attrs['standard_name'], 'equalized_radiance') data = res.compute() np.testing.assert_allclose(data.data, 0.999, rtol=1e-4) def test_erf_dnb(self): """Test the 'dynamic_dnb' or ERF DNB compositor.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.viirs import ERFDNB rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') dnb = np.zeros((rows, cols)) + 0.25 dnb[2, :cols // 2] = np.nan dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 
= xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) sza = np.zeros((rows, cols)) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) lza = np.zeros((rows, cols)) + 70.0 lza[:, 3] += 20.0 lza[:, 4:] += 45.0 lza = da.from_array(lza, chunks=25) c03 = xr.DataArray(lza, dims=('y', 'x'), attrs={'name': 'lunar_zenith_angle', 'area': area}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, dims=('y',), attrs={'name': 'moon_illumination_fraction', 'area': area}) res = comp((c01, c02, c03, mif)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'dynamic_dnb') self.assertEqual(res.attrs['standard_name'], 'equalized_radiance') data = res.compute() unique = np.unique(data) assert np.isnan(unique).any() nonnan_unique = unique[~np.isnan(unique)] np.testing.assert_allclose( nonnan_unique, [0.00000000e+00, 1.00446703e-01, 1.64116082e-01, 2.09233451e-01, 1.43916324e+02, 2.03528498e+02, 2.49270516e+02]) def test_hncc_dnb(self): """Test the 'hncc_dnb' compositor.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.viirs import NCCZinke rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') dnb = np.zeros((rows, cols)) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) sza = np.zeros((rows, cols)) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) lza = np.zeros((rows, cols)) + 70.0 lza[:, 3] += 20.0 lza[:, 4:] += 45.0 lza = da.from_array(lza, chunks=25) c03 = xr.DataArray(lza, dims=('y', 'x'), attrs={'name': 'lunar_zenith_angle', 'area': area}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, dims=('y',), attrs={'name': 'moon_illumination_fraction', 'area': area}) res = comp((c01, c02, c03, mif)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'hncc_dnb') self.assertEqual(res.attrs['standard_name'], 'ncc_radiance') data = res.compute() unique = np.unique(data) np.testing.assert_allclose( unique, [3.48479712e-04, 6.96955799e-04, 1.04543189e-03, 4.75394738e-03, 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03, 4.50001560e+03]) satpy-0.34.0/satpy/tests/conftest.py000066400000000000000000000023371420401153000174320ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Shared preparation and utilities for testing. This module is executed automatically by pytest. """ import pytest import satpy @pytest.fixture(autouse=True) def reset_satpy_config(tmpdir): """Set satpy config to logical defaults for tests.""" test_config = { "cache_dir": str(tmpdir / "cache"), "data_dir": str(tmpdir / "data"), "config_path": [], "cache_lonlats": False, "cache_sensor_angles": False, } with satpy.config.set(test_config): yield satpy-0.34.0/satpy/tests/enhancement_tests/000077500000000000000000000000001420401153000207355ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/enhancement_tests/__init__.py000066400000000000000000000013771420401153000230560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The enhancements tests package.""" satpy-0.34.0/satpy/tests/enhancement_tests/test_abi.py000066400000000000000000000033711420401153000231050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing for the ABI enhancement functions.""" import unittest import dask.array as da import numpy as np import xarray as xr class TestABIEnhancement(unittest.TestCase): """Test the ABI enhancement functions.""" def setUp(self): """Create fake data for the tests.""" data = da.linspace(0, 1, 16).reshape((4, 4)) self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) def test_cimss_true_color_contrast(self): """Test the cimss_true_color_contrast enhancement.""" from trollimage.xrimage import XRImage from satpy.enhancements.abi import cimss_true_color_contrast expected = np.array([[ [0., 0., 0.05261956, 0.13396146], [0.21530335, 0.29664525, 0.37798715, 0.45932905], [0.54067095, 0.62201285, 0.70335475, 0.78469665], [0.86603854, 0.94738044, 1., 1.], ]]) img = XRImage(self.da) cimss_true_color_contrast(img) np.testing.assert_almost_equal(img.data.compute(), expected) satpy-0.34.0/satpy/tests/enhancement_tests/test_ahi.py000066400000000000000000000036761420401153000231230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing for the AHI enhancement function.""" import dask.array as da import numpy as np import xarray as xr class TestAHIEnhancement(): """Test the AHI enhancement functions.""" def setup(self): """Create test data.""" data = da.arange(-100, 1000, 110).reshape(2, 5) rgb_data = np.stack([data, data, data]) self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}) def test_jma_true_color_reproduction(self): """Test the jma_true_color_reproduction enhancement.""" from trollimage.xrimage import XRImage from satpy.enhancements.ahi import jma_true_color_reproduction expected = [[[-109.98, 10.998, 131.976, 252.954, 373.932], [494.91, 615.888, 736.866, 857.844, 978.822]], [[-97.6, 9.76, 117.12, 224.48, 331.84], [439.2, 546.56, 653.92, 761.28, 868.64]], [[-94.27, 9.427, 113.124, 216.821, 320.518], [424.215, 527.912, 631.609, 735.306, 839.003]]] img = XRImage(self.rgb) jma_true_color_reproduction(img) np.testing.assert_almost_equal(img.data.compute(), expected) satpy-0.34.0/satpy/tests/enhancement_tests/test_enhancements.py000066400000000000000000000420531420401153000250220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing the enhancements functions, e.g. 
cira_stretch.""" import contextlib import os from tempfile import NamedTemporaryFile from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.enhancements import create_colormap def run_and_check_enhancement(func, data, expected, **kwargs): """Perform basic checks that apply to multiple tests.""" from trollimage.xrimage import XRImage pre_attrs = data.attrs img = XRImage(data) func(img, **kwargs) assert isinstance(img.data.data, da.Array) old_keys = set(pre_attrs.keys()) # It is OK to have "enhancement_history" added new_keys = set(img.data.attrs.keys()) - {"enhancement_history"} assert old_keys == new_keys np.testing.assert_allclose(img.data.values, expected, atol=1.e-6, rtol=0) class TestEnhancementStretch: """Class for testing enhancements in satpy.enhancements.""" def setup_method(self): """Create test data used by every test.""" data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 data[0, 0] = np.nan # one bad value for testing crefl_data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 crefl_data /= 5.605 crefl_data[0, 0] = np.nan # one bad value for testing crefl_data[0, 1] = 0. self.ch1 = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) self.ch2 = xr.DataArray(crefl_data, dims=('y', 'x'), attrs={'test': 'test'}) rgb_data = np.stack([data, data, data]) self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}) def test_cira_stretch(self): """Test applying the cira_stretch.""" from satpy.enhancements import cira_stretch expected = np.array([[ [np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296], [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]]) run_and_check_enhancement(cira_stretch, self.ch1, expected) def test_reinhard(self): """Test the reinhard algorithm.""" from satpy.enhancements import reinhard_to_srgb expected = np.array([[[np.nan, 0., 0., 0.93333793, 1.29432402], [1.55428709, 1.76572249, 1.94738635, 2.10848544, 2.25432809]], [[np.nan, 0., 0., 0.93333793, 1.29432402], [1.55428709, 1.76572249, 1.94738635, 2.10848544, 2.25432809]], [[np.nan, 0., 0., 0.93333793, 1.29432402], [1.55428709, 1.76572249, 1.94738635, 2.10848544, 2.25432809]]]) run_and_check_enhancement(reinhard_to_srgb, self.rgb, expected) def test_lookup(self): """Test the lookup enhancement function.""" from satpy.enhancements import lookup expected = np.array([[ [0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]]]) lut = np.arange(256.) run_and_check_enhancement(lookup, self.ch1, expected, luts=lut) expected = np.array([[[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]], [[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]], [[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]]]) lut = np.arange(256.) 
lut = np.vstack((lut, lut, lut)).T run_and_check_enhancement(lookup, self.rgb, expected, luts=lut) def test_colorize(self): """Test the colorize enhancement function.""" from trollimage.colormap import brbg from satpy.enhancements import colorize expected = np.array([[ [np.nan, 3.29409498e-01, 3.29409498e-01, 4.35952940e-06, 4.35952940e-06], [4.35952940e-06, 4.35952940e-06, 4.35952940e-06, 4.35952940e-06, 4.35952940e-06]], [[np.nan, 1.88249866e-01, 1.88249866e-01, 2.35302110e-01, 2.35302110e-01], [2.35302110e-01, 2.35302110e-01, 2.35302110e-01, 2.35302110e-01, 2.35302110e-01]], [[np.nan, 1.96102817e-02, 1.96102817e-02, 1.88238767e-01, 1.88238767e-01], [1.88238767e-01, 1.88238767e-01, 1.88238767e-01, 1.88238767e-01, 1.88238767e-01]]]) run_and_check_enhancement(colorize, self.ch1, expected, palettes=brbg) def test_palettize(self): """Test the palettize enhancement function.""" from trollimage.colormap import brbg from satpy.enhancements import palettize expected = np.array([[[10, 0, 0, 10, 10], [10, 10, 10, 10, 10]]]) run_and_check_enhancement(palettize, self.ch1, expected, palettes=brbg) def test_three_d_effect(self): """Test the three_d_effect enhancement function.""" from satpy.enhancements import three_d_effect expected = np.array([[ [np.nan, np.nan, -389.5, -294.5, 826.5], [np.nan, np.nan, 85.5, 180.5, 1301.5]]]) run_and_check_enhancement(three_d_effect, self.ch1, expected) def test_crefl_scaling(self): """Test the crefl_scaling enhancement function.""" from satpy.enhancements import crefl_scaling expected = np.array([[ [np.nan, 0., 0., 0.44378, 0.631734], [0.737562, 0.825041, 0.912521, 1., 1.]]]) run_and_check_enhancement(crefl_scaling, self.ch2, expected, idx=[0., 25., 55., 100., 255.], sc=[0., 90., 140., 175., 255.]) def test_piecewise_linear_stretch(self): """Test the piecewise_linear_stretch enhancement function.""" from satpy.enhancements import piecewise_linear_stretch expected = np.array([[ [np.nan, 0., 0., 0.44378, 0.631734], [0.737562, 0.825041, 0.912521, 1., 1.]]]) run_and_check_enhancement(piecewise_linear_stretch, self.ch2 / 100.0, expected, xp=[0., 25., 55., 100., 255.], fp=[0., 90., 140., 175., 255.], reference_scale_factor=255, ) def test_btemp_threshold(self): """Test applying the cira_stretch.""" from satpy.enhancements import btemp_threshold expected = np.array([[ [np.nan, 0.946207, 0.892695, 0.839184, 0.785672], [0.73216, 0.595869, 0.158745, -0.278379, -0.715503]]]) run_and_check_enhancement(btemp_threshold, self.ch1, expected, min_in=-200, max_in=500, threshold=350) def test_merge_colormaps(self): """Test merging colormaps.""" from trollimage.colormap import Colormap from satpy.enhancements import _merge_colormaps as mcp from satpy.enhancements import create_colormap ret_map = mock.MagicMock() create_colormap_mock = mock.Mock(wraps=create_colormap) cmap1 = Colormap((1, (1., 1., 1.))) kwargs = {'palettes': cmap1} with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock): res = mcp(kwargs) assert res is cmap1 create_colormap_mock.assert_not_called() create_colormap_mock.reset_mock() ret_map.reset_mock() cmap1 = {'colors': 'blues', 'min_value': 0, 'max_value': 1} kwargs = {'palettes': [cmap1]} with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock),\ mock.patch('trollimage.colormap.blues', ret_map): _ = mcp(kwargs) create_colormap_mock.assert_called_once() ret_map.reverse.assert_not_called() ret_map.set_range.assert_called_with(0, 1) create_colormap_mock.reset_mock() ret_map.reset_mock() cmap2 = {'colors': 'blues', 'min_value': 2, 
'max_value': 3, 'reverse': True} kwargs = {'palettes': [cmap2]} with mock.patch('trollimage.colormap.blues', ret_map): _ = mcp(kwargs) ret_map.reverse.assert_called_once() ret_map.set_range.assert_called_with(2, 3) create_colormap_mock.reset_mock() ret_map.reset_mock() kwargs = {'palettes': [cmap1, cmap2]} with mock.patch('trollimage.colormap.blues', ret_map): _ = mcp(kwargs) ret_map.__add__.assert_called_once() def tearDown(self): """Clean up.""" @contextlib.contextmanager def closed_named_temp_file(**kwargs): """Named temporary file context manager that closes the file after creation. This helps with Windows systems which can get upset with opening or deleting a file that is already open. """ try: with NamedTemporaryFile(delete=False, **kwargs) as tmp_cmap: yield tmp_cmap.name finally: os.remove(tmp_cmap.name) def _write_cmap_to_file(cmap_filename, cmap_data): ext = os.path.splitext(cmap_filename)[1] if ext in (".npy",): np.save(cmap_filename, cmap_data) elif ext in (".npz",): np.savez(cmap_filename, cmap_data) else: np.savetxt(cmap_filename, cmap_data, delimiter=",") def _generate_cmap_test_data(color_scale, colormap_mode): cmap_data = np.array([ [1, 0, 0], [1, 1, 0], [1, 1, 1], [0, 0, 1], ], dtype=np.float64) if len(colormap_mode) != 3: _cmap_data = cmap_data cmap_data = np.empty((cmap_data.shape[0], len(colormap_mode)), dtype=np.float64) if colormap_mode.startswith("V") or colormap_mode.endswith("A"): cmap_data[:, 0] = np.array([128, 130, 132, 134]) / 255.0 cmap_data[:, -3:] = _cmap_data if colormap_mode.startswith("V") and colormap_mode.endswith("A"): cmap_data[:, 1] = np.array([128, 130, 132, 134]) / 255.0 if color_scale is None or color_scale == 255: cmap_data = (cmap_data * 255).astype(np.uint8) return cmap_data class TestColormapLoading: """Test utilities used with colormaps.""" @pytest.mark.parametrize("color_scale", [None, 1.0]) @pytest.mark.parametrize("colormap_mode", ["RGB", "VRGB", "VRGBA"]) @pytest.mark.parametrize("extra_kwargs", [ {}, {"min_value": 50, "max_value": 100}, ]) @pytest.mark.parametrize("filename_suffix", [".npy", ".npz", ".csv"]) def test_cmap_from_file(self, color_scale, colormap_mode, extra_kwargs, filename_suffix): """Test that colormaps can be loaded from a binary file.""" # create the colormap file on disk with closed_named_temp_file(suffix=filename_suffix) as cmap_filename: cmap_data = _generate_cmap_test_data(color_scale, colormap_mode) _write_cmap_to_file(cmap_filename, cmap_data) unset_first_value = 128.0 / 255.0 if colormap_mode.startswith("V") else 0.0 unset_last_value = 134.0 / 255.0 if colormap_mode.startswith("V") else 1.0 if (color_scale is None or color_scale == 255) and colormap_mode.startswith("V"): unset_first_value *= 255 unset_last_value *= 255 if "min_value" in extra_kwargs: unset_first_value = extra_kwargs["min_value"] unset_last_value = extra_kwargs["max_value"] first_color = [1.0, 0.0, 0.0] if colormap_mode == "VRGBA": first_color = [128.0 / 255.0] + first_color kwargs1 = {"filename": cmap_filename} kwargs1.update(extra_kwargs) if color_scale is not None: kwargs1["color_scale"] = color_scale cmap = create_colormap(kwargs1) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], first_color) assert cmap.values.shape[0] == 4 assert cmap.values[0] == unset_first_value assert cmap.values[-1] == unset_last_value def test_cmap_vrgb_as_rgba(self): """Test that data created as VRGB still reads as RGBA.""" with closed_named_temp_file(suffix=".npy") as cmap_filename: cmap_data = _generate_cmap_test_data(None, "VRGB") 
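            # write the 4-column VRGB array to disk, then read it back below with
            # ``colormap_mode`` forced to "RGBA"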
np.save(cmap_filename, cmap_data) cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 @pytest.mark.parametrize( ("real_mode", "forced_mode"), [ ("VRGBA", "RGBA"), ("VRGBA", "VRGB"), ("RGBA", "RGB"), ] ) @pytest.mark.parametrize("filename_suffix", [".npy", ".csv"]) def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix): """Test that reading colormaps with the wrong mode fails.""" with closed_named_temp_file(suffix=filename_suffix) as cmap_filename: cmap_data = _generate_cmap_test_data(None, real_mode) _write_cmap_to_file(cmap_filename, cmap_data) # Force colormap_mode VRGBA to RGBA and we should see an exception with pytest.raises(ValueError): create_colormap({'filename': cmap_filename, 'colormap_mode': forced_mode}) def test_cmap_from_file_bad_shape(self): """Test that unknown array shape causes an error.""" from satpy.enhancements import create_colormap # create the colormap file on disk with closed_named_temp_file(suffix='.npy') as cmap_filename: np.save(cmap_filename, np.array([ [0], [64], [128], [255], ])) with pytest.raises(ValueError): create_colormap({'filename': cmap_filename}) def test_cmap_from_config_path(self, tmp_path): """Test loading a colormap relative to a config path.""" import satpy from satpy.enhancements import create_colormap cmap_dir = tmp_path / "colormaps" cmap_dir.mkdir() cmap_filename = cmap_dir / "my_colormap.npy" cmap_data = _generate_cmap_test_data(None, "RGBA") np.save(cmap_filename, cmap_data) with satpy.config.set(config_path=[tmp_path]): rel_cmap_filename = os.path.join("colormaps", "my_colormap.npy") cmap = create_colormap({'filename': rel_cmap_filename, 'colormap_mode': "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 def test_cmap_from_trollimage(self): """Test that colormaps in trollimage can be loaded.""" from satpy.enhancements import create_colormap cmap = create_colormap({'colors': 'pubu'}) from trollimage.colormap import pubu np.testing.assert_equal(cmap.colors, pubu.colors) np.testing.assert_equal(cmap.values, pubu.values) def test_cmap_no_colormap(self): """Test that being unable to create a colormap raises an error.""" from satpy.enhancements import create_colormap with pytest.raises(ValueError): create_colormap({}) def test_cmap_list(self): """Test that colors can be a list/tuple.""" from satpy.enhancements import create_colormap colors = [ [0, 0, 1], [1, 0, 1], [0, 1, 1], [1, 1, 1], ] values = [2, 4, 6, 8] cmap = create_colormap({'colors': colors, 'color_scale': 1}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 cmap = create_colormap({'colors': colors, 'color_scale': 1, 'values': values}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 2 assert cmap.values[-1] == 8 satpy-0.34.0/satpy/tests/enhancement_tests/test_viirs.py000066400000000000000000000063771420401153000235170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 
Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing for the VIIRS enhancement function.""" import unittest import dask.array as da import numpy as np import xarray as xr from .test_enhancements import run_and_check_enhancement class TestVIIRSEnhancement(unittest.TestCase): """Class for testing the VIIRS enhancement function in satpy.enhancements.viirs.""" def setUp(self): """Create test data.""" data = np.arange(15, 301, 15).reshape(2, 10) data = da.from_array(data, chunks=(2, 10)) self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) self.palette = {'colors': [[14, [0.0, 0.0, 0.0]], [15, [0.0, 0.0, 0.39215]], [16, [0.76862, 0.63529, 0.44705]], [17, [0.76862, 0.63529, 0.44705]], [18, [0.0, 0.0, 1.0]], [20, [1.0, 1.0, 1.0]], [27, [0.0, 1.0, 1.0]], [30, [0.78431, 0.78431, 0.78431]], [31, [0.39215, 0.39215, 0.39215]], [88, [0.70588, 0.0, 0.90196]], [100, [0.19607, 1.0, 0.39215]], [120, [0.19607, 1.0, 0.39215]], [121, [0.0, 1.0, 0.0]], [130, [0.0, 1.0, 0.0]], [131, [0.78431, 1.0, 0.0]], [140, [0.78431, 1.0, 0.0]], [141, [1.0, 1.0, 0.58823]], [150, [1.0, 1.0, 0.58823]], [151, [1.0, 1.0, 0.0]], [160, [1.0, 1.0, 0.0]], [161, [1.0, 0.78431, 0.0]], [170, [1.0, 0.78431, 0.0]], [171, [1.0, 0.58823, 0.19607]], [180, [1.0, 0.58823, 0.19607]], [181, [1.0, 0.39215, 0.0]], [190, [1.0, 0.39215, 0.0]], [191, [1.0, 0.0, 0.0]], [200, [1.0, 0.0, 0.0]], [201, [0.0, 0.0, 0.0]]], 'min_value': 0, 'max_value': 201} def test_viirs(self): """Test VIIRS flood enhancement.""" from satpy.enhancements.viirs import water_detection expected = [[[1, 7, 8, 8, 8, 9, 10, 11, 14, 8], [20, 23, 26, 10, 12, 15, 18, 21, 24, 27]]] run_and_check_enhancement(water_detection, self.da, expected, palettes=self.palette) satpy-0.34.0/satpy/tests/etc/000077500000000000000000000000001420401153000160015ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/etc/composites/000077500000000000000000000000001420401153000201665ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/etc/composites/fake_sensor.yaml000066400000000000000000000150321420401153000233520ustar00rootroot00000000000000sensor_name: visir/fake_sensor modifiers: mod1: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds2 mod2: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - comp3 mod3: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds2 res_change: modifier: !!python/name:satpy.tests.utils.FakeModifier incomp_areas: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds1 incomp_areas_opt: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - name: ds1 modifiers: ['incomp_areas'] optional_prerequisites: - ds2 mod_opt_prereq: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds1 optional_prerequisites: - ds2 mod_bad_opt: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds1 optional_prerequisites: - ds9_fail_load mod_opt_only: modifier: 
!!python/name:satpy.tests.utils.FakeModifier optional_prerequisites: - ds2 mod_wl: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - wavelength: 0.2 modifiers: ['mod1'] sunz_corr: modifier: !!python/name:satpy.tests.utils.FakeModifier optional_prerequisites: - sunz_angles_NOPE rayleigh_corr: modifier: !!python/name:satpy.tests.utils.FakeModifier optional_prerequisites: - wavelength: 30.64 modifiers: ['sunz_corr'] composites: comp1: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 comp2: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds2 comp3: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds2 - ds3 comp4: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - comp2 - ds3 comp5: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds2 optional_prerequisites: - ds3 comp6: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds2 optional_prerequisites: - comp2 comp7: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - comp2 optional_prerequisites: - ds2 comp8: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds_NOPE - comp2 comp9: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - comp2 optional_prerequisites: - ds_NOPE comp10: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds1 modifiers: ["mod1"] - comp2 comp11: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - 0.22 - 0.48 - 0.85 comp12: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - wavelength: 0.22 modifiers: ["mod1"] - wavelength: 0.48 modifiers: ["mod1"] - wavelength: 0.85 modifiers: ["mod1"] comp13: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["res_change"] comp14: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 comp15: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds9_fail_load comp16: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 optional_prerequisites: - ds9_fail_load comp17: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - comp15 comp18: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds3 - name: ds4 modifiers: ["mod1", "mod3"] - name: ds5 modifiers: ["mod1", "incomp_areas"] comp18_2: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds3 - name: ds4 modifiers: ["mod1", "mod3"] - name: ds5 modifiers: ["mod1", "incomp_areas_opt"] comp19: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["res_change"] - comp13 - ds2 comp20: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["mod_opt_prereq"] comp21: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["mod_bad_opt"] comp22: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["mod_opt_only"] comp23: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - 0.8 static_image: compositor: !!python/name:satpy.tests.utils.FakeCompositor comp24: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds11 - name: ds12 comp25: compositor: 
!!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: comp24 - name: ds5 ahi_green: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - wavelength: 30.5 modifiers: ['sunz_corr', 'rayleigh_corr'] - wavelength: 30.85 modifiers: ['sunz_corr'] comp26: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: comp14 comp27: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ds5 modifiers: ['mod1', 'mod_opt_only'] - compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ds1 modifiers: ['mod1', 'mod_opt_only'] - compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ds2 modifiers: ['mod1', 'mod_opt_only'] - compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ds13 modifiers: ['mod1', 'mod_opt_only'] comp_multi: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds4_b satpy-0.34.0/satpy/tests/etc/readers/000077500000000000000000000000001420401153000174265ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/etc/readers/fake1.yaml000066400000000000000000000052741420401153000213110ustar00rootroot00000000000000reader: name: fake1 description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor] datasets: lons: name: lons resolution: [250, 500, 1000] standard_name: longitude file_type: fake_file1 lats: name: lats resolution: [250, 500, 1000] standard_name: latitude file_type: fake_file1 ds1: name: ds1 resolution: 250 calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] ds2: name: ds2 resolution: 250 calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] ds3: name: ds3 file_type: fake_file1 coordinates: [lons, lats] ds4: name: ds4 calibration: ["radiance", "reflectance"] file_type: fake_file1 coordinates: [lons, lats] ds5: name: ds5 resolution: 250: file_type: fake_file1_highres 500: file_type: fake_file1 1000: file_type: fake_file1 coordinates: [lons, lats] ds6: name: ds6 resolution: 250 wavelength: [0.1, 0.2, 0.3] calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] ds7: name: ds7 wavelength: [0.4, 0.5, 0.6] file_type: fake_file1 coordinates: [lons, lats] ds8: name: ds8 wavelength: [0.7, 0.8, 0.9] file_type: fake_file1 coordinates: [lons, lats] ds9_fail_load: name: ds9_fail_load wavelength: [1.0, 1.1, 1.2] file_type: fake_file1 coordinates: [lons, lats] ds10: name: ds10 wavelength: [0.75, 0.85, 0.95] file_type: fake_file1 coordinates: [lons, lats] ds11: name: ds11 resolution: [500, 1000] file_type: fake_file1 coordinates: [lons, lats] ds12: name: ds12 resolution: [500, 1000] file_type: fake_file1 coordinates: [lons, lats] ds13_modified: name: ds13 resolution: 250 modifiers: ["mod1"] file_type: fake_file1 coordinates: [lons, lats] B02: name: B02 resolution: 1000 wavelength: [30.49, 30.51, 30.53] file_type: fake_file1 coordinates: [lons, lats] B03: name: B03 resolution: 500 wavelength: [30.62, 30.64, 30.66] file_type: fake_file1 coordinates: [lons, lats] B04: name: B04 resolution: 1000 wavelength: [30.83, 30.85, 30.87] file_type: fake_file1 coordinates: [lons, lats] file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake1_{file_idx:d}.txt'] sensor: fake_sensor fake_file1_highres: file_reader: 
!!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake1_highres_{file_idx:d}.txt'] sensor: fake_sensor satpy-0.34.0/satpy/tests/etc/readers/fake1_1ds.yaml000066400000000000000000000007161420401153000220540ustar00rootroot00000000000000reader: name: fake1_1ds description: Fake reader used for easier testing with only one dataset reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor] datasets: ds1: name: ds1 resolution: 250 calibration: "reflectance" file_type: fake_file1 file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake1_1ds_{file_idx:d}.txt'] sensor: fake_sensor satpy-0.34.0/satpy/tests/etc/readers/fake2_1ds.yaml000066400000000000000000000006721420401153000220560ustar00rootroot00000000000000reader: name: fake2_1ds description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor2] datasets: ds2: name: ds2 resolution: 250 calibration: "reflectance" file_type: fake_file1 file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake2_1ds_{file_idx:d}.txt'] sensor: fake_sensor2 satpy-0.34.0/satpy/tests/etc/readers/fake2_3ds.yaml000066400000000000000000000012651420401153000220570ustar00rootroot00000000000000reader: name: fake2_1ds description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor2] datasets: lons: name: lons resolution: [250, 500, 1000] standard_name: longitude file_type: fake_file1 lats: name: lats resolution: [250, 500, 1000] standard_name: latitude file_type: fake_file1 ds2: name: ds2 resolution: 250 calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake2_3ds_{file_idx:d}.txt'] sensor: fake_sensor2 satpy-0.34.0/satpy/tests/etc/readers/fake3.yaml000066400000000000000000000010101420401153000212730ustar00rootroot00000000000000reader: name: fake3 description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor] datasets: duplicate1: name: duplicate1 wavelength: [0.1, 0.2, 0.3] file_type: fake_file1 duplicate2: name: duplicate2 wavelength: [0.1, 0.2, 0.3] file_type: fake_file1 file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake3_{file_idx:d}.txt'] sensor: fake_sensor satpy-0.34.0/satpy/tests/etc/readers/fake4.yaml000066400000000000000000000014721420401153000213100ustar00rootroot00000000000000reader: name: fake4 description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor_4] datasets: lons: name: lons resolution: [250, 500, 1000] standard_name: longitude file_type: fake_file4 lats: name: lats resolution: [250, 500, 1000] standard_name: latitude file_type: fake_file4 ds4_a: name: ds4_a resolution: 1000 wavelength: [0.1, 0.2, 0.3] file_type: fake_file4 coordinates: [lons, lats] ds4_b: name: ds4_b resolution: 250 wavelength: [0.4, 0.5, 0.6] file_type: fake_file4 coordinates: [lons, lats] file_types: fake_file4: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake4_{file_idx:d}.txt'] sensor: fake_sensor_4 
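# Usage sketch (an assumption for illustration, not part of the reader config):
# these fake readers are intended to be discoverable only when the test suite
# points Satpy's config path at satpy/tests/etc. Filenames passed to the Scene
# just need to match the pattern above; FakeFileHandler from satpy.tests.utils
# builds its data in memory and never opens them.
#
#   from satpy import Scene
#   scn = Scene(reader='fake4', filenames=['fake4_0.txt'])
#   scn.load(['ds4_a', 'ds4_b'])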
satpy-0.34.0/satpy/tests/features/000077500000000000000000000000001420401153000170445ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/features/feature-load.feature000066400000000000000000000032751420401153000230000ustar00rootroot00000000000000Feature: Simple and intuitive scene loading (sc. 1) The scientific user explores the data and prototypes new algorithms. It needs access not only to the calibrated data, but also to the raw data and probably a majority of the metadata. The user would work with data locally, and it has to be easy to tell satpy where the data is. Providing filename templates or editing config file before starting working is a pain, so it should be avoided. To load the data should be a simple 1-step procedure. At load time, the user provides the data and metadata he/she needs, and if some items are unavailable/unaccessible, the user should be informed in a gentle but clear way (ie. no crash). The data and metadata available from the file have to be explorable, so that the user don’t need to guess what the (meta)data is called. @download Scenario: 1-step data loading Given data is available When user loads the data without providing a config file Then the data is available in a scene object @download Scenario: No crash when metadata is missing Given data is available When user loads the data without providing a config file And some items are not available Then the data is available in a scene object @download Scenario: Data is explorable Given data is available When user wants to know what data is available Then available datasets is returned Scenario: Accessing datasets by name prefers less modified datasets Given datasets with the same name When a dataset is retrieved by name Then the least modified version of the dataset is returned satpy-0.34.0/satpy/tests/features/feature-real-load-process-write.feature000066400000000000000000000175031420401153000265240ustar00rootroot00000000000000Feature: Loading real data in many formats with the same command This feature loads real data from disk and generates resampled images. This is made as a way to system test satpy. To provide test data to this feature, add a directory called `test_data` in the current directory. Under this directory, created a directory for each data format you want to test, and under this a directory with data called data and a directory with reference images called `ref`, eg: test_data |_ seviri_l1b_hrit | |_ data | | |_ [all the MSG SEVIRI data files] | |_ ref | |_ overview_eurol.png | |_ ... |_ viirs_sdr | |_ data | | |_ [all the viirs SDR files] | |_ ref | |_ true_color_eurol.png | |_ ... ... 
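# Invocation sketch (assuming behave is installed and test_data has been
# populated as described above); scenarios whose data directory is missing are
# skipped by the step implementation rather than failed:
#
#   behave satpy/tests/features/feature-real-load-process-write.feature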
@wip Scenario Outline: Reading and processing of real data Given data is available When the user loads the composite And the user resamples the data to And the user saves the composite to disk Then the resulting image should match the reference image Examples: AAPP L1 data | format | composite | area | | avhrr_l1b_aapp | overview | eurol | Examples: ABI L1 data | format | composite | area | | abi_l1b | overview | - | | abi_l1b | airmass | - | | abi_l1b | natural | - | # Examples: ACSPO data # | format | composite | area | # | acspo | overview | - | # | acspo | true_color | - | # | acspo | true_color | north_america | Examples: AHI L1 data | format | composite | area | | ahi_hsd | overview | - | | ahi_hsd | true_color | - | | ahi_hsd | true_color | australia | Examples: AMSR2 L1 data | format | composite | area | | amsr2_l1b | ice | moll | Examples: CLAVR-X data | format | composite | area | | clavrx | cloudtype | usa | Examples: EPS L1 data | format | composite | area | | avhrr_l1b_eps | overview | eurol | Examples: FCI L1c data | format | composite | area | | fci_l1c_nc | overview | eurol | | fci_l1c_nc | cloudtop | eurol | | fci_l1c_nc | true_color | eurol | Examples: GAC data | format | composite | area | | avhrr_l1b_gaclac | overview | eurol | | avhrr_l1b_gaclac | cloudtop | eurol | # Examples: Generic Images # Examples: GEOCAT data # | format | composite | area | # | geocat | overview | - | # | geocat | true_color | - | # | geocat | true_color | north_america | # Examples: GHRSST OSISAF data # | format | composite | area | # | ghrsst_osisaf | overview | - | # | ghrsst_osisaf | true_color | - | # | ghrsst_osisaf | true_color | north_america | # Examples: Caliop v3 data # | format | composite | area | # | hdf4_caliopv3 | overview | - | # | hdf4_caliopv3 | true_color | - | # | hdf4_caliopv3 | true_color | north_america | Examples: MODIS HDF4-EOS data | format | composite | area | | modis_l1b | overview | eurol | | modis_l1b | true_color_lowres | eurol | | modis_l1b | true_color | eurol | Examples: Electro-L N2 HRIT data | format | composite | area | | electrol_hrit | overview | india | | electrol_hrit | cloudtop | india | Examples: GOES HRIT data | format | composite | area | | goes-imager_hrit | overview | usa | | goes-imager_hrit | cloudtop | usa | Examples: Himawari HRIT data | format | composite | area | | ahi_hrit | overview | australia | | ahi_hrit | cloudtop | australia | Examples: MSG HRIT data | format | composite | area | | seviri_l1b_hrit| overview | eurol | | seviri_l1b_hrit| cloudtop | eurol | Examples: HRPT data | format | composite | area | | avhrr_l1b_hrpt | overview | eurol | | avhrr_l1b_hrpt | cloudtop | eurol | # Examples: IASI L2 data # Examples: Lightning Imager L2 # Examples: MAIA data Examples: MSG Native data | format | composite | area | | seviri_l1b_native | overview | eurol | | seviri_l1b_native | snow | eurol | | seviri_l1b_native | HRV | - | | seviri_l1b_native | overview | - | Examples: NWCSAF GEO data | format | composite | area | | nwcsaf-geo | cloudtype | eurol | | nwcsaf-geo | ctth | eurol | Examples: NWCSAF PPS data | format | composite | area | | nwcsaf-pps_nc | cloudtype | eurol | | nwcsaf-pps_nc | ctth | eurol | Examples: MSG Native data | format | composite | area | | seviri_l1b_native | overview | eurol | | seviri_l1b_native | cloudtop | eurol | Examples: OLCI L1 data | format | composite | area | | olci_l1b | true_color | eurol | Examples: OLCI L2 data | format | composite | area | | olci_l2 | karo | eurol | Examples: SLSTR L1 data | format | 
composite | area | | slstr_l1b | true_color | eurol | # Examples: NUCAPS data # Examples: OMPS EDR Examples: SAFE MSI L1 data | format | composite | area | | msi_safe | true_color | eurol | Examples: SAR-C L1 data | format | composite | area | | sar-c_safe | sar-ice | euron1 | | sar-c_safe | sar-rgb | euron1 | | sar-c_safe | sar-quick | euron1 | # Examples: SCATSAT 1 data # | format | composite | area | # | sar_c | ice | eurol | Examples: VIIRS compact data | format | composite | area | | viirs_compact | overview | eurol | | viirs_compact | true_color | eurol | Examples: VIIRS L1B data | format | composite | area | | viirs_l1b | overview | eurol | | viirs_l1b | true_color | eurol | Examples: VIIRS SDR data | format | composite | area | | viirs_sdr | overview | eurol | | viirs_sdr | true_color_lowres | eurol | | viirs_sdr | fog | eurol | | viirs_sdr | dust | eurol | | viirs_sdr | ash | eurol | | viirs_sdr | natural_sun_lowres | eurol | | viirs_sdr | snow_age | eurol | | viirs_sdr | fire_temperature | eurol | | viirs_sdr | fire_temperature_awips | eurol | | viirs_sdr | fire_temperature_eumetsat | eurol | | viirs_sdr | fire_temperature_39refl | eurol | satpy-0.34.0/satpy/tests/features/feature-save.feature000066400000000000000000000016141420401153000230120ustar00rootroot00000000000000# Created by a001673 at 2015-12-07 Feature: Simple and intuitive saving Visualization of the data is important and should be an easy one-line, like eg show(my_dataset). In a similar way, saving the data to disk should be simple, for example save(dataset, filename), with sensible defaults provided depending on the filename extension (eg. geotiff for .tif, netcdf for .nc). Saving several datasets at once would be nice to have. Scenario: 1-step showing dataset Given a dataset is available When the show command is called Then an image should pop up Scenario: 1-step saving dataset Given a dataset is available When the save_dataset command is called Then a file should be saved on disk Scenario: 1-step saving all datasets Given a bunch of datasets are available When the save_datasets command is called Then a bunch of files should be saved on disk satpy-0.34.0/satpy/tests/features/steps/000077500000000000000000000000001420401153000202025ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/features/steps/steps-load.py000066400000000000000000000120151420401153000226260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
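# The "data is available" step below fetches two VIIRS SDR granules from
# zenodo.org into /tmp the first time it runs and reuses the cached copies on
# subsequent runs.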
"""Behaviour steps for loading.""" import os from urllib.request import urlopen from behave import given, then, use_step_matcher, when use_step_matcher("re") @given(u'data is available') def step_impl_data_available(context): """Make data available.""" if not os.path.exists('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5'): response = urlopen('https://zenodo.org/record/16355/files/' 'SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5') with open('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5', mode="w") as fp: fp.write(response.read()) if not os.path.exists('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5'): response = urlopen('https://zenodo.org/record/16355/files/' 'GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5') with open('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5', mode="w") as fp: fp.write(response.read()) @when(u'user loads the data without providing a config file') def step_impl_user_loads_no_config(context): """Load the data without a config.""" from datetime import datetime from satpy import Scene, find_files_and_readers os.chdir("/tmp/") readers_files = find_files_and_readers(sensor='viirs', start_time=datetime(2015, 3, 11, 11, 20), end_time=datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=readers_files) scn.load(["M02"]) context.scene = scn @then(u'the data is available in a scene object') def step_impl_data_available_in_scene(context): """Check that the data is available in the scene.""" assert context.scene["M02"] is not None assert context.scene.get("M01") is None @when(u'some items are not available') def step_impl_items_not_available(context): """Load some data.""" context.scene.load(["M01"]) @when(u'user wants to know what data is available') def step_impl_user_checks_availability(context): """Check availability.""" from datetime import datetime from satpy import Scene, find_files_and_readers os.chdir("/tmp/") reader_files = find_files_and_readers(sensor="viirs", start_time=datetime(2015, 3, 11, 11, 20), end_time=datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=reader_files) context.available_dataset_ids = scn.available_dataset_ids() @then(u'available datasets are returned') def step_impl_available_datasets_are_returned(context): """Check that datasets are returned.""" assert (len(context.available_dataset_ids) >= 5) @given("datasets with the same name") def step_impl_datasets_with_same_name(context): """Datasets with the same name but different other ID parameters.""" from xarray import DataArray from satpy import Scene from satpy.tests.utils import make_dataid scn = Scene() scn[make_dataid(name='ds1', calibration='radiance')] = DataArray([[1, 2], [3, 4]]) scn[make_dataid(name='ds1', resolution=500, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) scn[make_dataid(name='ds1', resolution=250, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) scn[make_dataid(name='ds1', resolution=1000, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) scn[make_dataid(name='ds1', resolution=500, calibration='radiance', modifiers=('mod1',))] = \ DataArray([[5, 6], [7, 8]]) ds_id = make_dataid(name='ds1', resolution=1000, calibration='radiance', modifiers=('mod1', 'mod2')) scn[ds_id] = DataArray([[5, 6], [7, 8]]) context.scene = scn @when("a dataset is retrieved by name") def step_impl_dataset_retrieved_by_name(context): """Use the Scene's getitem 
method to get a dataset.""" context.returned_dataset = context.scene['ds1'] @then("the least modified version of the dataset is returned") def step_impl_least_modified_dataset_returned(context): """Check that the dataset should be one of the least modified datasets.""" assert(len(context.returned_dataset.attrs['modifiers']) == 0) satpy-0.34.0/satpy/tests/features/steps/steps-real-load-process-write.py000066400000000000000000000107061420401153000263600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Step for the real load-process-write tests.""" import fnmatch import os from tempfile import NamedTemporaryFile import numpy as np from behave import given, then, when from PIL import Image def fft_proj_rms(a1, a2): """Compute the RMS of differences between two images. Compute the RMS of differences between two FFT vectors of a1 and projection of FFT vectors of a2. This metric is sensitive to large scale changes and image noise but insensitive to small rendering differences. """ ms = 0 # for i in range(a1.shape[-1]): fr1 = np.fft.rfftn(a1) fr2 = np.fft.rfftn(a2) ps1 = np.log10(fr1 * fr1.conj()).real ps2 = np.log10(fr2 * fr2.conj()).real p1 = np.arctan2(fr1.imag, fr1.real) p2 = np.arctan2(fr2.imag, fr2.real) theta = p2 - p1 l_factor = ps2 * np.cos(theta) ms += np.sum(((l_factor - ps1) ** 2)) / float(ps1.size) rms = np.sqrt(ms) return rms def assert_images_match(image1, image2, threshold=0.1): """Assert that images are matching.""" img1 = np.asarray(Image.open(image1)) img2 = np.asarray(Image.open(image2)) rms = fft_proj_rms(img1, img2) assert rms <= threshold, "Images {0} and {1} don't match: {2}".format( image1, image2, rms) def get_all_files(directory, pattern): """Find all files matching *pattern* under ``directory``.""" matches = [] for root, _, filenames in os.walk(directory): for filename in fnmatch.filter(filenames, pattern): matches.append(os.path.join(root, filename)) return matches def before_all(context): """Enable satpy debugging.""" if not context.config.log_capture: from satpy.utils import debug_on debug_on() @given(u'{dformat} data is available') def step_impl_input_files_exists(context, dformat): """Check that input data exists on disk.""" data_path = os.path.join('test_data', dformat) data_available = os.path.exists(data_path) if not data_available: context.scenario.skip(reason="No test data available for " + dformat) else: context.dformat = dformat context.data_path = data_path @when(u'the user loads the {composite} composite') def step_impl_create_scene_and_load_single(context, composite): """Create a Scene and load a single composite.""" from satpy import Scene scn = Scene(reader=context.dformat, filenames=get_all_files(os.path.join(context.data_path, 'data'), '*')) scn.load([composite]) context.scn = scn context.composite = composite @when(u'the user resamples the data to {area}') def 
step_impl_resample_scene(context, area): """Resample the scene to an area or use the native resampler.""" if area != '-': context.lscn = context.scn.resample(area) else: context.lscn = context.scn.resample(resampler='native') context.area = area @when(u'the user saves the composite to disk') def step_impl_save_to_png(context): """Call Scene.save_dataset to write a PNG image.""" with NamedTemporaryFile(suffix='.png', delete=False) as tmp_file: context.lscn.save_dataset(context.composite, filename=tmp_file.name) context.new_filename = tmp_file.name @then(u'the resulting image should match the reference image') def step_impl_compare_two_png_images(context): """Compare two PNG image files.""" if context.area == '-': ref_filename = context.composite + ".png" else: ref_filename = context.composite + "_" + context.area + ".png" ref_filename = os.path.join(context.data_path, "ref", ref_filename) assert os.path.exists(ref_filename), "Missing reference file." assert_images_match(ref_filename, context.new_filename) os.remove(context.new_filename) satpy-0.34.0/satpy/tests/features/steps/steps-save.py000066400000000000000000000067601420401153000226570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Behave steps related to saving or showing datasets.""" from unittest.mock import patch from behave import given, then, use_step_matcher, when use_step_matcher("re") @given("a dataset is available") def step_impl_create_scene_one_dataset(context): """Create a Scene with a fake dataset for testing. Args: context (behave.runner.Context): Test context """ from xarray import DataArray from satpy import Scene scn = Scene() scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) context.scene = scn @when("the show command is called") def step_impl_scene_show(context): """Call the Scene.show method. Args: context (behave.runner.Context): Test context """ with patch('trollimage.xrimage.XRImage.show') as mock_show: context.scene.show("MyDataset") mock_show.assert_called_once_with() @then("an image should pop up") def step_impl_image_pop_up(context): """Check that a image window pops up (no-op currently). Args: context (behave.runner.Context): Test context """ @when("the save_dataset command is called") def step_impl_save_dataset_to_png(context): """Run Scene.save_dataset to create a PNG image. Args: context (behave.runner.Context): Test context """ context.filename = "/tmp/test_dataset.png" context.scene.save_dataset("MyDataset", context.filename) @then("a file should be saved on disk") def step_impl_file_exists_and_remove(context): """Check that a file exists on disk and then remove it. 
Args: context (behave.runner.Context): Test context """ import os assert(os.path.exists(context.filename)) os.remove(context.filename) @given("a bunch of datasets are available") def step_impl_create_scene_two_datasets(context): """Create a Scene with two fake datasets for testing. Args: context (behave.runner.Context): Test context """ from xarray import DataArray from satpy import Scene scn = Scene() scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=['y', 'x']) context.scene = scn @when("the save_datasets command is called") def step_impl_save_datasets(context): """Run Scene.save_datsets to create PNG images. Args: context (behave.runner.Context): Test context """ context.scene.save_datasets(writer="simple_image", filename="{name}.png") @then("a bunch of files should be saved on disk") def step_impl_check_two_pngs_exist(context): """Check that two PNGs exist. Args: context (behave.runner.Context): Test context """ import os for filename in ["MyDataset.png", "MyDataset2.png"]: assert(os.path.exists(filename)) os.remove(filename) satpy-0.34.0/satpy/tests/modifier_tests/000077500000000000000000000000001420401153000202465ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/modifier_tests/__init__.py000066400000000000000000000011641420401153000223610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 - 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. """Tests for modifiers.""" satpy-0.34.0/satpy/tests/modifier_tests/test_crefl.py000066400000000000000000000444121420401153000227570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 - 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
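# These tests exercise satpy.modifiers._crefl.ReflectanceCorrector with ABI,
# VIIRS and MODIS inputs; the VIIRS cases mock the optional DEM retrieval
# (CMGDEM.hdf / tbase.hdf) with small pyhdf files written on the fly.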
"""Tests for the CREFL ReflectanceCorrector modifier.""" from contextlib import contextmanager from datetime import datetime from unittest import mock import numpy as np import pytest import xarray as xr from dask import array as da from pyresample.geometry import AreaDefinition from ..utils import assert_maximum_dask_computes @contextmanager def mock_cmgdem(tmpdir, url): """Create fake file representing CMGDEM.hdf.""" yield from _mock_and_create_dem_file(tmpdir, url, "averaged elevation", fill_value=-9999) @contextmanager def mock_tbase(tmpdir, url): """Create fake file representing tbase.hdf.""" yield from _mock_and_create_dem_file(tmpdir, url, "Elevation") def _mock_and_create_dem_file(tmpdir, url, var_name, fill_value=None): if not url: yield None return rmock_obj, dem_fn = _mock_dem_retrieve(tmpdir, url) _create_fake_dem_file(dem_fn, var_name, fill_value) try: yield rmock_obj finally: rmock_obj.stop() def _mock_dem_retrieve(tmpdir, url): rmock_obj = mock.patch('satpy.modifiers._crefl.retrieve') rmock = rmock_obj.start() dem_fn = str(tmpdir.join(url)) rmock.return_value = dem_fn return rmock_obj, dem_fn def _create_fake_dem_file(dem_fn, var_name, fill_value): from pyhdf.SD import SD, SDC h = SD(dem_fn, SDC.WRITE | SDC.CREATE) dem_var = h.create(var_name, SDC.INT16, (10, 10)) dem_var[:] = np.zeros((10, 10), dtype=np.int16) if fill_value is not None: dem_var.setfillvalue(fill_value) h.end() def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units='degrees', calibration=None): return xr.DataArray(data, dims=('y', 'x'), attrs={ 'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength, 'modifiers': None, 'calibration': calibration, 'resolution': 371, 'name': name, 'standard_name': standard_name, 'platform_name': 'Suomi-NPP', 'polarization': None, 'sensor': 'viirs', 'units': units, 'start_time': datetime(2012, 2, 25, 18, 1, 24, 570942), 'end_time': datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area, 'ancillary_variables': [] }) class TestReflectanceCorrectorModifier: """Test the CREFL modifier.""" @staticmethod def data_area_ref_corrector(): """Create test area definition and data.""" rows = 5 cols = 10 area = AreaDefinition( 'some_area_name', 'On-the-fly area', 'geosabii', {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', 'units': 'm'}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) data = np.zeros((rows, cols)) + 25 data[3, :] += 25 data[4:, :] += 50 data = da.from_array(data, chunks=2) return area, data def test_reflectance_corrector_abi(self): """Test ReflectanceCorrector modifier with ABI data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector(optional_prerequisites=[ make_dsq(name='satellite_azimuth_angle'), make_dsq(name='satellite_zenith_angle'), make_dsq(name='solar_azimuth_angle'), make_dsq(name='solar_zenith_angle')], name='C01', prerequisites=[], wavelength=(0.45, 0.47, 0.49), resolution=1000, calibration='reflectance', modifiers=('sunz_corrected', 'rayleigh_corrected_crefl',), sensor='abi') assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') assert ref_cor.attrs['calibration'] == 'reflectance' assert ref_cor.attrs['wavelength'] == (0.45, 0.47, 0.49) assert ref_cor.attrs['name'] == 'C01' assert ref_cor.attrs['resolution'] == 1000 assert ref_cor.attrs['sensor'] == 'abi' assert ref_cor.attrs['prerequisites'] == [] assert 
ref_cor.attrs['optional_prerequisites'] == [ make_dsq(name='satellite_azimuth_angle'), make_dsq(name='satellite_zenith_angle'), make_dsq(name='solar_azimuth_angle'), make_dsq(name='solar_zenith_angle')] area, dnb = self.data_area_ref_corrector() c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={ 'satellite_longitude': -89.5, 'satellite_latitude': 0.0, 'satellite_altitude': 35786023.4375, 'platform_name': 'GOES-16', 'calibration': 'reflectance', 'units': '%', 'wavelength': (0.45, 0.47, 0.49), 'name': 'C01', 'resolution': 1000, 'sensor': 'abi', 'start_time': '2017-09-20 17:30:40.800000', 'end_time': '2017-09-20 17:41:17.500000', 'area': area, 'ancillary_variables': [] }) with assert_maximum_dask_computes(0): res = ref_cor([c01], []) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs['satellite_longitude'] == -89.5 assert res.attrs['satellite_latitude'] == 0.0 assert res.attrs['satellite_altitude'] == 35786023.4375 assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') assert res.attrs['platform_name'] == 'GOES-16' assert res.attrs['calibration'] == 'reflectance' assert res.attrs['units'] == '%' assert res.attrs['wavelength'] == (0.45, 0.47, 0.49) assert res.attrs['name'] == 'C01' assert res.attrs['resolution'] == 1000 assert res.attrs['sensor'] == 'abi' assert res.attrs['start_time'] == '2017-09-20 17:30:40.800000' assert res.attrs['end_time'] == '2017-09-20 17:41:17.500000' assert res.attrs['area'] == area assert res.attrs['ancillary_variables'] == [] data = res.values assert abs(np.nanmean(data) - 26.00760944144745) < 1e-10 assert data.shape == (5, 10) unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(unique, [-1.0, 4.210745457958135, 6.7833906076177595, 8.730371329824473, 10.286627569545209, 11.744159436709374, 12.20226097829902, 13.501444598985305, 15.344399223932212, 17.173329483996515, 17.28798660754271, 18.29594550575925, 19.076835059905125, 19.288331720959864, 19.77043407084455, 19.887082168377006, 20.091028778326375, 20.230341149334617, 20.457671064690196, 20.82686905639114, 21.021094816441195, 21.129963777952124, 41.601857910095575, 43.963919057675504, 46.21672174361075, 46.972099490462085, 47.497072794632835, 47.80393007974336, 47.956765988770385, 48.043025685032106, 51.909142813383916, 58.8234273736508, 68.84706145641482, 69.91085190887961, 71.10179768327806, 71.33161009169649]) @pytest.mark.parametrize( 'url,dem_mock_cm,dem_sds', [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), ("tbase.hdf", mock_tbase, "Elevation"), ]) def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): """Test ReflectanceCorrector modifier with VIIRS data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector( optional_prerequisites=[ make_dsq(name='satellite_azimuth_angle'), make_dsq(name='satellite_zenith_angle'), make_dsq(name='solar_azimuth_angle'), make_dsq(name='solar_zenith_angle') ], name='I01', prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, calibration='reflectance', modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), sensor='viirs', url=url, dem_sds=dem_sds, ) assert ref_cor.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband') assert ref_cor.attrs['calibration'] == 'reflectance' assert ref_cor.attrs['wavelength'] == (0.6, 0.64, 0.68) assert ref_cor.attrs['name'] == 'I01' assert ref_cor.attrs['resolution'] == 371 
assert ref_cor.attrs['sensor'] == 'viirs' assert ref_cor.attrs['prerequisites'] == [] assert ref_cor.attrs['optional_prerequisites'] == [ make_dsq(name='satellite_azimuth_angle'), make_dsq(name='satellite_zenith_angle'), make_dsq(name='solar_azimuth_angle'), make_dsq(name='solar_zenith_angle')] area, data = self.data_area_ref_corrector() c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance', wavelength=(0.6, 0.64, 0.68), units='%', calibration='reflectance') c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle') c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle') c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') with dem_mock_cm(tmpdir, url), assert_maximum_dask_computes(0): res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs['wavelength'] == (0.6, 0.64, 0.68) assert res.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband') assert res.attrs['calibration'] == 'reflectance' assert res.attrs['resolution'] == 371 assert res.attrs['name'] == 'I01' assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' assert res.attrs['platform_name'] == 'Suomi-NPP' assert res.attrs['sensor'] == 'viirs' assert res.attrs['units'] == '%' assert res.attrs['start_time'] == datetime(2012, 2, 25, 18, 1, 24, 570942) assert res.attrs['end_time'] == datetime(2012, 2, 25, 18, 11, 21, 175760) assert res.attrs['area'] == area assert res.attrs['ancillary_variables'] == [] data = res.values assert abs(np.mean(data) - 40.7578684169142) < 1e-10 assert data.shape == (5, 10) unique = np.unique(data) np.testing.assert_allclose(unique, [25.20341702519979, 52.38819447051263, 75.79089653845898]) def test_reflectance_corrector_modis(self): """Test ReflectanceCorrector modifier with MODIS data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq sataa_did = make_dsq(name='satellite_azimuth_angle') satza_did = make_dsq(name='satellite_zenith_angle') solaa_did = make_dsq(name='solar_azimuth_angle') solza_did = make_dsq(name='solar_zenith_angle') ref_cor = ReflectanceCorrector( optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name='1', prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration='reflectance', modifiers=('sunz_corrected', 'rayleigh_corrected_crefl'), sensor='modis') assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') assert ref_cor.attrs['calibration'] == 'reflectance' assert ref_cor.attrs['wavelength'] == (0.62, 0.645, 0.67) assert ref_cor.attrs['name'] == '1' assert ref_cor.attrs['resolution'] == 250 assert ref_cor.attrs['sensor'] == 'modis' assert ref_cor.attrs['prerequisites'] == [] assert ref_cor.attrs['optional_prerequisites'] == [ make_dsq(name='satellite_azimuth_angle'), make_dsq(name='satellite_zenith_angle'), make_dsq(name='solar_azimuth_angle'), make_dsq(name='solar_zenith_angle')] area, dnb = self.data_area_ref_corrector() def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000): return xr.DataArray(dnb, dims=('y', 'x'), attrs={ 'wavelength': wavelength, 'level': None, 'modifiers': modifiers, 'calibration': calibration, 'resolution': resolution, 'name': name, 'coordinates': ['longitude', 'latitude'], 'platform_name': 'EOS-Aqua', 'polarization': 
None, 'sensor': 'modis', 'units': '%', 'start_time': datetime(2012, 8, 13, 18, 46, 1, 439838), 'end_time': datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area, 'ancillary_variables': [] }) c01 = make_xarray('1', 'reflectance', wavelength=(0.62, 0.645, 0.67), modifiers='sunz_corrected', resolution=500) c02 = make_xarray('satellite_azimuth_angle', None) c03 = make_xarray('satellite_zenith_angle', None) c04 = make_xarray('solar_azimuth_angle', None) c05 = make_xarray('solar_zenith_angle', None) res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs['wavelength'] == (0.62, 0.645, 0.67) assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl',) assert res.attrs['calibration'] == 'reflectance' assert res.attrs['resolution'] == 500 assert res.attrs['name'] == '1' assert res.attrs['platform_name'] == 'EOS-Aqua' assert res.attrs['sensor'] == 'modis' assert res.attrs['units'] == '%' assert res.attrs['start_time'] == datetime(2012, 8, 13, 18, 46, 1, 439838) assert res.attrs['end_time'] == datetime(2012, 8, 13, 18, 57, 47, 746296) assert res.attrs['area'] == area assert res.attrs['ancillary_variables'] == [] data = res.values if abs(np.mean(data) - 38.734365117099145) >= 1e-10: raise AssertionError('{} is not within {} of {}'.format(np.mean(data), 1e-10, 38.734365117099145)) assert data.shape == (5, 10) unique = np.unique(data) np.testing.assert_allclose(unique, [24.641586, 50.431692, 69.315375]) def test_reflectance_corrector_bad_prereqs(self): """Test ReflectanceCorrector modifier with wrong number of inputs.""" from satpy.modifiers._crefl import ReflectanceCorrector ref_cor = ReflectanceCorrector("test") pytest.raises(ValueError, ref_cor, [1], [2, 3, 4]) pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], []) pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4]) @pytest.mark.parametrize( 'url,dem_mock_cm,dem_sds', [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), ("tbase.hdf", mock_tbase, "Elevation"), ]) def test_reflectance_corrector_different_chunks(self, tmpdir, url, dem_mock_cm, dem_sds): """Test that the modifier works with different chunk sizes for inputs. The modifier uses dask's "map_blocks". If the input chunks aren't the same an error is raised. 
""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector( optional_prerequisites=[ make_dsq(name='satellite_azimuth_angle'), make_dsq(name='satellite_zenith_angle'), make_dsq(name='solar_azimuth_angle'), make_dsq(name='solar_zenith_angle') ], name='I01', prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, calibration='reflectance', modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), sensor='viirs', url=url, dem_sds=dem_sds, ) area, data = self.data_area_ref_corrector() c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance', wavelength=(0.6, 0.64, 0.68), units='%', calibration='reflectance') c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle') c02.data = c02.data.rechunk((1, -1)) c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle') c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') with dem_mock_cm(tmpdir, url): res = ref_cor([c01], [c02, c03, c04, c05]) # make sure it can actually compute res.compute() satpy-0.34.0/satpy/tests/reader_tests/000077500000000000000000000000001420401153000177125ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/reader_tests/__init__.py000066400000000000000000000013761420401153000220320ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The reader tests package.""" satpy-0.34.0/satpy/tests/reader_tests/_modis_fixtures.py000066400000000000000000000577511420401153000235060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""MODIS L1b and L2 test fixtures.""" from __future__ import annotations from datetime import datetime, timedelta from typing import Optional import numpy as np import pytest from pyhdf.SD import SD, SDC # Level 1 Fixtures AVAILABLE_1KM_VIS_PRODUCT_NAMES = [str(x) for x in range(8, 13)] AVAILABLE_1KM_VIS_PRODUCT_NAMES += ['13lo', '13hi', '14lo', '14hi'] AVAILABLE_1KM_VIS_PRODUCT_NAMES += [str(x) for x in range(15, 20)] AVAILABLE_1KM_IR_PRODUCT_NAMES = [str(x) for x in range(20, 37)] AVAILABLE_1KM_PRODUCT_NAMES = AVAILABLE_1KM_VIS_PRODUCT_NAMES + AVAILABLE_1KM_IR_PRODUCT_NAMES AVAILABLE_HKM_PRODUCT_NAMES = [str(x) for x in range(3, 8)] AVAILABLE_QKM_PRODUCT_NAMES = ['1', '2'] SCAN_LEN_5KM = 6 # 3 scans of 5km data SCAN_WIDTH_5KM = 270 SCALE_FACTOR = 1 RES_TO_REPEAT_FACTOR = { 250: 20, 500: 10, 1000: 5, 5000: 1, } def _shape_for_resolution(resolution: int) -> tuple[int, int]: assert resolution in RES_TO_REPEAT_FACTOR factor = RES_TO_REPEAT_FACTOR[resolution] if factor == 1: return SCAN_LEN_5KM, SCAN_WIDTH_5KM factor_1km = RES_TO_REPEAT_FACTOR[1000] shape_1km = (factor_1km * SCAN_LEN_5KM, factor_1km * SCAN_WIDTH_5KM + 4) factor //= 5 return factor * shape_1km[0], factor * shape_1km[1] def _generate_lonlat_data(resolution: int) -> np.ndarray: shape = _shape_for_resolution(resolution) lat = np.repeat(np.linspace(35., 45., shape[0])[:, None], shape[1], 1) lat *= np.linspace(0.9, 1.1, shape[1]) lon = np.repeat(np.linspace(-45., -35., shape[1])[None, :], shape[0], 0) lon *= np.linspace(0.9, 1.1, shape[0])[:, None] return lon.astype(np.float32), lat.astype(np.float32) def _generate_angle_data(resolution: int) -> np.ndarray: shape = _shape_for_resolution(resolution) data = np.repeat(abs(np.linspace(-65.2, 65.4, shape[1]))[None, :], shape[0], 0) return (data * 100).astype(np.int16) def _generate_visible_data(resolution: int, num_bands: int, dtype=np.uint16) -> np.ndarray: shape = _shape_for_resolution(resolution) data = np.zeros((num_bands, shape[0], shape[1]), dtype=dtype) return data def _get_lonlat_variable_info(resolution: int) -> dict: lon_5km, lat_5km = _generate_lonlat_data(resolution) return { 'Latitude': {'data': lat_5km, 'type': SDC.FLOAT32, 'fill_value': -999, 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, 'Longitude': {'data': lon_5km, 'type': SDC.FLOAT32, 'fill_value': -999, 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, } def _get_angles_variable_info(resolution: int) -> dict: angle_data = _generate_angle_data(resolution) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 angle_info = { 'data': angle_data, 'type': SDC.INT16, 'fill_value': -32767, 'attrs': { 'dim_labels': [ f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B', '1KM_geo_dim:MODIS_SWATH_Type_L1B'], 'scale_factor': 0.01 }, } angles_info = {} for var_name in ('SensorAzimuth', 'SensorZenith', 'SolarAzimuth', 'SolarZenith'): angles_info[var_name] = angle_info return angles_info def _get_visible_variable_info(var_name: str, resolution: int, bands: list[str]): num_bands = len(bands) data = _generate_visible_data(resolution, len(bands)) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_RefSB:MODIS_SWATH_Type_L1B" row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' col_dim_name = 'Max_EV_frames:MODIS_SWATH_Type_L1B' return { var_name: { 'data': data, 'type': SDC.UINT16, 'fill_value': 0, 'attrs': { # dim_labels are just unique dimension names, may not match exactly with real world files 'dim_labels': 
[band_dim_name, row_dim_name, col_dim_name], 'valid_range': (0, 32767), 'reflectance_scales': (1,) * num_bands, 'reflectance_offsets': (0,) * num_bands, 'band_names': ",".join(bands), }, }, var_name + '_Uncert_Indexes': { 'data': np.zeros(data.shape, dtype=np.uint8), 'type': SDC.UINT8, 'fill_value': 255, 'attrs': { 'dim_labels': [band_dim_name, row_dim_name, col_dim_name], }, }, } def _get_emissive_variable_info(var_name: str, resolution: int, bands: list[str]): num_bands = len(bands) data = _generate_visible_data(resolution, len(bands)) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_Emissive:MODIS_SWATH_Type_L1B" row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' col_dim_name = 'Max_EV_frames:MODIS_SWATH_Type_L1B' return { var_name: { 'data': data, 'type': SDC.UINT16, 'fill_value': 0, 'attrs': { 'dim_labels': [band_dim_name, row_dim_name, col_dim_name], 'valid_range': (0, 32767), 'band_names': ",".join(bands), }, }, var_name + '_Uncert_Indexes': { 'data': np.zeros(data.shape, dtype=np.uint8), 'type': SDC.UINT8, 'fill_value': 255, 'attrs': { 'dim_labels': [band_dim_name, row_dim_name, col_dim_name], }, }, } def _get_l1b_geo_variable_info(filename: str, geo_resolution: int, include_angles: bool = True ) -> dict: variables_info = {} variables_info.update(_get_lonlat_variable_info(geo_resolution)) if include_angles: variables_info.update(_get_angles_variable_info(geo_resolution)) return variables_info def generate_nasa_l1b_filename(prefix): """Generate a filename that follows NASA MODIS L1b convention.""" now = datetime.now() return f'{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf' def generate_imapp_filename(suffix): """Generate a filename that follows IMAPP MODIS L1b convention.""" now = datetime.now() return f't1.{now:%y%j.%H%M}.{suffix}.hdf' def create_hdfeos_test_file(filename: str, variable_infos: dict, geo_resolution: Optional[int] = None, file_shortname: Optional[str] = None, include_metadata: bool = True): """Create a fake MODIS L1b HDF4 file with headers. Args: filename: Full path of filename to be created. variable_infos: Dictionary mapping HDF4 variable names to dictionary of variable information (see ``_add_variable_to_file``). geo_resolution: Resolution of geolocation datasets to be stored in the metadata strings stored in the global metadata attributes. Only used if ``include_metadata`` is ``True`` (default). file_shortname: Short name of the file to be stored in global metadata attributes. Only used if ``include_metadata`` is ``True`` (default). include_metadata: Include global metadata attributes (default: True). 
""" h = SD(filename, SDC.WRITE | SDC.CREATE) if include_metadata: if geo_resolution is None or file_shortname is None: raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa for var_name, var_info in variable_infos.items(): _add_variable_to_file(h, var_name, var_info) h.end() def _add_variable_to_file(h, var_name, var_info): v = h.create(var_name, var_info['type'], var_info['data'].shape) v[:] = var_info['data'] dim_count = 0 for dimension_name in var_info['attrs']['dim_labels']: v.dim(dim_count).setname(dimension_name) dim_count += 1 v.setfillvalue(var_info['fill_value']) v.scale_factor = var_info['attrs'].get('scale_factor', SCALE_FACTOR) for attr_key, attr_val in var_info['attrs'].items(): if attr_key == 'dim_labels': continue setattr(v, attr_key, attr_val) def _create_core_metadata(file_shortname: str) -> str: beginning_date = datetime.now() ending_date = beginning_date + timedelta(minutes=5) core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \ "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \ "NUM_VAL = 1\nVALUE = \"{}\"\n" \ "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\n" \ "NUM_VAL = 1\nVALUE = \"{}\"\n" \ "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME" core_metadata_header = core_metadata_header.format( beginning_date.strftime("%Y-%m-%d"), beginning_date.strftime("%H:%M:%S.%f"), ending_date.strftime("%Y-%m-%d"), ending_date.strftime("%H:%M:%S.%f") ) inst_metadata = "GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" \ "OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = \"1\"\n\n" \ "OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n" \ "OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ "VALUE = \"Terra\"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n" \ "OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n" \ "END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\n\n" \ "END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" collection_metadata = "GROUP = COLLECTIONDESCRIPTIONCLASS\n\nOBJECT = SHORTNAME\nNUM_VAL = 1\n" \ f"VALUE = \"{file_shortname}\"\nEND_OBJECT = SHORTNAME\n\n" \ "OBJECT = VERSIONID\nNUM_VAL = 1\nVALUE = 6\nEND_OBJECT = VERSIONID\n\n" \ "END_GROUP = COLLECTIONDESCRIPTIONCLASS\n\n" core_metadata_header += "\n\n" + inst_metadata + collection_metadata return core_metadata_header def _create_struct_metadata(geo_resolution: int) -> str: geo_dim_factor = RES_TO_REPEAT_FACTOR[geo_resolution] * 2 struct_metadata_header = "GROUP=SwathStructure\n" \ "GROUP=SWATH_1\n" \ "GROUP=DimensionMap\n" \ "OBJECT=DimensionMap_2\n" \ f"GeoDimension=\"{geo_dim_factor}*nscans\"\n" \ "END_OBJECT=DimensionMap_2\n" \ "END_GROUP=DimensionMap\n" \ "END_GROUP=SWATH_1\n" \ "END_GROUP=SwathStructure\nEND" return struct_metadata_header def _create_header_metadata() -> str: archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND" return archive_metadata_header 
@pytest.fixture(scope="session") def modis_l1b_nasa_mod021km_file(tmpdir_factory) -> list[str]: """Create a single MOD021KM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD021km") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_visible_variable_info("EV_1KM_RefSB", 1000, AVAILABLE_1KM_VIS_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD021KM") return [full_path] @pytest.fixture(scope="session") def modis_l1b_imapp_1000m_file(tmpdir_factory) -> list[str]: """Create a single MOD021KM file following IMAPP file scheme.""" filename = generate_imapp_filename("1000m") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_visible_variable_info("EV_1KM_RefSB", 1000, AVAILABLE_1KM_VIS_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD021KM") return [full_path] @pytest.fixture(scope="session") def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: """Create a single MOD02HKM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD02Hkm") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_500_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD02HKM") return [full_path] @pytest.fixture def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: """Create a single MOD02QKM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD02Qkm") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_250_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD02QKM") return [full_path] @pytest.fixture(scope="session") def modis_l1b_nasa_mod03_file(tmpdir_factory) -> list[str]: """Create a single MOD03 file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD03") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD03") return [full_path] @pytest.fixture(scope="session") def modis_l1b_imapp_geo_file(tmpdir_factory) -> list[str]: """Create a single 
geo file following standard IMAPP file scheme.""" filename = generate_imapp_filename("geo") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD03") return [full_path] @pytest.fixture(scope="session") def modis_l1b_nasa_1km_mod03_files(modis_l1b_nasa_mod021km_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create input files including the 1KM and MOD03 files.""" return modis_l1b_nasa_mod021km_file + modis_l1b_nasa_mod03_file # Level 2 Fixtures def _get_basic_variable_info(var_name: str, resolution: int) -> dict: shape = _shape_for_resolution(resolution) data = np.zeros((shape[0], shape[1]), dtype=np.uint16) row_dim_name = f'Cell_Along_Swath_{resolution}m:modl2' col_dim_name = f'Cell_Across_Swath_{resolution}m:modl2' return { var_name: { 'data': data, 'type': SDC.UINT16, 'fill_value': 0, 'attrs': { # dim_labels are just unique dimension names, may not match exactly with real world files 'dim_labels': [row_dim_name, col_dim_name], 'valid_range': (0, 32767), 'scale_factor': 1., 'add_offset': 0., }, }, } def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: num_bytes = 6 shape = _shape_for_resolution(resolution) data = np.zeros((num_bytes, shape[0], shape[1]), dtype=np.int8) byte_dim_name = "Byte_Segment:mod35" row_dim_name = 'Cell_Along_Swath_1km:mod35' col_dim_name = 'Cell_Across_Swath_1km:mod35' return { var_name: { 'data': data, 'type': SDC.INT8, 'fill_value': 0, 'attrs': { # dim_labels are just unique dimension names, may not match exactly with real world files 'dim_labels': [byte_dim_name, row_dim_name, col_dim_name], 'valid_range': (0, -1), 'scale_factor': 1., 'add_offset': 0., }, }, 'Quality_Assurance': { 'data': np.ones((shape[0], shape[1], 10), dtype=np.int8), 'type': SDC.INT8, 'fill_value': 0, 'attrs': { # dim_labels are just unique dimension names, may not match exactly with real world files 'dim_labels': [row_dim_name, col_dim_name, 'Quality_Dimension:mod35'], 'valid_range': (0, -1), 'scale_factor': 1., 'add_offset': 0., }, }, } def _get_mask_byte1_variable_info() -> dict: shape = _shape_for_resolution(1000) data = np.zeros((shape[0], shape[1]), dtype=np.uint16) row_dim_name = 'Cell_Along_Swath_1km:mod35' col_dim_name = 'Cell_Across_Swath_1km:mod35' return { "MODIS_Cloud_Mask": { 'data': data, 'type': SDC.UINT16, 'fill_value': 9999, 'attrs': { # dim_labels are just unique dimension names, may not match exactly with real world files 'dim_labels': [row_dim_name, col_dim_name], 'valid_range': (0, 4), 'scale_factor': 1., 'add_offset': 0., }, }, "MODIS_Simple_LandSea_Mask": { 'data': data, 'type': SDC.UINT16, 'fill_value': 9999, 'attrs': { # dim_labels are just unique dimension names, may not match exactly with real world files 'dim_labels': [row_dim_name, col_dim_name], 'valid_range': (0, 4), 'scale_factor': 1., 'add_offset': 0., }, }, "MODIS_Snow_Ice_Flag": { 'data': data, 'type': SDC.UINT16, 'fill_value': 9999, 'attrs': { # dim_labels are just unique dimension names, may not match exactly with real world files 'dim_labels': [row_dim_name, col_dim_name], 'valid_range': (0, 2), 'scale_factor': 1., 'add_offset': 0., }, }, } def generate_nasa_l2_filename(prefix: str) -> str: """Generate a file name that follows MODIS 35 L2 convention in a temporary directory.""" now = datetime.now() return f'{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf' @pytest.fixture(scope="session") 
def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: """Create a single MOD35 L2 HDF4 file with headers.""" filename = generate_nasa_l2_filename("MOD35") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_cloud_mask_variable_info("Cloud_Mask", 1000)) create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD35") return [full_path] @pytest.fixture(scope="session") def modis_l2_nasa_mod35_mod03_files(modis_l2_nasa_mod35_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create a MOD35 L2 HDF4 file and MOD03 L1b geolocation file.""" return modis_l2_nasa_mod35_file + modis_l1b_nasa_mod03_file @pytest.fixture(scope="session") def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: """Create a single MOD06 L2 HDF4 file with headers.""" filename = generate_nasa_l2_filename("MOD06") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000)) create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD06") return [full_path] @pytest.fixture(scope="session") def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]: """Create a single IMAPP snowmask L2 HDF4 file with headers.""" filename = generate_imapp_filename("snowmask") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_basic_variable_info("Snow_Mask", 1000)) create_hdfeos_test_file(full_path, variable_infos, include_metadata=False) return [full_path] @pytest.fixture(scope="session") def modis_l2_imapp_snowmask_geo_files(modis_l2_imapp_snowmask_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create the IMAPP snowmask and geo HDF4 files.""" return modis_l2_imapp_snowmask_file + modis_l1b_nasa_mod03_file @pytest.fixture(scope="session") def modis_l2_imapp_mask_byte1_file(tmpdir_factory) -> list[str]: """Create a single IMAPP mask_byte1 L2 HDF4 file with headers.""" filename = generate_imapp_filename("mask_byte1") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_mask_byte1_variable_info()) create_hdfeos_test_file(full_path, variable_infos, include_metadata=False) return [full_path] @pytest.fixture(scope="session") def modis_l2_imapp_mask_byte1_geo_files(modis_l2_imapp_mask_byte1_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create the IMAPP mask_byte1 and geo HDF4 files.""" return modis_l2_imapp_mask_byte1_file + modis_l1b_nasa_mod03_file satpy-0.34.0/satpy/tests/reader_tests/conftest.py000066400000000000000000000024151420401153000221130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Setup and configuration for all reader tests.""" from ._modis_fixtures import ( modis_l1b_imapp_1000m_file, modis_l1b_imapp_geo_file, modis_l1b_nasa_1km_mod03_files, modis_l1b_nasa_mod02hkm_file, modis_l1b_nasa_mod02qkm_file, modis_l1b_nasa_mod03_file, modis_l1b_nasa_mod021km_file, modis_l2_imapp_mask_byte1_file, modis_l2_imapp_mask_byte1_geo_files, modis_l2_imapp_snowmask_file, modis_l2_imapp_snowmask_geo_files, modis_l2_nasa_mod06_file, modis_l2_nasa_mod35_file, modis_l2_nasa_mod35_mod03_files, ) satpy-0.34.0/satpy/tests/reader_tests/test_aapp_l1b.py000066400000000000000000001133571420401153000230140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test module for the avhrr aapp l1b reader.""" import datetime import os import tempfile import unittest from contextlib import suppress from unittest import mock import numpy as np from satpy.readers.aapp_l1b import _HEADERTYPE, _SCANTYPE, AVHRRAAPPL1BFile from satpy.tests.utils import make_dataid class TestAAPPL1BAllChannelsPresent(unittest.TestCase): """Test the filehandler.""" def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) self._header['satid'][0] = 13 self._header['radtempcnv'][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3b is off, 3a is on self._header['inststat1'][0] = 0b1111011100000000 # switch 3a off at position 1 self._header['statchrecnb'][0] = 1 # 3b is on, 3a is off self._header['inststat2'][0] = 0b1111101100000000 self._data = np.zeros(3, dtype=_SCANTYPE) self._data['scnlinyr'][:] = 2020 self._data['scnlindy'][:] = 8 self._data['scnlintime'][0] = 30195225 self._data['scnlintime'][1] = 30195389 self._data['scnlintime'][2] = 30195556 self._data['scnlinbit'][0] = -16383 self._data['scnlinbit'][1] = -16383 self._data['scnlinbit'][2] = -16384 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [540780032, -22145690, 1584350080, -543935616, 500]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) self._data['calvis'][:] = calvis self._data['calir'] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], [[13869, -249508, 234624768], [0, 0, 0]]], [[[0, -2675, 2655265], [0, 0, 0]], [[33609, -260810, 226837328], [0, 0, 0]], [[13870, -249520, 234638704], [0, 0, 0]]], [[[0, 0, 0], [0, 0, 0]], [[33614, -260833, 226855664], [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] self.filename_info = {'platform_shortname': 'metop03', 'start_time': datetime.datetime(2020, 
1, 8, 8, 19), 'orbit_number': 6071} self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, 'file_patterns': ['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], # noqa 'file_type': 'avhrr_aapp_l1b'} def test_read(self): """Test the reading.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} mins = [] maxs = [] for name in ['1', '2', '3a']: key = make_dataid(name=name, calibration='reflectance') res = fh.get_dataset(key, info) assert(res.min() == 0) assert(res.max() >= 100) mins.append(res.min().values) maxs.append(res.max().values) if name == '3a': assert(np.all(np.isnan(res[:2, :]))) for name in ['3b', '4', '5']: key = make_dataid(name=name, calibration='reflectance') res = fh.get_dataset(key, info) mins.append(res.min().values) maxs.append(res.max().values) if name == '3b': assert(np.all(np.isnan(res[2:, :]))) np.testing.assert_allclose(mins, [0., 0., 0., 204.10106939, 103.23477235, 106.42609758]) np.testing.assert_allclose(maxs, [108.40391775, 107.68545158, 106.80061233, 337.71416096, 355.15898219, 350.87182166]) def test_angles(self): """Test reading the angles.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name='solar_zenith_angle') res = fh.get_dataset(key, info) assert(np.all(res == 0)) def test_navigation(self): """Test reading the lon and lats.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name='longitude') res = fh.get_dataset(key, info) assert(np.all(res == 0)) key = make_dataid(name='latitude') res = fh.get_dataset(key, info) assert(np.all(res == 0)) def test_interpolation(self): """Test reading the lon and lats.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) lons40km = np.array([ [-115.9773, -122.3054, -127.7482, -132.464, -136.5788, -140.1951, -143.3961, -146.2497, -148.8112, -151.1259, -153.2309, -155.1568, -156.9291, -158.5689, -160.0941, -161.5196, -162.8584, -164.1212, -165.3176, -166.4557, -167.5426, -168.5846, -169.5872, -170.5555, -171.4937, -172.406, -173.296, -174.1671, -175.0224, -175.865, -176.6976, -177.523, -178.3439, -179.1628, -179.9825, 179.1944, 178.3651, 177.5267, 176.6761, 175.8098, 174.9242, 174.0149, 173.0773, 172.1057, 171.0935, 170.0326, 168.9128, 167.7211, 166.4397, 165.0436, 163.4946], [-115.9639, -122.2967, -127.7441, -132.4639, -136.5824, -140.2018, -143.4055, -146.2614, -148.8249, -151.1413, -153.2478, -155.175, -156.9484, -158.5892, -160.1152, -161.5415, -162.8809, -164.1443, -165.3412, -166.4797, -167.567, -168.6094, -169.6123, -170.5808, -171.5192, -172.4317, -173.3219, -174.1931, -175.0486, -175.8913, -176.724, -177.5494, -178.3703, -179.1893, 179.991, 179.168, 178.3388, 177.5005, 176.6499, 175.7838, 174.8983, 173.9892, 173.0518, 172.0805, 171.0685, 170.0079, 168.8885, 167.6972, 166.4164, 165.0209, 163.4726], [-115.9504, -122.288, -127.7399, -132.4639, -136.5859, -140.2084, -143.4148, -146.2731, -148.8386, -151.1567, -153.2647, -155.1932, -156.9677, -158.6095, -160.1363, 
-161.5634, -162.9034, -164.1674, -165.3648, -166.5038, -167.5915, -168.6341, -169.6374, -170.6061, -171.5448, -172.4575, -173.3478, -174.2192, -175.0748, -175.9176, -176.7503, -177.5758, -178.3968, -179.2157, 179.9646, 179.1416, 178.3124, 177.4742, 176.6238, 175.7577, 174.8724, 173.9635, 173.0263, 172.0552, 171.0436, 169.9833, 168.8643, 167.6734, 166.3931, 164.9982, 163.4507]]) lats40km = np.array([ [78.6613, 78.9471, 79.0802, 79.1163, 79.0889, 79.019, 78.9202, 78.8016, 78.6695, 78.528, 78.38, 78.2276, 78.0721, 77.9145, 77.7553, 77.5949, 77.4335, 77.2712, 77.1079, 76.9435, 76.7779, 76.6108, 76.4419, 76.2708, 76.0973, 75.921, 75.7412, 75.5576, 75.3696, 75.1764, 74.9776, 74.7721, 74.5592, 74.3379, 74.1069, 73.865, 73.6106, 73.342, 73.057, 72.7531, 72.4273, 72.076, 71.6945, 71.2773, 70.8171, 70.3046, 69.7272, 69.0676, 68.3014, 67.3914, 66.2778], [78.6703, 78.9565, 79.0897, 79.1259, 79.0985, 79.0286, 78.9297, 78.8111, 78.6789, 78.5373, 78.3892, 78.2367, 78.0811, 77.9233, 77.764, 77.6035, 77.442, 77.2796, 77.1162, 76.9518, 76.7861, 76.6188, 76.4498, 76.2787, 76.1051, 75.9287, 75.7488, 75.5651, 75.377, 75.1838, 74.9848, 74.7793, 74.5663, 74.3448, 74.1138, 73.8718, 73.6173, 73.3486, 73.0635, 72.7595, 72.4336, 72.0821, 71.7005, 71.2832, 70.8229, 70.3102, 69.7326, 69.0729, 68.3065, 67.3963, 66.2825], [78.6794, 78.9658, 79.0993, 79.1355, 79.1082, 79.0381, 78.9392, 78.8205, 78.6882, 78.5465, 78.3984, 78.2458, 78.0901, 77.9322, 77.7728, 77.6122, 77.4506, 77.2881, 77.1246, 76.96, 76.7942, 76.6269, 76.4578, 76.2866, 76.1129, 75.9364, 75.7564, 75.5727, 75.3844, 75.1911, 74.9921, 74.7864, 74.5734, 74.3518, 74.1207, 73.8786, 73.624, 73.3552, 73.0699, 72.7658, 72.4398, 72.0882, 71.7065, 71.2891, 70.8286, 70.3158, 69.7381, 69.0782, 68.3116, 67.4012, 66.2872]]) fh._get_coordinates_in_degrees = mock.MagicMock() fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km) (lons, lats) = fh._get_all_interpolated_coordinates() lon_data = lons.compute() self.assertTrue(np.max(lon_data) <= 180) # Not longitdes between -110, 110 in indata self.assertTrue(np.all(np.abs(lon_data) > 110)) def test_interpolation_angles(self): """Test reading the lon and lats.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) sunz40km = np.array( [[122.42, 121.72, 121.14, 120.63, 120.19, 119.79, 119.43, 119.1, 118.79, 118.51, 118.24, 117.99, 117.76, 117.53, 117.31, 117.1, 116.9, 116.71, 116.52, 116.33, 116.15, 115.97, 115.79, 115.61, 115.44, 115.26, 115.08, 114.91, 114.73, 114.55, 114.36, 114.18, 113.98, 113.79, 113.58, 113.37, 113.15, 112.92, 112.68, 112.43, 112.15, 111.87, 111.55, 111.22, 110.85, 110.44, 109.99, 109.47, 108.88, 108.18, 107.33], [122.41, 121.71, 121.13, 120.62, 120.18, 119.78, 119.42, 119.09, 118.78, 118.5, 118.24, 117.99, 117.75, 117.52, 117.31, 117.1, 116.9, 116.7, 116.51, 116.32, 116.14, 115.96, 115.78, 115.6, 115.43, 115.25, 115.08, 114.9, 114.72, 114.54, 114.36, 114.17, 113.98, 113.78, 113.57, 113.36, 113.14, 112.91, 112.67, 112.42, 112.15, 111.86, 111.55, 111.21, 110.84, 110.43, 109.98, 109.46, 108.87, 108.17, 107.32]]) satz40km = np.array( [[6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01, 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01, 2.710e+01, 2.460e+01, 2.210e+01, 1.962e+01, 1.714e+01, 1.467e+01, 1.221e+01, 9.760e+00, 7.310e+00, 4.860e+00, 2.410e+00, 3.000e-02, 2.470e+00, 4.920e+00, 7.370e+00, 9.820e+00, 
1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01, 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01, 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01, 6.290e+01, 6.633e+01], [6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01, 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01, 2.710e+01, 2.460e+01, 2.210e+01, 1.962e+01, 1.714e+01, 1.467e+01, 1.221e+01, 9.760e+00, 7.310e+00, 4.860e+00, 2.410e+00, 3.000e-02, 2.470e+00, 4.920e+00, 7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01, 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01, 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01, 6.290e+01, 6.633e+01]]) azidiff40km = np.array([ [56.9, 56.24, 55.71, 55.27, 54.9, 54.57, 54.29, 54.03, 53.8, 53.59, 53.4, 53.22, 53.05, 52.89, 52.74, 52.6, 52.47, 52.34, 52.22, 52.1, 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.76, 128.86, 128.96, 129.07, 129.17, 129.27, 129.38, 129.49, 129.6, 129.72, 129.83, 129.95, 130.08, 130.21, 130.35, 130.5, 130.65, 130.81, 130.99, 131.18, 131.39, 131.63, 131.89, 132.19], [56.9, 56.24, 55.72, 55.28, 54.9, 54.58, 54.29, 54.03, 53.8, 53.59, 53.4, 53.22, 53.05, 52.89, 52.75, 52.6, 52.47, 52.34, 52.22, 52.1, 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.75, 128.86, 128.96, 129.06, 129.17, 129.27, 129.38, 129.49, 129.6, 129.71, 129.83, 129.95, 130.08, 130.21, 130.35, 130.49, 130.65, 130.81, 130.99, 131.18, 131.39, 131.62, 131.89, 132.19]]) fh._get_tiepoint_angles_in_degrees = mock.MagicMock() fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km) (sunz, satz, azidiff) = fh._get_all_interpolated_angles() self.assertTrue(np.max(sunz) <= 123) self.assertTrue(np.max(satz) <= 70) class TestAAPPL1BChannel3AMissing(unittest.TestCase): """Test the filehandler when channel 3a is missing.""" def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) self._header['satid'][0] = 13 self._header['radtempcnv'][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3a is off, 3b is on self._header['inststat1'][0] = 0b1111011100000000 # valid for the whole pass self._header['statchrecnb'][0] = 0 self._header['inststat2'][0] = 0b0 self._data = np.zeros(3, dtype=_SCANTYPE) self._data['scnlinyr'][:] = 2020 self._data['scnlindy'][:] = 8 self._data['scnlintime'][0] = 30195225 self._data['scnlintime'][1] = 30195389 self._data['scnlintime'][2] = 30195556 self._data['scnlinbit'][0] = -16383 self._data['scnlinbit'][1] = -16383 self._data['scnlinbit'][2] = -16383 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [540780032, -22145690, 1584350080, -543935616, 500]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) self._data['calvis'][:] = calvis self._data['calir'] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], [[13869, -249508, 234624768], [0, 0, 0]]], [[[0, -2675, 2655265], [0, 0, 0]], [[33609, -260810, 226837328], [0, 0, 0]], [[13870, -249520, 234638704], [0, 0, 0]]], [[[0, 0, 0], [0, 0, 0]], [[33614, -260833, 226855664], [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] self.filename_info = {'platform_shortname': 'metop03', 
'start_time': datetime.datetime(2020, 1, 8, 8, 19), 'orbit_number': 6071} self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, 'file_patterns': [ 'hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], # noqa 'file_type': 'avhrr_aapp_l1b'} def test_loading_missing_channels_returns_none(self): """Test that loading a missing channel raises a keyerror.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name='3a', calibration='reflectance') assert fh.get_dataset(key, info) is None def test_available_datasets_miss_3a(self): """Test that channel 3a is missing from available datasets.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) configured_datasets = [[None, {'name': '1'}], [None, {'name': '2'}], [None, {'name': '3a'}], [None, {'name': '3b'}], [None, {'name': '4'}], [None, {'name': '5'}], ] available_datasets = fh.available_datasets(configured_datasets) for status, mda in available_datasets: if mda['name'] == '3a': assert status is False else: assert status is True class TestNegativeCalibrationSlope(unittest.TestCase): """Case for testing correct behaviour when the data has negative slope2 coefficients.""" def setUp(self): """Set up the test case.""" from satpy.readers.aapp_l1b import _HEADERTYPE, _SCANTYPE calvis = np.array([[[617200000, -24330000, 1840000000, -632800000, 498], # calvis [0, 0, 0, 0, 0], [540000000, -21300002, 1610000000, -553699968, 501]], [[750299968, -29560000, -2043967360, -784400000, 503], [0, 0, 0, 0, 0], [529000000, -20840002, 1587299968, -553100032, 500]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [261799984, -9820000, 1849200000, -808800000, 501]]], dtype=". 
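# NOTE (illustrative aside, not part of satpy): the AAPP tests above all repeat the same
# "write header, seek past the fixed-length header, write scanline records" steps before
# instantiating the file handler. A minimal sketch of a reusable helper for that pattern
# is shown below; the name ``fake_aapp_file`` and the default header length of 22016 bytes
# (the offset used by these tests) are assumptions made here purely for illustration.

import tempfile
from contextlib import contextmanager


@contextmanager
def fake_aapp_file(header, data, header_length=22016):
    """Yield an open temporary file holding *header* followed by *data* records."""
    with tempfile.TemporaryFile() as tmpfile:
        header.tofile(tmpfile)          # structured header record at offset 0
        tmpfile.seek(header_length, 0)  # scanline records start after the fixed-length header
        data.tofile(tmpfile)
        yield tmpfile

# A test body could then reduce to something like:
#     with fake_aapp_file(self._header, self._data) as tmpfile:
#         fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info)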
"""Test module for the MHS AAPP level-1c reader.""" import datetime import tempfile import unittest import numpy as np from satpy.readers.aapp_mhs_amsub_l1c import _HEADERTYPE, _SCANTYPE, HEADER_LENGTH, MHS_AMSUB_AAPPL1CFile from satpy.tests.utils import make_dataid SCANLINE1 = [[26798, 27487, 23584, 24816, 26196], [26188, 27392, 23453, 24832, 26223], [23777, 26804, 23529, 24876, 26193], [23311, 26781, 23583, 24898, 26157], [23194, 26737, 23743, 24917, 26199], [23811, 26871, 23836, 25017, 26239], [25000, 27034, 23952, 25118, 26247], [25937, 26988, 24019, 25058, 26143], [25986, 26689, 24048, 25081, 25967], [24689, 26083, 24062, 24975, 25744], [23719, 25519, 24016, 24938, 25617], [23327, 25565, 23882, 24960, 25571], [23214, 25646, 23862, 24847, 25561], [23473, 25886, 23859, 24832, 25640], [23263, 25449, 23759, 24730, 25525], [23335, 25672, 23716, 24727, 25578], [23477, 25983, 23771, 24847, 25882], [23141, 25863, 23758, 24971, 26066], [23037, 25813, 23855, 25113, 26231], [22908, 25701, 23958, 25130, 26226], [22608, 25493, 23980, 25223, 26277], [22262, 25275, 24019, 25356, 26247], [21920, 25116, 24161, 25375, 26268], [21559, 24795, 24169, 25351, 26284], [21269, 24591, 24333, 25503, 26300], [21028, 24395, 24413, 25498, 26300], [20887, 24254, 24425, 25479, 26228], [20882, 24288, 24440, 25463, 26284], [20854, 24261, 24569, 25438, 26266], [20911, 24277, 24564, 25464, 26213], [21069, 24369, 24567, 25473, 26211], [20994, 24201, 24747, 25552, 26130], [21909, 24648, 24856, 25546, 26249], [21936, 24662, 24843, 25612, 26207], [21142, 24248, 24885, 25616, 26159], [21180, 24251, 24817, 25553, 26114], [21236, 24219, 24840, 25569, 26100], [21057, 24152, 24735, 25535, 26093], [20825, 24018, 24830, 25528, 26103], [20731, 23866, 24789, 25579, 26117], [20924, 23972, 24808, 25512, 26082], [21228, 24259, 24723, 25501, 26071], [21307, 24285, 24733, 25491, 26058], [21558, 24521, 24739, 25511, 26009], [21562, 24500, 24706, 25538, 26091], [21568, 24448, 24639, 25504, 26011], [21636, 24520, 24673, 25462, 26028], [21895, 24667, 24662, 25494, 26048], [22251, 24892, 24570, 25435, 25977], [22459, 25109, 24557, 25340, 26010], [22426, 25030, 24533, 25310, 25964], [22419, 24966, 24528, 25316, 25953], [22272, 24851, 24503, 25318, 25891], [22261, 24799, 24548, 25326, 25912], [22445, 25023, 24410, 25333, 25930], [22371, 24902, 24381, 25323, 25892], [21791, 24521, 24407, 25362, 25880], [20930, 23820, 24440, 25287, 25849], [21091, 24008, 24412, 25251, 25854], [21575, 24331, 24405, 25272, 25774], [21762, 24545, 24395, 25216, 25763], [21891, 24550, 24317, 25256, 25790], [21865, 24584, 24250, 25205, 25797], [21431, 24178, 24302, 25228, 25738], [21285, 23978, 24240, 25205, 25735], [21935, 24515, 24232, 25240, 25834], [22372, 24790, 24325, 25311, 25878], [22621, 24953, 24410, 25395, 25897], [23642, 25290, 24456, 25428, 25959], [23871, 25209, 24376, 25369, 25976], [22846, 24495, 24378, 25347, 25868], [22490, 24320, 24327, 25374, 25849], [23237, 24599, 24182, 25298, 25839], [23134, 24601, 24121, 25306, 25864], [22647, 24314, 24108, 25248, 25787], [22499, 24293, 24049, 25165, 25823], [22247, 23987, 23936, 25131, 25742], [22291, 23942, 23908, 25028, 25715], [22445, 24205, 23784, 24997, 25615], [22487, 24417, 23764, 24921, 25643], [22386, 24420, 23765, 24865, 25715], [22217, 24326, 23748, 24823, 25617], [21443, 23814, 23722, 24750, 25552], [20354, 22599, 23580, 24722, 25439], [20331, 22421, 23431, 24655, 25389], [19925, 21855, 23412, 24623, 25284], [20240, 22224, 23339, 24545, 25329], [20368, 22596, 23419, 24474, 25362], [20954, 23192, 
23345, 24416, 25403], [22292, 24303, 23306, 24330, 25353]] ANGLES_SCLINE1 = [[5926, 35786, 7682, 23367], [5769, 35780, 7709, 23352], [5614, 35774, 7733, 23339], [5463, 35769, 7756, 23326], [5314, 35763, 7777, 23313], [5167, 35758, 7797, 23302], [5022, 35753, 7816, 23290], [4879, 35747, 7834, 23280], [4738, 35742, 7851, 23269], [4598, 35737, 7868, 23259], [4459, 35732, 7883, 23249], [4321, 35727, 7899, 23240], [4185, 35721, 7913, 23231], [4049, 35716, 7927, 23222], [3914, 35711, 7940, 23213], [3780, 35706, 7953, 23204], [3647, 35701, 7966, 23195], [3515, 35695, 7978, 23187], [3383, 35690, 7990, 23179], [3252, 35685, 8001, 23170], [3121, 35680, 8013, 23162], [2991, 35674, 8023, 23154], [2861, 35669, 8034, 23146], [2732, 35663, 8045, 23138], [2603, 35658, 8055, 23130], [2474, 35652, 8065, 23122], [2346, 35647, 8075, 23114], [2218, 35641, 8084, 23106], [2090, 35635, 8094, 23098], [1963, 35630, 8103, 23090], [1836, 35624, 8112, 23082], [1709, 35618, 8121, 23074], [1582, 35612, 8130, 23066], [1455, 35605, 8139, 23057], [1329, 35599, 8148, 23049], [1203, 35593, 8157, 23041], [1077, 35586, 8165, 23032], [951, 35580, 8174, 23023], [825, 35573, 8182, 23014], [699, 35566, 8191, 23005], [573, 35560, 8199, 22996], [448, 35553, 8208, 22987], [322, 35548, 8216, 22977], [196, 35545, 8224, 22968], [71, 35561, 8233, 22958], [54, 17463, 8241, 22947], [179, 17489, 8249, 22937], [305, 17486, 8258, 22926], [431, 17479, 8266, 22915], [556, 17471, 8275, 22903], [682, 17461, 8283, 22891], [808, 17451, 8291, 22879], [934, 17440, 8300, 22866], [1060, 17428, 8309, 22853], [1186, 17416, 8317, 22839], [1312, 17403, 8326, 22824], [1438, 17390, 8335, 22809], [1565, 17375, 8344, 22793], [1692, 17360, 8353, 22776], [1818, 17344, 8362, 22759], [1946, 17327, 8371, 22740], [2073, 17309, 8381, 22720], [2201, 17289, 8390, 22699], [2329, 17268, 8400, 22676], [2457, 17245, 8410, 22652], [2585, 17220, 8420, 22625], [2714, 17194, 8431, 22597], [2843, 17164, 8441, 22566], [2973, 17132, 8452, 22533], [3103, 17097, 8463, 22496], [3234, 17058, 8475, 22455], [3365, 17014, 8486, 22410], [3497, 16965, 8498, 22359], [3629, 16909, 8511, 22301], [3762, 16844, 8524, 22236], [3896, 16770, 8537, 22160], [4031, 16683, 8551, 22071], [4166, 16578, 8565, 21965], [4303, 16452, 8580, 21837], [4440, 16295, 8595, 21679], [4579, 16096, 8611, 21478], [4718, 15835, 8628, 21215], [4860, 15477, 8646, 20856], [5003, 14963, 8665, 20341], [5147, 14178, 8684, 19553], [5294, 12897, 8705, 18270], [5442, 10778, 8727, 16150], [5593, 7879, 8751, 13250], [5747, 5305, 8776, 10674], [5904, 3659, 8803, 9027]] LATLON_SCLINE1 = [[715994, 787602], [720651, 786999], [724976, 786407], [729013, 785827], [732799, 785255], [736362, 784692], [739728, 784134], [742919, 783583], [745953, 783035], [748844, 782492], [751607, 781951], [754254, 781412], [756796, 780875], [759240, 780338], [761597, 779801], [763872, 779264], [766073, 778726], [768206, 778186], [770275, 777644], [772287, 777100], [774245, 776552], [776153, 776000], [778015, 775444], [779836, 774882], [781617, 774316], [783361, 773743], [785073, 773163], [786753, 772576], [788405, 771981], [790031, 771377], [791633, 770764], [793212, 770140], [794771, 769506], [796312, 768860], [797837, 768201], [799346, 767528], [800842, 766841], [802326, 766138], [803799, 765419], [805264, 764681], [806721, 763924], [808171, 763147], [809617, 762347], [811060, 761523], [812500, 760673], [813939, 759796], [815378, 758888], [816819, 757949], [818263, 756974], [819712, 755962], [821166, 754909], [822627, 753812], [824096, 752666], 
[825575, 751468], [827065, 750213], [828567, 748894], [830084, 747507], [831617, 746043], [833167, 744496], [834736, 742855], [836327, 741112], [837940, 739253], [839578, 737265], [841243, 735132], [842938, 732835], [844665, 730352], [846425, 727656], [848223, 724716], [850060, 721492], [851941, 717939], [853868, 713998], [855845, 709597], [857875, 704644], [859963, 699024], [862113, 692583], [864329, 685119], [866616, 676358], [868979, 665918], [871421, 653256], [873947, 637570], [876557, 617626], [879250, 591448], [882013, 555681], [884815, 504285], [887577, 425703], [890102, 297538], [891907, 85636], [892134, -204309], [890331, -461741], [887022, -626300]] class TestMHS_AMSUB_AAPPL1CReadData(unittest.TestCase): """Test the filehandler.""" def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) self._header['satid'][0] = 3 self._header['instrument'][0] = 12 self._header['tempradcnv'][0] = [[2968720, 0, 1000000, 5236956, 0], [1000000, 6114597, 0, 1000000, 6114597], [-3100, 1000270, 6348092, 0, 1000000]] self._data = np.zeros(3, dtype=_SCANTYPE) self._data['scnlinyr'][:] = 2020 self._data['scnlindy'][:] = 261 self._data['scnlintime'][0] = 36368496 self._data['scnlintime'][1] = 36371163 self._data['scnlintime'][2] = 36373830 self._data['qualind'][0] = 0 self._data['qualind'][1] = 0 self._data['qualind'][2] = 0 self._data['scnlinqual'][0] = 16384 self._data['scnlinqual'][1] = 16384 self._data['scnlinqual'][2] = 16384 self._data['chanqual'][0] = [6, 6, 6, 6, 6] self._data['chanqual'][1] = [6, 6, 6, 6, 6] self._data['chanqual'][2] = [6, 6, 6, 6, 6] self._data['instrtemp'][:] = [29520, 29520, 29520] self._data['dataqual'][:] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] self._data['scalti'][0:3] = [8321, 8321, 8321] self._data['latlon'][0] = LATLON_SCLINE1 self._data['angles'][0] = ANGLES_SCLINE1 self._data['btemps'][0] = SCANLINE1 self.filename_info = {'platform_shortname': 'metop01', 'start_time': datetime.datetime(2020, 9, 17, 10, 6), 'orbit_number': 41509} self.filetype_info = {'file_reader': MHS_AMSUB_AAPPL1CFile, 'file_patterns': ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c'], 'file_type': 'mhs_aapp_l1c'} def test_platform_name(self): """Test getting the platform name.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) assert fh_.platform_name == 'Metop-C' self._header['satid'][0] = 1 with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) assert fh_.platform_name == 'Metop-B' def test_sensor_name(self): """Test getting the sensor name.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) assert fh_.sensor == 'mhs' self._header['instrument'][0] = 11 with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, 
self.filetype_info) assert fh_.sensor == 'amsub' self._header['instrument'][0] = 10 with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) with self.assertRaises(IOError): fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) def test_read(self): """Test getting the platform name.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) info = {} chmin = [199.25, 218.55, 233.06, 243.3, 252.84] chmax = [267.98, 274.87, 248.85, 256.16, 263.] for chn, name in enumerate(['1', '2', '3', '4', '5']): key = make_dataid(name=name, calibration='brightness_temperature') res = fh_.get_dataset(key, info) assert(res.min() == chmin[chn]) assert(res.max() == chmax[chn]) def test_angles(self): """Test reading the angles.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name='solar_zenith_angle') res = fh_.get_dataset(key, info) assert(np.all(res[2] == 0)) assert(np.all(res[1] == 0)) expected = np.array([76.82, 77.09, 77.33, 77.56, 77.77, 77.97, 78.16, 78.34, 78.51, 78.68, 78.83, 78.99, 79.13, 79.27, 79.4, 79.53, 79.66, 79.78, 79.9, 80.01, 80.13, 80.23, 80.34, 80.45, 80.55, 80.65, 80.75, 80.84, 80.94, 81.03, 81.12, 81.21, 81.3, 81.39, 81.48, 81.57, 81.65, 81.74, 81.82, 81.91, 81.99, 82.08, 82.16, 82.24, 82.33, 82.41, 82.49, 82.58, 82.66, 82.75, 82.83, 82.91, 83., 83.09, 83.17, 83.26, 83.35, 83.44, 83.53, 83.62, 83.71, 83.81, 83.9, 84., 84.1, 84.2, 84.31, 84.41, 84.52, 84.63, 84.75, 84.86, 84.98, 85.11, 85.24, 85.37, 85.51, 85.65, 85.8, 85.95, 86.11, 86.28, 86.46, 86.65, 86.84, 87.05, 87.27, 87.51, 87.76, 88.03]) np.testing.assert_allclose(res[0], expected) def test_navigation(self): """Test reading the longitudes and latitudes.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name='longitude') res = fh_.get_dataset(key, info) assert(np.all(res[2] == 0)) assert(np.all(res[1] == 0)) expected = np.array([78.7602, 78.6999, 78.6407, 78.5827, 78.5255, 78.4692, 78.4134, 78.3583, 78.3035, 78.2492, 78.1951, 78.1412, 78.0875, 78.0338, 77.9801, 77.9264, 77.8726, 77.8186, 77.7644, 77.71, 77.6552, 77.6, 77.5444, 77.4882, 77.4316, 77.3743, 77.3163, 77.2576, 77.1981, 77.1377, 77.0764, 77.014, 76.9506, 76.886, 76.8201, 76.7528, 76.6841, 76.6138, 76.5419, 76.4681, 76.3924, 76.3147, 76.2347, 76.1523, 76.0673, 75.9796, 75.8888, 75.7949, 75.6974, 75.5962, 75.4909, 75.3812, 75.2666, 75.1468, 75.0213, 74.8894, 74.7507, 74.6043, 74.4496, 74.2855, 74.1112, 73.9253, 73.7265, 73.5132, 73.2835, 73.0352, 72.7656, 72.4716, 72.1492, 71.7939, 71.3998, 70.9597, 70.4644, 69.9024, 69.2583, 68.5119, 67.6358, 66.5918, 65.3256, 63.757, 61.7626, 59.1448, 55.5681, 50.4285, 42.5703, 29.7538, 8.5636, -20.4309, -46.1741, -62.63]) np.testing.assert_allclose(res[0], expected) key = make_dataid(name='latitude') res = fh_.get_dataset(key, info) assert(np.all(res[2] == 0)) assert(np.all(res[1] == 0)) expected = np.array([71.5994, 72.0651, 72.4976, 72.9013, 73.2799, 73.6362, 73.9728, 74.2919, 74.5953, 74.8844, 75.1607, 75.4254, 75.6796, 
75.924, 76.1597, 76.3872, 76.6073, 76.8206, 77.0275, 77.2287, 77.4245, 77.6153, 77.8015, 77.9836, 78.1617, 78.3361, 78.5073, 78.6753, 78.8405, 79.0031, 79.1633, 79.3212, 79.4771, 79.6312, 79.7837, 79.9346, 80.0842, 80.2326, 80.3799, 80.5264, 80.6721, 80.8171, 80.9617, 81.106, 81.25, 81.3939, 81.5378, 81.6819, 81.8263, 81.9712, 82.1166, 82.2627, 82.4096, 82.5575, 82.7065, 82.8567, 83.0084, 83.1617, 83.3167, 83.4736, 83.6327, 83.794, 83.9578, 84.1243, 84.2938, 84.4665, 84.6425, 84.8223, 85.006, 85.1941, 85.3868, 85.5845, 85.7875, 85.9963, 86.2113, 86.4329, 86.6616, 86.8979, 87.1421, 87.3947, 87.6557, 87.925, 88.2013, 88.4815, 88.7577, 89.0102, 89.1907, 89.2134, 89.0331, 88.7022]) np.testing.assert_allclose(res[0], expected) satpy-0.34.0/satpy/tests/reader_tests/test_abi_l1b.py000066400000000000000000000332571420401153000226260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The abi_l1b reader tests package.""" import unittest from unittest import mock import numpy as np import pytest import xarray as xr from satpy.tests.utils import make_dataid class Test_NC_ABI_L1B_Base(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_, rad=None): """Create a fake dataset using the given radiance data.""" from satpy.readers.abi_l1b import NC_ABI_L1B x_image = xr.DataArray(0.) y_image = xr.DataArray(0.) time = xr.DataArray(0.) if rad is None: rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. rad_data = (rad_data + 1.) 
/ 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 1002, 'units': 'W m-2 um-1 sr-1', 'valid_range': (0, 4095), } ) rad.coords['t'] = time rad.coords['x_image'] = x_image rad.coords['y_image'] = y_image x__ = xr.DataArray( range(5), attrs={'scale_factor': 2., 'add_offset': -1.}, dims=('x',) ) y__ = xr.DataArray( range(2), attrs={'scale_factor': -2., 'add_offset': 1.}, dims=('y',) ) proj = xr.DataArray( [], attrs={ 'semi_major_axis': 1., 'semi_minor_axis': 1., 'perspective_point_height': 1., 'longitude_of_projection_origin': -90., 'latitude_of_projection_origin': 0., 'sweep_angle_axis': u'x' } ) fake_dataset = xr.Dataset( data_vars={ 'Rad': rad, 'band_id': np.array(8), # 'x': x__, # 'y': y__, 'x_image': x_image, 'y_image': y_image, 'goes_imager_projection': proj, 'yaw_flip_flag': np.array([1]), "planck_fk1": np.array(13432.1), "planck_fk2": np.array(1497.61), "planck_bc1": np.array(0.09102), "planck_bc2": np.array(0.99971), "esun": np.array(2017), "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), "earth_sun_distance_anomaly_in_AU": np.array(0.99) }, coords={ 't': rad.coords['t'], 'x': x__, 'y': y__, }, attrs={ "time_coverage_start": "2017-09-20T17:30:40.8Z", "time_coverage_end": "2017-09-20T17:41:17.5Z", }, ) xr_.open_dataset.return_value = fake_dataset self.reader = NC_ABI_L1B('filename', {'platform_shortname': 'G16', 'observation_type': 'Rad', 'suffix': 'custom', 'scene_abbr': 'C', 'scan_mode': 'M3'}, {'filetype': 'info'}) class TestABIYAML: """Tests for the ABI L1b reader's YAML configuration.""" @pytest.mark.parametrize(['channel', 'suffix'], [("C{:02d}".format(num), suffix) for num in range(1, 17) for suffix in ('', '_test_suffix')]) def test_file_patterns_match(self, channel, suffix): """Test that the configured file patterns work.""" from satpy.readers import configs_for_reader, load_reader reader_configs = list(configs_for_reader('abi_l1b'))[0] reader = load_reader(reader_configs) fn1 = ("OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" "_c20182541300308{}.nc").format(channel, suffix) loadables = reader.select_files_from_pathnames([fn1]) assert len(loadables) == 1 if not suffix and channel in ["C01", "C02", "C03", "C05"]: fn2 = ("OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" "_c20182541300308-000000_0.nc").format(channel) loadables = reader.select_files_from_pathnames([fn2]) assert len(loadables) == 1 class Test_NC_ABI_L1B(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader.""" def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime self.assertEqual(self.reader.start_time, datetime(2017, 9, 20, 17, 30, 40, 800000)) self.assertEqual(self.reader.end_time, datetime(2017, 9, 20, 17, 41, 17, 500000)) def test_get_dataset(self): """Test the get_dataset method.""" key = make_dataid(name='Rad', calibration='radiance') res = self.reader.get_dataset(key, {'info': 'info'}) exp = {'calibration': 'radiance', 'instrument_ID': None, 'modifiers': (), 'name': 'Rad', 'observation_type': 'Rad', 'orbital_parameters': {'projection_altitude': 1.0, 'projection_latitude': 0.0, 'projection_longitude': -90.0, 'satellite_nominal_altitude': 35786020., 'satellite_nominal_latitude': 0.0, 'satellite_nominal_longitude': -89.5, 'yaw_flip': True}, 'orbital_slot': None, 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, 
'scan_mode': 'M3', 'scene_abbr': 'C', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, 'suffix': 'custom', 'units': 'W m-2 um-1 sr-1'} self.assertDictEqual(res.attrs, exp) # we remove any time dimension information self.assertNotIn('t', res.coords) self.assertNotIn('t', res.dims) self.assertNotIn('time', res.coords) self.assertNotIn('time', res.dims) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], {'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) self.assertEqual(call_args[4], self.reader.ncols) self.assertEqual(call_args[5], self.reader.nlines) np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) class Test_NC_ABI_L1B_ir_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader's IR calibration.""" def setUp(self): """Create fake data for the tests.""" rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 1002, } ) super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad) def test_ir_calibrate(self): """Test IR calibration.""" res = self.reader.get_dataset( make_dataid(name='C05', calibration='brightness_temperature'), {}) expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) # make sure the attributes from the file are in the data array self.assertNotIn('scale_factor', res.attrs) self.assertNotIn('_FillValue', res.attrs) self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') self.assertEqual(res.attrs['long_name'], 'Brightness Temperature') class Test_NC_ABI_L1B_vis_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader.""" def setUp(self): """Create fake data for the tests.""" rad_data = (np.arange(10.).reshape((2, 5)) + 1.) rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 20, } ) super(Test_NC_ABI_L1B_vis_cal, self).setUp(rad=rad) def test_vis_calibrate(self): """Test VIS calibration.""" res = self.reader.get_dataset( make_dataid(name='C05', calibration='reflectance'), {}) expected = np.array([[0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171]]) self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) self.assertNotIn('scale_factor', res.attrs) self.assertNotIn('_FillValue', res.attrs) self.assertEqual(res.attrs['standard_name'], 'toa_bidirectional_reflectance') self.assertEqual(res.attrs['long_name'], 'Bidirectional Reflectance') class Test_NC_ABI_L1B_raw_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader raw calibration.""" def setUp(self): """Create fake data for the tests.""" rad_data = (np.arange(10.).reshape((2, 5)) + 1.) rad_data = (rad_data + 1.) 
/ 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 20, } ) super(Test_NC_ABI_L1B_raw_cal, self).setUp(rad=rad) def test_raw_calibrate(self): """Test RAW calibration.""" res = self.reader.get_dataset( make_dataid(name='C05', calibration='counts'), {}) # We expect the raw data to be unchanged expected = res.data self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) # check for the presence of typical attributes self.assertIn('scale_factor', res.attrs) self.assertIn('add_offset', res.attrs) self.assertIn('_FillValue', res.attrs) self.assertIn('orbital_parameters', res.attrs) self.assertIn('platform_shortname', res.attrs) self.assertIn('scene_id', res.attrs) # determine if things match their expected values/types. self.assertEqual(res.data.dtype, np.int16, "int16 data type expected") self.assertEqual(res.attrs['standard_name'], 'counts') self.assertEqual(res.attrs['long_name'], 'Raw Counts') class Test_NC_ABI_L1B_invalid_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader with invalid calibration.""" def test_invalid_calibration(self): """Test detection of invalid calibration values.""" # Need to use a custom DataID class because the real DataID class is # smart enough to detect the invalid calibration before the ABI L1B # get_dataset method gets a chance to run. class FakeDataID(dict): def to_dict(self): return self with self.assertRaises(ValueError, msg='Did not detect invalid cal'): did = FakeDataID(name='C05', calibration='invalid', modifiers=()) self.reader.get_dataset(did, {}) class Test_NC_ABI_File(unittest.TestCase): """Test file opening.""" @mock.patch('satpy.readers.abi_base.xr') def test_open_dataset(self, _): """Test openning a dataset.""" from satpy.readers.abi_l1b import NC_ABI_L1B openable_thing = mock.MagicMock() NC_ABI_L1B(openable_thing, {'platform_shortname': 'g16'}, None) openable_thing.open.assert_called() class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): """Allow h5netcdf peculiarities.""" def setUp(self): """Create fake data for the tests.""" rad_data = np.int16(50) rad = xr.DataArray( rad_data, attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': np.array([1002]), 'units': 'W m-2 um-1 sr-1', 'valid_range': (0, 4095), } ) super(Test_NC_ABI_L1B_H5netcdf, self).setUp(rad=rad) satpy-0.34.0/satpy/tests/reader_tests/test_abi_l2_nc.py000066400000000000000000000237441420401153000231450ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
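# NOTE (illustrative aside, not part of the test suite): the "expected" arrays in
# Test_NC_ABI_L1B_ir_cal and Test_NC_ABI_L1B_vis_cal above appear to follow directly from
# the standard GOES-R ABI L1b calibration equations applied to the fake radiances
# (counts * scale_factor + add_offset, with the _FillValue masked to NaN). A self-contained
# sketch reproducing a couple of those numbers, using the constants from the fake dataset:

import numpy as np

fk1, fk2, bc1, bc2 = 13432.1, 1497.61, 0.09102, 0.99971  # Planck constants from the fake dataset
esun, d = 2017.0, 0.99                                   # esun and earth-sun distance anomaly (AU)

rad_ir = 50.0  # first radiance in the IR test (count 102 * 0.5 - 1)
bt = (fk2 / np.log(fk1 / rad_ir + 1.0) - bc1) / bc2
# -> ~267.556 K, matching expected[0, 0] in test_ir_calibrate

rad_vis = 1.0  # first radiance in the VIS test
refl_pct = rad_vis * np.pi * d ** 2 / esun * 100.0
# -> ~0.1527 %, matching expected[0, 0] in test_vis_calibrate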
"""The abi_l2_nc reader tests package.""" import unittest from unittest import mock import numpy as np import xarray as xr def _create_cmip_dataset(): proj = xr.DataArray( [], attrs={ 'semi_major_axis': 1., 'semi_minor_axis': 1., 'perspective_point_height': 1., 'longitude_of_projection_origin': -90., 'sweep_angle_axis': u'x' } ) x__ = xr.DataArray( [0, 1], attrs={'scale_factor': 2., 'add_offset': -1.}, dims=('x',), ) y__ = xr.DataArray( [0, 1], attrs={'scale_factor': -2., 'add_offset': 1.}, dims=('y',), ) ht_da = xr.DataArray(np.array([2, -1, -32768, 32767]).astype(np.int16).reshape((2, 2)), dims=('y', 'x'), attrs={'scale_factor': 0.3052037, 'add_offset': 0., '_FillValue': np.array(-1).astype(np.int16), '_Unsigned': 'True', 'units': 'm'},) fake_dataset = xr.Dataset( data_vars={ 'goes_imager_projection': proj, 'x': x__, 'y': y__, 'HT': ht_da, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), "spatial_resolution": "10km at nadir", }, attrs={ "time_coverage_start": "2017-09-20T17:30:40.8Z", "time_coverage_end": "2017-09-20T17:41:17.5Z", "spatial_resolution": "2km at nadir", } ) return fake_dataset def _create_mcmip_dataset(): fake_dataset = _create_cmip_dataset() fake_dataset = fake_dataset.copy(deep=True) fake_dataset['CMI_C14'] = fake_dataset['HT'] del fake_dataset['HT'] return fake_dataset class Test_NC_ABI_L2_base(unittest.TestCase): """Test the NC_ABI_L2 reader.""" def setUp(self): """Create fake data for the tests.""" from satpy.readers.abi_l2_nc import NC_ABI_L2 fake_cmip_dataset = _create_cmip_dataset() with mock.patch('satpy.readers.abi_base.xr') as xr_: xr_.open_dataset.return_value = fake_cmip_dataset self.reader = NC_ABI_L2( 'filename', { 'platform_shortname': 'G16', 'scan_mode': 'M3', 'scene_abbr': 'M1', }, { 'file_type': 'info', 'observation_type': 'ACHA', }, ) class Test_NC_ABI_L2_get_dataset(Test_NC_ABI_L2_base): """Test get dataset function of the NC_ABI_L2 reader.""" def test_get_dataset(self): """Test basic L2 load.""" from satpy.tests.utils import make_dataid key = make_dataid(name='HT') res = self.reader.get_dataset(key, {'file_key': 'HT'}) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) exp_attrs = {'instrument_ID': None, 'modifiers': (), 'name': 'HT', 'observation_type': 'ACHA', 'orbital_slot': None, 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, 'satellite_altitude': 35786020., 'satellite_latitude': 0.0, 'satellite_longitude': -89.5, 'scan_mode': 'M3', 'scene_abbr': 'M1', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, 'units': 'm'} self.assertTrue(np.allclose(res.data, exp_data, equal_nan=True)) self.assertDictEqual(dict(res.attrs), exp_attrs) class TestMCMIPReading: """Test cases of the MCMIP file format.""" @mock.patch('satpy.readers.abi_base.xr') def test_mcmip_get_dataset(self, xr_): """Test getting channel from MCMIP file.""" from datetime import datetime from pyresample.geometry import AreaDefinition from satpy import Scene from satpy.dataset.dataid import WavelengthRange xr_.open_dataset.return_value = _create_mcmip_dataset() fn = "OR_ABI-L2-MCMIPF-M6_G16_s20192600241149_e20192600243534_c20192600245360.nc" scn = Scene(reader='abi_l2_nc', filenames=[fn]) scn.load(['C14']) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) exp_attrs = {'instrument_ID': None, 'modifiers': (), 'name': 'C14', 'observation_type': 'MCMIP', 'orbital_slot': None, 'reader': 
'abi_l2_nc', 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, 'satellite_altitude': 35786020., 'satellite_latitude': 0.0, 'satellite_longitude': -89.5, 'scan_mode': 'M6', 'scene_abbr': 'F', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, 'start_time': datetime(2017, 9, 20, 17, 30, 40, 800000), 'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000), 'calibration': 'brightness_temperature', 'ancillary_variables': [], 'wavelength': WavelengthRange(10.8, 11.2, 11.6, unit='µm'), 'units': 'm'} res = scn['C14'] np.testing.assert_allclose(res.data, exp_data, equal_nan=True) assert isinstance(res.attrs['area'], AreaDefinition) # don't complicate the comparison below for key in ('area', '_satpy_id'): del res.attrs[key] assert dict(res.attrs) == exp_attrs class Test_NC_ABI_L2_area_fixedgrid(Test_NC_ABI_L2_base): """Test the NC_ABI_L2 reader.""" @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_fixedgrid(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], {'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) self.assertEqual(call_args[4], self.reader.ncols) self.assertEqual(call_args[5], self.reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2., 2.)) class Test_NC_ABI_L2_area_latlon(unittest.TestCase): """Test the NC_ABI_L2 reader.""" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_): """Create fake data for the tests.""" from satpy.readers.abi_l2_nc import NC_ABI_L2 proj = xr.DataArray( [], attrs={'semi_major_axis': 1., 'semi_minor_axis': 1., 'inverse_flattening': 1., 'longitude_of_prime_meridian': 0.0, } ) proj_ext = xr.DataArray( [], attrs={'geospatial_westbound_longitude': -85.0, 'geospatial_eastbound_longitude': -65.0, 'geospatial_northbound_latitude': 20.0, 'geospatial_southbound_latitude': -20.0, 'geospatial_lat_center': 0.0, 'geospatial_lon_center': -75.0, }) x__ = xr.DataArray( [0, 1], attrs={'scale_factor': 2., 'add_offset': -1.}, dims=('lon',), ) y__ = xr.DataArray( [0, 1], attrs={'scale_factor': -2., 'add_offset': 1.}, dims=('lat',), ) fake_dataset = xr.Dataset( data_vars={ 'goes_lat_lon_projection': proj, 'geospatial_lat_lon_extent': proj_ext, 'lon': x__, 'lat': y__, 'RSR': xr.DataArray(np.ones((2, 2)), dims=('lat', 'lon')), }, ) xr_.open_dataset.return_value = fake_dataset self.reader = NC_ABI_L2('filename', {'platform_shortname': 'G16', 'observation_type': 'RSR', 'scene_abbr': 'C', 'scan_mode': 'M3'}, {'filetype': 'info'}) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_latlon(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, 'lon_0': -75.0, 'lat_0': 0.0}) self.assertEqual(call_args[4], self.reader.ncols) self.assertEqual(call_args[5], self.reader.nlines) np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) satpy-0.34.0/satpy/tests/reader_tests/test_acspo.py000066400000000000000000000146141420401153000224360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. 
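# NOTE (illustrative aside): the expected HT values in Test_NC_ABI_L2_get_dataset above
# imply the CF "_Unsigned" convention: the raw int16 values are reinterpreted as uint16
# before the fill value is masked and the scale factor is applied. A quick check of that
# arithmetic, using the values from _create_cmip_dataset:

import numpy as np

raw = np.array([2, -1, -32768, 32767], dtype=np.int16)
unsigned = raw.astype(np.uint16)        # -> [2, 65535, 32768, 32767]
fill = np.int16(-1).astype(np.uint16)   # -> 65535
masked = np.where(unsigned == fill, np.nan, unsigned.astype(np.float64))
scaled = masked * 0.3052037             # matches exp_data in test_get_dataset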
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.acspo module.""" import os from datetime import datetime, timedelta from unittest import mock import numpy as np import pytest from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) sat, inst = { 'VIIRS_NPP': ('NPP', 'VIIRS'), 'VIIRS_N20': ('N20', 'VIIRS'), }[filename_info['sensor_id']] file_content = { '/attr/platform': sat, '/attr/sensor': inst, '/attr/spatial_resolution': '742 m at nadir', '/attr/time_coverage_start': dt.strftime('%Y%m%dT%H%M%SZ'), '/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y%m%dT%H%M%SZ'), } file_content['lat'] = DEFAULT_LAT_DATA file_content['lat/attr/comment'] = 'Latitude of retrievals' file_content['lat/attr/long_name'] = 'latitude' file_content['lat/attr/standard_name'] = 'latitude' file_content['lat/attr/units'] = 'degrees_north' file_content['lat/attr/valid_min'] = -90. file_content['lat/attr/valid_max'] = 90. file_content['lat/shape'] = DEFAULT_FILE_SHAPE file_content['lon'] = DEFAULT_LON_DATA file_content['lon/attr/comment'] = 'Longitude of retrievals' file_content['lon/attr/long_name'] = 'longitude' file_content['lon/attr/standard_name'] = 'longitude' file_content['lon/attr/units'] = 'degrees_east' file_content['lon/attr/valid_min'] = -180. file_content['lon/attr/valid_max'] = 180. file_content['lon/shape'] = DEFAULT_FILE_SHAPE for k in ['sea_surface_temperature', 'satellite_zenith_angle', 'sea_ice_fraction', 'wind_speed']: file_content[k] = DEFAULT_FILE_DATA[None, ...] 
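# The flat keys used here follow the FakeNetCDF4FileHandler content convention seen
# throughout these reader tests: "<var>" holds the variable data, "<var>/attr/<name>"
# holds one of its attributes, "<var>/shape" its shape, and keys starting with "/attr/"
# are global file attributes.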
file_content[k + '/attr/scale_factor'] = 1.1 file_content[k + '/attr/add_offset'] = 0.1 file_content[k + '/attr/units'] = 'some_units' file_content[k + '/attr/comment'] = 'comment' file_content[k + '/attr/standard_name'] = 'standard_name' file_content[k + '/attr/long_name'] = 'long_name' file_content[k + '/attr/valid_min'] = 0 file_content[k + '/attr/valid_max'] = 65534 file_content[k + '/attr/_FillValue'] = 65534 file_content[k + '/shape'] = (1, DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content['l2p_flags'] = np.zeros( (1, DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]), dtype=np.uint16) convert_file_content_to_data_array(file_content, dims=("time", "nj", "ni")) return file_content class TestACSPOReader: """Test ACSPO Reader.""" yaml_file = "acspo.yaml" def setup_method(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.acspo import ACSPOFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(ACSPOFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def teardown_method(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() @pytest.mark.parametrize( ("filename",), [ ["20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc"], ["20210916161708-STAR-L2P_GHRSST-SSTsubskin-VIIRS_N20-ACSPO_V2.80-v02.0-fv01.0.nc"], ] ) def test_init(self, filename): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([filename]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_every_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ '20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['sst', 'satellite_zenith_angle', 'sea_ice_fraction', 'wind_speed']) assert len(datasets) == 4 for d in datasets.values(): assert d.shape == DEFAULT_FILE_SHAPE assert d.dims == ("y", "x") satpy-0.34.0/satpy/tests/reader_tests/test_agri_l1.py000066400000000000000000000432671420401153000226550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
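# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the satpy test suite): the pattern used by
# TestACSPOReader above and by the AGRI tests below, where the real file
# handler's base class is swapped for a fake one so that no file is actually
# opened (see the stackoverflow link referenced in those setup methods).
# All names in this sketch are hypothetical stand-ins.
from unittest import mock


class _FakeBase:
    """Stand-in for FakeNetCDF4FileHandler / FakeHDF5FileHandler."""

    def __init__(self, *args, **kwargs):
        self.file_content = {'/attr/platform': 'NPP'}


class _RealBase:
    """Stand-in for the real base class, which would open a file on disk."""

    def __init__(self, filename, *args, **kwargs):
        raise IOError('would try to open ' + filename)


class _Handler(_RealBase):
    """Stand-in for a concrete file handler such as ACSPOFileHandler."""


_patcher = mock.patch.object(_Handler, '__bases__', (_FakeBase,))
_patcher.start()
_patcher.is_local = True  # make stop() restore __bases__ instead of deleting it
try:
    _handler = _Handler('dummy_filename')
    assert _handler.file_content['/attr/platform'] == 'NPP'
finally:
    _patcher.stop()
# ---------------------------------------------------------------------------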
"""The agri_l1 reader tests package.""" import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.readers.agri_l1 import RESOLUTION_LIST from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler ALL_BAND_NAMES = ["C01", "C02", "C03", "C04", "C05", "C06", "C07", "C08", "C09", "C10", "C11", "C12", "C13", "C14"] CHANNELS_BY_RESOLUTION = {500: ["C02"], 1000: ["C01", "C02", "C03"], 2000: ["C01", "C02", "C03", "C04", "C05", "C06", "C07"], 4000: ALL_BAND_NAMES, 'GEO': 'solar_azimuth_angle' } AREA_EXTENTS_BY_RESOLUTION = { 500: (-5495771.007913081, 5495271.006001793, -5493771.000267932, 5495771.007913081), 1000: (-5495521.074086424, 5494521.070251633, -5491521.058747265, 5495521.074086424), 2000: (-5495021.206414789, 5493021.198696349, -5487021.175541028, 5495021.206414789), 4000: (-5494021.20255557, 5490021.187118688, -5478021.140808046, 5494021.20255557) } class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def make_test_data(self, cwl, ch, prefix, dims, file_type): """Make test data.""" if prefix == 'CAL': data = xr.DataArray( da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), attrs={ 'Slope': np.array(1.), 'Intercept': np.array(0.), 'FillValue': np.array(-65535.0), 'units': 'NUL', 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), 'band_names': 'band{}(band number is range from 1 to 14)' .format(ch).encode('utf-8'), 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), 'valid_range': np.array([0, 1.5]), }, dims='_const') elif prefix == 'NOM': data = xr.DataArray( da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ 'Slope': np.array(1.), 'Intercept': np.array(0.), 'FillValue': np.array(65535), 'units': 'DN', 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), 'band_names': 'band{}(band number is range from 1 to 14)' .format(ch).encode('utf-8'), 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), 'valid_range': np.array([0, 4095]), }, dims=('_RegLength', '_RegWidth')) elif prefix == 'GEO': data = xr.DataArray( da.from_array(np.arange(10, dtype=np.float32).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ 'Slope': np.array(1.), 'Intercept': np.array(0.), 'FillValue': np.array(65535.), 'units': 'NUL', 'band_names': 'NUL', 'valid_range': np.array([0., 360.]), }, dims=('_RegLength', '_RegWidth')) elif prefix == 'COEF': if file_type == '500': data = self._create_coeff_array(1) elif file_type == '1000': data = self._create_coeff_array(3) elif file_type == '2000': data = self._create_coeff_array(7) elif file_type == '4000': data = self._create_coeff_array(14) return data def _create_coeff_array(self, nb_channels): data = xr.DataArray( da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) 
/ np.array([1E4, 1E2]), [nb_channels, 2]), attrs={ 'Slope': 1., 'Intercept': 0., 'FillValue': 0, 'units': 'NUL', 'band_names': 'NUL', 'long_name': b'Calibration coefficient (SCALE and OFFSET)', 'valid_range': [-500, 500], }, dims=('_num_channel', '_coefs')) return data def _get_500m_data(self, file_type): chs = [2] cwls = [0.65] data = self._create_channel_data(chs, cwls, file_type) return data def _create_channel_data(self, chs, cwls, file_type): dim_0 = 2 dim_1 = 5 data = {} for index, _cwl in enumerate(cwls): data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL', [dim_0, dim_1], file_type) data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM', [dim_0, dim_1], file_type) data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF', [dim_0, dim_1], file_type) return data def _get_1km_data(self, file_type): chs = np.linspace(1, 3, 3) cwls = [0.47, 0.65, 0.83] data = self._create_channel_data(chs, cwls, file_type) return data def _get_2km_data(self, file_type): chs = np.linspace(1, 7, 7) cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72] data = self._create_channel_data(chs, cwls, file_type) return data def _get_4km_data(self, file_type): chs = np.linspace(1, 14, 14) cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72, 3.72, 6.25, 7.10, 8.50, 10.8, 12, 13.5] data = self._create_channel_data(chs, cwls, file_type) return data def _get_geo_data(self, file_type): dim_0 = 2 dim_1 = 5 data = {'NOMSunAzimuth': self.make_test_data('NUL', 'NUL', 'GEO', [dim_0, dim_1], file_type)} return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { '/attr/NOMCenterLat': np.array(0.0), '/attr/NOMCenterLon': np.array(104.7), '/attr/NOMSatHeight': np.array(3.5786E7), '/attr/dEA': np.array(6378.14), '/attr/dObRecFlat': np.array(298.257223563), '/attr/OBIType': 'REGC', '/attr/RegLength': np.array(2.0), '/attr/RegWidth': np.array(5.0), '/attr/Begin Line Number': np.array(0), '/attr/End Line Number': np.array(1), '/attr/Observing Beginning Date': '2019-06-03', '/attr/Observing Beginning Time': '00:30:01.807', '/attr/Observing Ending Date': '2019-06-03', '/attr/Observing Ending Time': '00:34:07.572', '/attr/Satellite Name': 'FY4A', '/attr/Sensor Identification Code': 'AGRI', '/attr/Sensor Name': 'AGRI', } data = {} if self.filetype_info['file_type'] == 'agri_l1_0500m': data = self._get_500m_data('500') elif self.filetype_info['file_type'] == 'agri_l1_1000m': data = self._get_1km_data('1000') elif self.filetype_info['file_type'] == 'agri_l1_2000m': data = self._get_2km_data('2000') elif self.filetype_info['file_type'] == 'agri_l1_4000m': data = self._get_4km_data('4000') elif self.filetype_info['file_type'] == 'agri_l1_4000m_geo': data = self._get_geo_data('4000') test_content = {} test_content.update(global_attrs) test_content.update(data) return test_content def _create_filenames_from_resolutions(*resolutions): """Create filenames from the given resolutions.""" if 'GEO' in resolutions: return ["FY4A-_AGRI--_N_REGC_1047E_L1-_GEO-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF"] pattern = ("FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_" "{resolution:04d}M_V0001.HDF") return [pattern.format(resolution=resolution) for resolution in resolutions] class Test_HDF_AGRI_L1_cal: """Test the FY4A AGRI L1 reader.""" yaml_file = "agri_l1.yaml" def setup(self): """Wrap HDF5 file handler with our own fake handler.""" from
satpy._config import config_search_paths from satpy.readers.agri_l1 import HDF_AGRI_L1 self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(HDF_AGRI_L1, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True self.expected = { 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), 3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 8: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 9: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 10: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 11: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 12: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 13: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 14: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]) } def teardown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_fy4a_channels_are_loaded_with_right_resolution(self): """Test all channels are loaded with the right resolution.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) available_datasets = reader.available_dataset_ids for resolution_to_test in RESOLUTION_LIST: self._check_keys_for_dsq(available_datasets, resolution_to_test) def test_fy4a_all_bands_have_right_units(self): """Test all bands have the right units.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) band_names = ALL_BAND_NAMES res = reader.load(band_names) assert len(res) == 14 for band_name in band_names: assert res[band_name].shape == (2, 5) self._check_units(band_name, res) def test_fy4a_orbital_parameters_are_correct(self): """Test orbital parameters are set correctly.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) band_names = ALL_BAND_NAMES res = reader.load(band_names) # check whether the data type of orbital_parameters is float orbital_parameters = res[band_names[0]].attrs['orbital_parameters'] for attr in orbital_parameters: assert isinstance(orbital_parameters[attr], float) assert orbital_parameters['satellite_nominal_latitude'] == 0. 
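# (The expected nominal position and height mirror the fake file's global
# attributes '/attr/NOMCenterLat', '/attr/NOMCenterLon' and '/attr/NOMSatHeight'
# set in FakeHDF5FileHandler2.get_test_content above.)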
assert orbital_parameters['satellite_nominal_longitude'] == 104.7 assert orbital_parameters['satellite_nominal_altitude'] == 3.5786E7 @staticmethod def _check_keys_for_dsq(available_datasets, resolution_to_test): from satpy.dataset.data_dict import get_key from satpy.tests.utils import make_dsq band_names = CHANNELS_BY_RESOLUTION[resolution_to_test] for band_name in band_names: ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) if band_name < 'C07': assert len(res) == 2 else: assert len(res) == 3 def test_fy4a_counts_calibration(self): """Test loading data at counts calibration.""" from satpy.tests.utils import make_dsq reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) ds_ids = [] band_names = CHANNELS_BY_RESOLUTION[4000] for band_name in band_names: ds_ids.append(make_dsq(name=band_name, calibration='counts')) res = reader.load(ds_ids) assert len(res) == 14 for band_name in band_names: assert res[band_name].shape == (2, 5) assert res[band_name].attrs['calibration'] == "counts" assert res[band_name].dtype == np.uint16 assert res[band_name].attrs['units'] == "1" def test_fy4a_geo(self): """Test loading data for angles.""" from satpy.tests.utils import make_dsq reader = self._create_reader_for_resolutions('GEO') band_name = 'solar_azimuth_angle' ds_ids = [make_dsq(name=band_name)] res = reader.load(ds_ids) assert len(res) == 1 assert res[band_name].shape == (2, 5) assert res[band_name].dtype == np.float32 def _create_reader_for_resolutions(self, *resolutions): from satpy.readers import load_reader filenames = _create_filenames_from_resolutions(*resolutions) reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert len(filenames) == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers return reader @pytest.mark.parametrize("resolution_to_test", RESOLUTION_LIST) def test_fy4a_for_one_resolution(self, resolution_to_test): """Test loading data when only one resolution is available.""" reader = self._create_reader_for_resolutions(resolution_to_test) available_datasets = reader.available_dataset_ids band_names = CHANNELS_BY_RESOLUTION[resolution_to_test] self._assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test) res = reader.load(band_names) assert len(res) == len(band_names) self._check_calibration_and_units(band_names, res) for band_name in band_names: assert res[band_name].attrs['area'].area_extent == AREA_EXTENTS_BY_RESOLUTION[resolution_to_test] def _check_calibration_and_units(self, band_names, result): for index, band_name in enumerate(band_names): assert result[band_name].attrs['sensor'].islower() assert result[band_name].shape == (2, 5) np.testing.assert_allclose(result[band_name].values, self.expected[index + 1], equal_nan=True) self._check_units(band_name, result) @staticmethod def _check_units(band_name, result): if band_name < 'C07': assert result[band_name].attrs['calibration'] == "reflectance" else: assert result[band_name].attrs['calibration'] == 'brightness_temperature' if band_name < 'C07': assert result[band_name].attrs['units'] == "%" else: assert result[band_name].attrs['units'] == "K" @staticmethod def _assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test): from satpy.dataset.data_dict import get_key from satpy.tests.utils import make_dsq other_resolutions = RESOLUTION_LIST.copy() other_resolutions.remove(resolution_to_test) for 
band_name in band_names: for resolution in other_resolutions: ds_q = make_dsq(name=band_name, resolution=resolution) with pytest.raises(KeyError): _ = get_key(ds_q, available_datasets, num_results=0, best=False) ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) if band_name < 'C07': assert len(res) == 2 else: assert len(res) == 3 satpy-0.34.0/satpy/tests/reader_tests/test_ahi_hrit.py000066400000000000000000000366571420401153000231330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The hrit ahi reader tests package.""" import unittest from unittest import mock import dask.array as da import numpy as np from xarray import DataArray class TestHRITJMAFileHandler(unittest.TestCase): """Test the HRITJMAFileHandler.""" @mock.patch('satpy.readers.hrit_jma.HRITFileHandler.__init__') def _get_reader(self, mocked_init, mda, filename_info=None, filetype_info=None, reader_kwargs=None): from satpy.readers.hrit_jma import HRITJMAFileHandler if not filename_info: filename_info = {} if not filetype_info: filetype_info = {} if not reader_kwargs: reader_kwargs = {} HRITJMAFileHandler.filename = 'filename' HRITJMAFileHandler.mda = mda HRITJMAFileHandler._start_time = filename_info.get('start_time') return HRITJMAFileHandler('filename', filename_info, filetype_info, **reader_kwargs) def _get_acq_time(self, nlines): """Get sample header entry for scanline acquisition times. Lines: 1, 21, 41, 61, ..., nlines Times: 1970-01-01 00:00 + (1, 21, 41, 61, ..., nlines) seconds So the interpolated times are expected to be 1970-01-01 + (1, 2, 3, 4, ..., nlines) seconds. Note that there will be some floating point inaccuracies, because timestamps are stored with only 6 decimals precision. 
""" mjd_1970 = 40587.0 lines_sparse = np.array(list(range(1, nlines, 20)) + [nlines]) times_sparse = mjd_1970 + lines_sparse / 24 / 3600 acq_time_s = ['LINE:={}\rTIME:={:.6f}\r'.format(line, time) for line, time in zip(lines_sparse, times_sparse)] acq_time_b = ''.join(acq_time_s).encode() return acq_time_b def _get_mda(self, loff=5500.0, coff=5500.0, nlines=11000, ncols=11000, segno=0, numseg=1, vis=True, platform='Himawari-8'): """Create metadata dict like HRITFileHandler would do it.""" if vis: idf = b'$HALFTONE:=16\r_NAME:=VISIBLE\r_UNIT:=ALBEDO(%)\r' \ b'0:=-0.10\r1023:=100.00\r65535:=100.00\r' else: idf = b'$HALFTONE:=16\r_NAME:=INFRARED\r_UNIT:=KELVIN\r' \ b'0:=329.98\r1023:=130.02\r65535:=130.02\r' proj_h8 = b'GEOS(140.70) ' proj_mtsat2 = b'GEOS(145.00) ' proj_name = proj_h8 if platform == 'Himawari-8' else proj_mtsat2 return {'image_segm_seq_no': segno, 'total_no_image_segm': numseg, 'projection_name': proj_name, 'projection_parameters': { 'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, }, 'cfac': 10233128, 'lfac': 10233128, 'coff': coff, 'loff': loff, 'number_of_columns': ncols, 'number_of_lines': nlines, 'image_data_function': idf, 'image_observation_time': self._get_acq_time(nlines)} def test_init(self): """Test creating the file handler.""" from satpy.readers.hrit_jma import HIMAWARI8, UNKNOWN_AREA # Test addition of extra metadata mda = self._get_mda() mda_expected = mda.copy() mda_expected.update( {'planned_end_segment_number': 1, 'planned_start_segment_number': 1, 'segment_sequence_number': 0, 'unit': 'ALBEDO(%)'}) mda_expected['projection_parameters']['SSP_longitude'] = 140.7 reader = self._get_reader(mda=mda) self.assertEqual(reader.mda, mda_expected) # Check projection name self.assertEqual(reader.projection_name, 'GEOS(140.70)') # Check calibration table cal_expected = np.array([[0, -0.1], [1023, 100], [65535, 100]]) self.assertTrue(np.all(reader.calibration_table == cal_expected)) # Check if scanline timestamps are there (dedicated test below) self.assertIsInstance(reader.acq_time, np.ndarray) # Check platform self.assertEqual(reader.platform, HIMAWARI8) # Check is_segmented attribute expected = {0: False, 1: True, 8: True} for segno, is_segmented in expected.items(): mda = self._get_mda(segno=segno) reader = self._get_reader(mda=mda) self.assertEqual(reader.is_segmented, is_segmented) # Check area IDs expected = [ ({'area': 1}, 1), ({'area': 1234}, UNKNOWN_AREA), ({}, UNKNOWN_AREA) ] mda = self._get_mda() for filename_info, area_id in expected: reader = self._get_reader(mda=mda, filename_info=filename_info) self.assertEqual(reader.area_id, area_id) @mock.patch('satpy.readers.hrit_jma.HRITJMAFileHandler.__init__') def test_get_platform(self, mocked_init): """Test platform identification.""" from satpy.readers.hrit_jma import PLATFORMS, UNKNOWN_PLATFORM, HRITJMAFileHandler mocked_init.return_value = None reader = HRITJMAFileHandler() for proj_name, platform in PLATFORMS.items(): reader.projection_name = proj_name self.assertEqual(reader._get_platform(), platform) with mock.patch('logging.Logger.error') as mocked_log: reader.projection_name = 'invalid' self.assertEqual(reader._get_platform(), UNKNOWN_PLATFORM) mocked_log.assert_called() def test_get_area_def(self): """Test getting an AreaDefinition.""" from satpy.readers.hrit_jma import AREA_NAMES, FULL_DISK, NORTH_HEMIS, SOUTH_HEMIS cases = [ # Non-segmented, full disk {'loff': 1375.0, 'coff': 1375.0, 'nlines': 2750, 'ncols': 2750, 'segno': 0, 'numseg': 1, 'area': FULL_DISK, 'extent': (-5498000.088960204, 
-5498000.088960204, 5502000.089024927, 5502000.089024927)}, # Non-segmented, northern hemisphere {'loff': 1325.0, 'coff': 1375.0, 'nlines': 1375, 'ncols': 2750, 'segno': 0, 'numseg': 1, 'area': NORTH_HEMIS, 'extent': (-5498000.088960204, -198000.00320373234, 5502000.089024927, 5302000.085788833)}, # Non-segmented, southern hemisphere {'loff': 50, 'coff': 1375.0, 'nlines': 1375, 'ncols': 2750, 'segno': 0, 'numseg': 1, 'area': SOUTH_HEMIS, 'extent': (-5498000.088960204, -5298000.085724112, 5502000.089024927, 202000.0032684542)}, # Segmented, segment #1 {'loff': 1375.0, 'coff': 1375.0, 'nlines': 275, 'ncols': 2750, 'segno': 1, 'numseg': 10, 'area': FULL_DISK, 'extent': (-5498000.088960204, 4402000.071226413, 5502000.089024927, 5502000.089024927)}, # Segmented, segment #7 {'loff': 1375.0, 'coff': 1375.0, 'nlines': 275, 'ncols': 2750, 'segno': 7, 'numseg': 10, 'area': FULL_DISK, 'extent': (-5498000.088960204, -2198000.035564665, 5502000.089024927, -1098000.0177661523)}, ] for case in cases: mda = self._get_mda(loff=case['loff'], coff=case['coff'], nlines=case['nlines'], ncols=case['ncols'], segno=case['segno'], numseg=case['numseg']) reader = self._get_reader(mda=mda, filename_info={'area': case['area']}) area = reader.get_area_def('some_id') self.assertTupleEqual(area.area_extent, case['extent']) self.assertEqual(area.description, AREA_NAMES[case['area']]['long']) def test_calibrate(self): """Test calibration.""" # Generate test data counts = np.linspace(0, 1200, 25).reshape(5, 5) counts[-1, -1] = 65535 counts = DataArray(da.from_array(counts, chunks=5)) refl = np.array( [[-0.1, 4.79247312, 9.68494624, 14.57741935, 19.46989247], [24.36236559, 29.25483871, 34.14731183, 39.03978495, 43.93225806], [48.82473118, 53.7172043, 58.60967742, 63.50215054, 68.39462366], [73.28709677, 78.17956989, 83.07204301, 87.96451613, 92.85698925], [97.74946237, 100., 100., 100., np.nan]] ) bt = np.array( [[329.98, 320.20678397, 310.43356794, 300.66035191, 290.88713587], [281.11391984, 271.34070381, 261.56748778, 251.79427175, 242.02105572], [232.24783969, 222.47462366, 212.70140762, 202.92819159, 193.15497556], [183.38175953, 173.6085435, 163.83532747, 154.06211144, 144.28889541], [134.51567937, 130.02, 130.02, 130.02, np.nan]] ) # Choose an area near the subsatellite point to avoid masking # of space pixels mda = self._get_mda(nlines=5, ncols=5, loff=1375.0, coff=1375.0, segno=0) reader = self._get_reader(mda=mda) # 1. Counts res = reader.calibrate(data=counts, calibration='counts') self.assertTrue(np.all(counts.values == res.values)) # 2. Reflectance res = reader.calibrate(data=counts, calibration='reflectance') np.testing.assert_allclose(refl, res.values) # also compares NaN # 3. 
Brightness temperature mda_bt = self._get_mda(nlines=5, ncols=5, loff=1375.0, coff=1375.0, segno=0, vis=False) reader_bt = self._get_reader(mda=mda_bt) res = reader_bt.calibrate(data=counts, calibration='brightness_temperature') np.testing.assert_allclose(bt, res.values) # also compares NaN def test_mask_space(self): """Test masking of space pixels.""" mda = self._get_mda(loff=1375.0, coff=1375.0, nlines=275, ncols=1375, segno=1, numseg=10) reader = self._get_reader(mda=mda) data = DataArray(da.ones((275, 1375), chunks=1024)) masked = reader._mask_space(data) # First line of the segment should be space, in the middle of the # last line there should be some valid pixels np.testing.assert_allclose(masked.values[0, :], np.nan) self.assertTrue(np.all(masked.values[-1, 588:788] == 1)) @mock.patch('satpy.readers.hrit_jma.HRITFileHandler.get_dataset') def test_get_dataset(self, base_get_dataset): """Test getting a dataset.""" from satpy.readers.hrit_jma import HIMAWARI8 mda = self._get_mda(loff=1375.0, coff=1375.0, nlines=275, ncols=1375, segno=1, numseg=10) reader = self._get_reader(mda=mda) key = mock.MagicMock() key.calibration = 'reflectance' base_get_dataset.return_value = DataArray(da.ones((275, 1375), chunks=1024), dims=('y', 'x')) # Check attributes res = reader.get_dataset(key, {'units': '%', 'sensor': 'ahi'}) self.assertEqual(res.attrs['units'], '%') self.assertEqual(res.attrs['sensor'], 'ahi') self.assertEqual(res.attrs['platform_name'], HIMAWARI8) self.assertEqual(res.attrs['satellite_longitude'], 140.7) self.assertEqual(res.attrs['satellite_latitude'], 0.) self.assertEqual(res.attrs['satellite_altitude'], 35785831.0) self.assertDictEqual(res.attrs['orbital_parameters'], {'projection_longitude': 140.7, 'projection_latitude': 0., 'projection_altitude': 35785831.0}) # Check if acquisition time is a coordinate self.assertIn('acq_time', res.coords) # Check called methods with mock.patch.object(reader, '_mask_space') as mask_space: with mock.patch.object(reader, 'calibrate') as calibrate: reader.get_dataset(key, {'units': '%', 'sensor': 'ahi'}) mask_space.assert_called() calibrate.assert_called() with mock.patch('logging.Logger.error') as log_mock: reader.get_dataset(key, {'units': '%', 'sensor': 'jami'}) log_mock.assert_called() def test_mjd2datetime64(self): """Test conversion from modified julian day to datetime64.""" from satpy.readers.hrit_jma import mjd2datetime64 self.assertEqual(mjd2datetime64(np.array([0])), np.datetime64('1858-11-17', 'us')) self.assertEqual(mjd2datetime64(np.array([40587.5])), np.datetime64('1970-01-01 12:00', 'us')) def test_get_acq_time(self): """Test computation of scanline acquisition times.""" dt_line = np.arange(1, 11000+1).astype('timedelta64[s]') acq_time_exp = np.datetime64('1970-01-01', 'us') + dt_line for platform in ['Himawari-8', 'MTSAT-2']: # Results are not exactly identical because timestamps are stored in # the header with only 6 decimals precision (max diff here: 45 msec). 
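# One unit in the sixth decimal place of an MJD is 86400 s / 1e6 = 86.4 ms, so
# rounding the stored values can shift a timestamp by up to ~43 ms; the
# atol=45000 (microseconds) below therefore covers the worst case.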
mda = self._get_mda(platform=platform) reader = self._get_reader(mda=mda) np.testing.assert_allclose(reader.acq_time.astype(np.int64), acq_time_exp.astype(np.int64), atol=45000) def test_start_time_from_filename(self): """Test that by default the datetime in the filename is returned.""" import datetime as dt start_time = dt.datetime(2022, 1, 20, 12, 10) for platform in ['Himawari-8', 'MTSAT-2']: mda = self._get_mda(platform=platform) reader = self._get_reader( mda=mda, filename_info={'start_time': start_time}) assert reader._start_time == start_time def test_start_time_from_aqc_time(self): """Test that the datetime from the metadata is returned when `use_acquisition_time_as_start_time=True`.""" import datetime as dt start_time = dt.datetime(2022, 1, 20, 12, 10) for platform in ['Himawari-8', 'MTSAT-2']: mda = self._get_mda(platform=platform) reader = self._get_reader( mda=mda, filename_info={'start_time': start_time}, reader_kwargs={'use_acquisition_time_as_start_time': True}) assert reader.start_time == reader.acq_time[0].astype(dt.datetime) satpy-0.34.0/satpy/tests/reader_tests/test_ahi_hsd.py000066400000000000000000000505041420401153000227260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see .
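# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the satpy test suite), relating to the
# HRIT/JMA acquisition-time tests above: timestamps are stored as fractional
# modified Julian days, where MJD 0 is 1858-11-17 and MJD 40587 is 1970-01-01.
# The helper name below is hypothetical.
import numpy as np


def _mjd_to_datetime64(mjd):
    """Convert fractional modified Julian days to datetime64[us]."""
    microseconds = (np.asarray(mjd) * 86400 * 1e6).astype('timedelta64[us]')
    return np.datetime64('1858-11-17', 'us') + microseconds


assert _mjd_to_datetime64(40587.5) == np.datetime64('1970-01-01 12:00', 'us')
# ---------------------------------------------------------------------------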
"""The ahi_hsd reader tests package.""" import unittest import warnings from datetime import datetime from unittest import mock import dask.array as da import numpy as np from satpy.readers.ahi_hsd import AHIHSDFileHandler from satpy.readers.utils import get_geostationary_mask class TestAHIHSDNavigation(unittest.TestCase): """Test the AHI HSD reader navigation.""" @mock.patch('satpy.readers.ahi_hsd.np2str') @mock.patch('satpy.readers.ahi_hsd.np.fromfile') def test_region(self, fromfile, np2str): """Test region navigation.""" from pyresample.utils import proj4_radius_parameters np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): fh = AHIHSDFileHandler('somefile', {'segment': 1, 'total_segments': 1}, filetype_info={'file_type': 'hsd_b01'}, user_calibration=None) fh.proj_info = {'CFAC': 40932549, 'COFF': -591.5, 'LFAC': 40932549, 'LOFF': 5132.5, 'blocklength': 127, 'coeff_for_sd': 1737122264.0, 'distance_from_earth_center': 42164.0, 'earth_equatorial_radius': 6378.137, 'earth_polar_radius': 6356.7523, 'hblock_number': 3, 'req2_rpol2': 1.006739501, 'req2_rpol2_req2': 0.0066943844, 'resampling_size': 4, 'resampling_types': 0, 'rpol2_req2': 0.993305616, 'spare': '', 'sub_lon': 140.7} fh.data_info = {'blocklength': 50, 'compression_flag_for_data': 0, 'hblock_number': 2, 'number_of_bits_per_pixel': 16, 'number_of_columns': 1000, 'number_of_lines': 1000, 'spare': ''} area_def = fh.get_area_def(None) proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378137.0) self.assertEqual(b, 6356752.3) self.assertEqual(proj_dict['h'], 35785863.0) self.assertEqual(proj_dict['lon_0'], 140.7) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') np.testing.assert_allclose(area_def.area_extent, (592000.0038256242, 4132000.0267018233, 1592000.0102878273, 5132000.033164027)) @mock.patch('satpy.readers.ahi_hsd.np2str') @mock.patch('satpy.readers.ahi_hsd.np.fromfile') def test_segment(self, fromfile, np2str): """Test segment navigation.""" from pyresample.utils import proj4_radius_parameters np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): fh = AHIHSDFileHandler('somefile', {'segment': 8, 'total_segments': 10}, filetype_info={'file_type': 'hsd_b01'}) fh.proj_info = {'CFAC': 40932549, 'COFF': 5500.5, 'LFAC': 40932549, 'LOFF': 5500.5, 'blocklength': 127, 'coeff_for_sd': 1737122264.0, 'distance_from_earth_center': 42164.0, 'earth_equatorial_radius': 6378.137, 'earth_polar_radius': 6356.7523, 'hblock_number': 3, 'req2_rpol2': 1.006739501, 'req2_rpol2_req2': 0.0066943844, 'resampling_size': 4, 'resampling_types': 0, 'rpol2_req2': 0.993305616, 'spare': '', 'sub_lon': 140.7} fh.data_info = {'blocklength': 50, 'compression_flag_for_data': 0, 'hblock_number': 2, 'number_of_bits_per_pixel': 16, 'number_of_columns': 11000, 'number_of_lines': 1100, 'spare': ''} area_def = fh.get_area_def(None) proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378137.0) self.assertEqual(b, 6356752.3) self.assertEqual(proj_dict['h'], 35785863.0) self.assertEqual(proj_dict['lon_0'], 140.7) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') np.testing.assert_allclose(area_def.area_extent, (-5500000.035542117, -3300000.021325271, 5500000.035542117, -2200000.0142168473)) class TestAHIHSDFileHandler(unittest.TestCase): """Test case for the file reading.""" def 
new_unzip(fname): """Fake unzipping.""" if fname[-3:] == 'bz2': return fname[:-4] return fname @staticmethod def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None): if filename_info is None: filename_info = {'segment': 8, 'total_segments': 10} if filetype_info is None: filetype_info = {'file_type': 'hsd_b01'} fh = AHIHSDFileHandler(in_fname, filename_info, filetype_info) # Check that the filename is altered for bz2 format files assert in_fname != fh.filename fh.proj_info = { 'CFAC': 40932549, 'COFF': 5500.5, 'LFAC': 40932549, 'LOFF': 5500.5, 'blocklength': 127, 'coeff_for_sd': 1737122264.0, 'distance_from_earth_center': 42164.0, 'earth_equatorial_radius': 6378.137, 'earth_polar_radius': 6356.7523, 'hblock_number': 3, 'req2_rpol2': 1.006739501, 'req2_rpol2_req2': 0.0066943844, 'resampling_size': 4, 'resampling_types': 0, 'rpol2_req2': 0.993305616, 'spare': '', 'sub_lon': 140.7 } fh.nav_info = { 'SSP_longitude': 140.66, 'SSP_latitude': 0.03, 'distance_earth_center_to_satellite': 42165.04, 'nadir_longitude': 140.67, 'nadir_latitude': 0.04 } fh.data_info = { 'blocklength': 50, 'compression_flag_for_data': 0, 'hblock_number': 2, 'number_of_bits_per_pixel': 16, 'number_of_columns': 11000, 'number_of_lines': 1100, 'spare': '' } fh.basic_info = { 'observation_area': np.array(['FLDK']), 'observation_start_time': np.array([58413.12523839]), 'observation_end_time': np.array([58413.12562439]), 'observation_timeline': np.array([300]), } fh.observation_area = fh.basic_info['observation_area'] return fh @mock.patch('satpy.readers.ahi_hsd.np2str') @mock.patch('satpy.readers.ahi_hsd.np.fromfile') @mock.patch('satpy.readers.ahi_hsd.unzip_file', mock.MagicMock(side_effect=new_unzip)) def setUp(self, fromfile, np2str): """Create a test file handler.""" np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): # Check if file handler raises exception for invalid calibration mode with self.assertRaises(ValueError): AHIHSDFileHandler('somefile', {'segment': 8, 'total_segments': 10}, filetype_info={'file_type': 'hsd_b01'}, calib_mode='BAD_MODE') in_fname = 'test_file.bz2' self.fh = self._create_fake_file_handler(in_fname) def test_time_properties(self): """Test start/end/scheduled time properties.""" self.assertEqual(self.fh.start_time, datetime(2018, 10, 22, 3, 0, 20, 596896)) self.assertEqual(self.fh.end_time, datetime(2018, 10, 22, 3, 0, 53, 947296)) self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 0, 0, 0)) def test_scanning_frequencies(self): """Test scanning frequencies.""" self.fh.observation_area = 'JP04' self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 7, 30, 0)) self.fh.observation_area = 'R304' self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 7, 30, 0)) self.fh.observation_area = 'R420' self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 9, 30, 0)) self.fh.observation_area = 'R520' self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 9, 30, 0)) self.fh.observation_area = 'FLDK' self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 0, 0, 0)) @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_header') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.calibrate') def test_read_band(self, calibrate, *mocks): """Test masking of space pixels.""" nrows = 25 ncols = 100 
self.fh.data_info['number_of_columns'] = ncols self.fh.data_info['number_of_lines'] = nrows calibrate.return_value = np.ones((nrows, ncols)) m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): im = self.fh.read_band(info=mock.MagicMock(), key=mock.MagicMock()) # Note: Within the earth's shape get_geostationary_mask() is True but the numpy.ma mask # is False mask = im.to_masked_array().mask ref_mask = np.logical_not(get_geostationary_mask(self.fh.area).compute()) self.assertTrue(np.all(mask == ref_mask)) # Test attributes orb_params_exp = {'projection_longitude': 140.7, 'projection_latitude': 0., 'projection_altitude': 35785863.0, 'satellite_actual_longitude': 140.66, 'satellite_actual_latitude': 0.03, 'nadir_longitude': 140.67, 'nadir_latitude': 0.04} self.assertTrue(set(orb_params_exp.items()).issubset(set(im.attrs['orbital_parameters'].items()))) self.assertTrue(np.isclose(im.attrs['orbital_parameters']['satellite_actual_altitude'], 35786903.00581372)) # Test if masking space pixels disables with appropriate flag self.fh.mask_space = False with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_space') as mask_space: self.fh.read_band(info=mock.MagicMock(), key=mock.MagicMock()) mask_space.assert_not_called() @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_header') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.calibrate') def test_scene_loading(self, calibrate, *mocks): """Test masking of space pixels.""" from satpy import Scene nrows = 25 ncols = 100 calibrate.return_value = np.ones((nrows, ncols)) m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True), \ mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler') as fh_cls: fh_cls.return_value = self.fh self.fh.filename_info['total_segments'] = 1 self.fh.filename_info['segment'] = 1 self.fh.data_info['number_of_columns'] = ncols self.fh.data_info['number_of_lines'] = nrows scn = Scene(reader='ahi_hsd', filenames=['HS_H08_20210225_0700_B07_FLDK_R20_S0110.DAT']) scn.load(['B07']) im = scn['B07'] # Make sure space masking worked mask = im.to_masked_array().mask ref_mask = np.logical_not(get_geostationary_mask(self.fh.area).compute()) self.assertTrue(np.all(mask == ref_mask)) def test_blocklen_error(self, *mocks): """Test erraneous blocklength.""" open_name = '%s.open' % __name__ fpos = 50 with mock.patch(open_name, create=True) as mock_open: with mock_open(mock.MagicMock(), 'r') as fp_: # Expected and actual blocklength match fp_.tell.return_value = 50 with warnings.catch_warnings(record=True) as w: self.fh._check_fpos(fp_, fpos, 0, 'header 1') self.assertTrue(len(w) == 0) # Expected and actual blocklength do not match fp_.tell.return_value = 100 with warnings.catch_warnings(record=True) as w: self.fh._check_fpos(fp_, fpos, 0, 'header 1') self.assertTrue(len(w) > 0) @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._check_fpos') def test_read_header(self, *mocks): """Test header reading.""" nhdr = [ {'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0, 'band_number': [4]}, {'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0, 'numof_correction_info_data': [1]}, {'blocklength': 0}, {'blocklength': 0, 'number_of_observation_times': [1]}, {'blocklength': 0}, {'blocklength': 0, 'number_of_error_info_data': [1]}, {'blocklength': 0}, {'blocklength': 0}] with 
mock.patch('numpy.fromfile', side_effect=nhdr): self.fh._read_header(mock.MagicMock()) class TestAHICalibration(unittest.TestCase): """Test case for various AHI calibration types.""" @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.__init__', return_value=None) def setUp(self, *mocks): """Create fake data for testing.""" self.def_cali = [-0.0037, 15.20] self.upd_cali = [-0.0074, 30.40] self.bad_cali = [0.0, 0.0] fh = AHIHSDFileHandler(filetype_info={'file_type': 'hsd_b01'}) fh.calib_mode = 'NOMINAL' fh.user_calibration = None fh.is_zipped = False fh._header = { 'block5': {'band_number': [5], 'gain_count2rad_conversion': [self.def_cali[0]], 'offset_count2rad_conversion': [self.def_cali[1]], 'central_wave_length': [10.4073], }, 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], 'speed_of_light': [299792458.0], 'planck_constant': [6.62606957e-34], 'boltzmann_constant': [1.3806488e-23], 'c0_rad2tb_conversion': [-0.116127314574], 'c1_rad2tb_conversion': [1.00099153832], 'c2_rad2tb_conversion': [-1.76961091571e-06], 'cali_gain_count2rad_conversion': [self.upd_cali[0]], 'cali_offset_count2rad_conversion': [self.upd_cali[1]]}, } self.counts = da.array(np.array([[0., 1000.], [2000., 5000.]])) self.fh = fh def test_default_calibrate(self, *mocks): """Test default in-file calibration modes.""" self.setUp() # Counts self.assertEqual(self.fh.calibrate(data=123, calibration='counts'), 123) # Radiance rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) rad = self.fh.calibrate(data=self.counts, calibration='radiance') self.assertTrue(np.allclose(rad, rad_exp)) # Brightness Temperature bt_exp = np.array([[330.978979, 310.524688], [285.845017, np.nan]]) bt = self.fh.calibrate(data=self.counts, calibration='brightness_temperature') np.testing.assert_allclose(bt, bt_exp) # Reflectance refl_exp = np.array([[2.92676, 2.214325], [1.50189, 0.]]) refl = self.fh.calibrate(data=self.counts, calibration='reflectance') self.assertTrue(np.allclose(refl, refl_exp)) def test_updated_calibrate(self): """Test updated in-file calibration modes.""" # Standard operation self.fh.calib_mode = 'UPDATE' rad_exp = np.array([[30.4, 23.0], [15.6, -6.6]]) rad = self.fh.calibrate(data=self.counts, calibration='radiance') self.assertTrue(np.allclose(rad, rad_exp)) # Case for no updated calibration available (older data) self.fh._header = { 'block5': {'band_number': [5], 'gain_count2rad_conversion': [self.def_cali[0]], 'offset_count2rad_conversion': [self.def_cali[1]], 'central_wave_length': [10.4073], }, 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], 'speed_of_light': [299792458.0], 'planck_constant': [6.62606957e-34], 'boltzmann_constant': [1.3806488e-23], 'c0_rad2tb_conversion': [-0.116127314574], 'c1_rad2tb_conversion': [1.00099153832], 'c2_rad2tb_conversion': [-1.76961091571e-06], 'cali_gain_count2rad_conversion': [self.bad_cali[0]], 'cali_offset_count2rad_conversion': [self.bad_cali[1]]}, } rad = self.fh.calibrate(data=self.counts, calibration='radiance') rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) self.assertTrue(np.allclose(rad, rad_exp)) def test_user_calibration(self): """Test user-defined calibration modes.""" # This is for radiance correction self.fh.user_calibration = {'B13': {'slope': 0.95, 'offset': -0.1}} self.fh.band_name = 'B13' rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() rad_exp = np.array([[16.10526316, 12.21052632], [8.31578947, -3.36842105]]) self.assertTrue(np.allclose(rad, rad_exp)) # This is for DN calibration self.fh.user_calibration = {'B13': {'slope': 
-0.0032, 'offset': 15.20}, 'type': 'DN'} self.fh.band_name = 'B13' rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() rad_exp = np.array([[15.2, 12.], [8.8, -0.8]]) self.assertTrue(np.allclose(rad, rad_exp)) satpy-0.34.0/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py000066400000000000000000000265221420401153000251430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The ahi_l1b_gridded_bin reader tests package.""" import os import shutil import tempfile import unittest from unittest import mock import dask.array as da import numpy as np from pyresample.geometry import AreaDefinition from satpy.readers.ahi_l1b_gridded_bin import AHI_LUT_NAMES, AHIGriddedFileHandler class TestAHIGriddedArea(unittest.TestCase): """Test the AHI gridded reader definition.""" def setUp(self): """Create fake data for testing.""" self.FULLDISK_SIZES = {0.005: {'x_size': 24000, 'y_size': 24000}, 0.01: {'x_size': 12000, 'y_size': 12000}, 0.02: {'x_size': 6000, 'y_size': 6000}} self.AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] @staticmethod def make_fh(filetype, area='fld'): """Create a test file handler.""" m = mock.mock_open() with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): fh = AHIGriddedFileHandler('somefile', {'area': area}, filetype_info={'file_type': filetype}) return fh def test_low_res(self): """Check size of the low resolution (2km) grid.""" tmp_fh = self.make_fh('tir.01') self.assertEqual(self.FULLDISK_SIZES[0.02]['x_size'], tmp_fh.ncols) self.assertEqual(self.FULLDISK_SIZES[0.02]['y_size'], tmp_fh.nlines) def test_med_res(self): """Check size of the low resolution (1km) grid.""" tmp_fh = self.make_fh('vis.02') self.assertEqual(self.FULLDISK_SIZES[0.01]['x_size'], tmp_fh.ncols) self.assertEqual(self.FULLDISK_SIZES[0.01]['y_size'], tmp_fh.nlines) def test_hi_res(self): """Check size of the low resolution (0.5km) grid.""" tmp_fh = self.make_fh('ext.01') self.assertEqual(self.FULLDISK_SIZES[0.005]['x_size'], tmp_fh.ncols) self.assertEqual(self.FULLDISK_SIZES[0.005]['y_size'], tmp_fh.nlines) def test_area_def(self): """Check that a valid full disk area is produced.""" good_area = AreaDefinition('gridded_himawari', 'A gridded Himawari area', 'longlat', 'EPSG:4326', self.FULLDISK_SIZES[0.01]['x_size'], self.FULLDISK_SIZES[0.01]['y_size'], self.AHI_FULLDISK_EXTENT) tmp_fh = self.make_fh('vis.01') tmp_fh.get_area_def(None) self.assertEqual(tmp_fh.area, good_area) def test_bad_area(self): """Ensure an error is raised for an usupported area.""" tmp_fh = self.make_fh('ext.01') tmp_fh.areaname = 'scanning' with self.assertRaises(NotImplementedError): tmp_fh.get_area_def(None) with self.assertRaises(NotImplementedError): self.make_fh('ext.01', area='scanning') class TestAHIGriddedFileCalibration(unittest.TestCase): """Test case for the file calibration types.""" def setUp(self): """Create a test 
file handler.""" m = mock.mock_open() with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): in_fname = 'test_file' fh = AHIGriddedFileHandler(in_fname, {'area': 'fld'}, filetype_info={'file_type': 'tir.01'}) self.fh = fh @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._get_luts') @mock.patch('satpy.readers.ahi_l1b_gridded_bin.os.path.exists') @mock.patch('satpy.readers.ahi_l1b_gridded_bin.np.loadtxt') def test_calibrate(self, np_loadtxt, os_exist, get_luts): """Test the calibration modes of AHI using the LUTs.""" load_return = np.squeeze(np.dstack([np.arange(0, 2048, 1), np.arange(0, 120, 0.05859375)])) np_loadtxt.return_value = load_return get_luts.return_value = True in_data = np.array([[100., 300., 500.], [800., 1500., 2040.]]) refl_out = np.array([[5.859375, 17.578125, 29.296875], [46.875, 87.890625, 119.53125]]) os_exist.return_value = False # Check that the LUT download is called if we don't have the LUTS self.fh.calibrate(in_data, 'reflectance') get_luts.assert_called() os_exist.return_value = True # Ensure results equal if no calibration applied out_data = self.fh.calibrate(in_data, 'counts') np.testing.assert_equal(in_data, out_data) # Now ensure results equal if LUT calibration applied out_data = self.fh.calibrate(in_data, 'reflectance') np.testing.assert_allclose(refl_out, out_data) # Check that exception is raised if bad calibration is passed with self.assertRaises(NotImplementedError): self.fh.calibrate(in_data, 'lasers') # Check that exception is raised if no file is present np_loadtxt.side_effect = FileNotFoundError with self.assertRaises(FileNotFoundError): self.fh.calibrate(in_data, 'reflectance') class TestAHIGriddedFileHandler(unittest.TestCase): """Test case for the file reading.""" def new_unzip(fname): """Fake unzipping.""" if fname[-3:] == 'bz2': return fname[:-4] @mock.patch('satpy.readers.ahi_l1b_gridded_bin.unzip_file', mock.MagicMock(side_effect=new_unzip)) def setUp(self): """Create a test file handler.""" m = mock.mock_open() with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): in_fname = 'test_file.bz2' fh = AHIGriddedFileHandler(in_fname, {'area': 'fld'}, filetype_info={'file_type': 'tir.01'}) # Check that the filename is altered for bz2 format files self.assertNotEqual(in_fname, fh.filename) self.fh = fh key = {'calibration': 'counts', 'name': 'vis.01'} info = {'units': 'unitless', 'standard_name': 'vis.01', 'wavelength': 10.8, 'resolution': 0.05} self.key = key self.info = info @mock.patch('satpy.readers.ahi_l1b_gridded_bin.np.memmap') def test_dataread(self, memmap): """Check that a dask array is returned from the read function.""" test_arr = np.zeros((10, 10)) memmap.return_value = test_arr m = mock.mock_open() with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): res = self.fh._read_data(mock.MagicMock()) np.testing.assert_allclose(res, da.from_array(test_arr)) @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._read_data') def test_get_dataset(self, mocked_read): """Check that a good dataset is returned on request.""" m = mock.mock_open() out_data = np.array([[100., 300., 500.], [800., 1500., 2040.]]) mocked_read.return_value = out_data with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): res = self.fh.get_dataset(self.key, self.info) mocked_read.assert_called() # Check output data is correct np.testing.assert_allclose(res.values, out_data) # Also check a couple of attributes self.assertEqual(res.attrs['name'], self.key['name']) 
self.assertEqual(res.attrs['wavelength'], self.info['wavelength']) @mock.patch('os.path.exists', return_value=True) @mock.patch('os.remove') def test_destructor(self, exist_patch, remove_patch): """Check that file handler deletes files if needed.""" del self.fh remove_patch.assert_called() class TestAHIGriddedLUTs(unittest.TestCase): """Test case for the downloading and preparing LUTs.""" def mocked_ftp_dl(fname): """Fake download of LUT tar file by creating a local tar.""" import os import tarfile import tempfile with tarfile.open(fname, "w:gz") as tar_handle: for namer in AHI_LUT_NAMES: tmpf = os.path.join(tempfile.tempdir, namer) with open(tmpf, 'w') as tmp_fid: tmp_fid.write("TEST\n") tar_handle.add(tmpf, arcname='count2tbb_v102/'+namer) os.remove(tmpf) def setUp(self): """Create a test file handler.""" m = mock.mock_open() with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): in_fname = 'test_file' fh = AHIGriddedFileHandler(in_fname, {'area': 'fld'}, filetype_info={'file_type': 'tir.01'}) self.fh = fh key = {'calibration': 'counts', 'name': 'vis.01'} info = {'units': 'unitless', 'standard_name': 'vis.01', 'wavelength': 10.8, 'resolution': 0.05} self.key = key self.info = info def tearDown(self): """Remove files and directories created by the tests.""" for lut_name in AHI_LUT_NAMES: tmp_filename = os.path.join(self.fh.lut_dir, lut_name) if os.path.isfile(tmp_filename): os.remove(tmp_filename) if os.path.isdir(self.fh.lut_dir): shutil.rmtree(self.fh.lut_dir) @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._download_luts', mock.MagicMock(side_effect=mocked_ftp_dl)) def test_get_luts(self): """Check that the function to download LUTs operates successfully.""" tempdir = tempfile.gettempdir() print(self.fh.lut_dir) self.fh._get_luts() self.assertFalse(os.path.exists(os.path.join(tempdir, 'count2tbb_v102/'))) for lut_name in AHI_LUT_NAMES: self.assertTrue(os.path.isfile(os.path.join(self.fh.lut_dir, lut_name))) @mock.patch('urllib.request.urlopen') @mock.patch('shutil.copyfileobj') def test_download_luts(self, mock_dl, mock_shutil): """Test that the FTP library is called for downloading LUTS.""" m = mock.mock_open() with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): self.fh._download_luts('/test_file') mock_dl.assert_called() mock_shutil.assert_called() satpy-0.34.0/satpy/tests/reader_tests/test_ami_l1b.py000066400000000000000000000374771420401153000226510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
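# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the satpy test suite): one plausible reading
# of the count-to-reflectance lookup table mocked via np.loadtxt in
# TestAHIGriddedFileCalibration.test_calibrate above -- a (2048, 2) table of
# (count, value) pairs applied as a direct index lookup.  It reproduces the
# expected reflectances from that test.
import numpy as np

_lut = np.stack([np.arange(0, 2048, 1), np.arange(0, 120, 0.05859375)], axis=-1)
_counts = np.array([[100, 300, 500], [800, 1500, 2040]])
_reflectance = _lut[_counts, 1]
assert np.allclose(_reflectance, [[5.859375, 17.578125, 29.296875],
                                  [46.875, 87.890625, 119.53125]])
# ---------------------------------------------------------------------------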
"""The ami_l1b reader tests package.""" import unittest from unittest import mock import dask.array as da import numpy as np import xarray as xr class FakeDataset(object): """Mimic xarray Dataset object.""" def __init__(self, info, attrs): """Initialize test data.""" for var_name, var_data in list(info.items()): if isinstance(var_data, np.ndarray): info[var_name] = xr.DataArray(var_data) self.info = info self.attrs = attrs def __getitem__(self, key): """Mimic getitem method.""" return self.info[key] def __contains__(self, key): """Mimic contains method.""" return key in self.info def rename(self, *args, **kwargs): """Mimic rename method.""" return self def close(self): """Act like close method.""" return class TestAMIL1bNetCDFBase(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" @mock.patch('satpy.readers.ami_l1b.xr') def setUp(self, xr_, counts=None): """Create a fake dataset using the given counts data.""" from satpy.readers.ami_l1b import AMIL1bNetCDF if counts is None: rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) counts = xr.DataArray( da.from_array(rad_data, chunks='auto'), dims=('y', 'x'), attrs={ 'channel_name': "VI006", 'detector_side': 2, 'number_of_total_pixels': 484000000, 'number_of_error_pixels': 113892451, 'max_pixel_value': 32768, 'min_pixel_value': 6, 'average_pixel_value': 8228.98770845248, 'stddev_pixel_value': 13621.130386551, 'number_of_total_bits_per_pixel': 16, 'number_of_data_quality_flag_bits_per_pixel': 2, 'number_of_valid_bits_per_pixel': 12, 'data_quality_flag_meaning': "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", 'ground_sample_distance_ew': 1.4e-05, 'ground_sample_distance_ns': 1.4e-05, } ) sc_position = xr.DataArray(0., attrs={ 'sc_position_center_pixel': [-26113466.1974016, 33100139.1630508, 3943.75470244799], }) xr_.open_dataset.return_value = FakeDataset( { 'image_pixel_values': counts, 'sc_position': sc_position, 'gsics_coeff_intercept': [0.1859369], 'gsics_coeff_slope': [0.9967594], }, { "satellite_name": "GK-2A", "observation_start_time": 623084431.957882, "observation_end_time": 623084975.606133, "projection_type": "GEOS", "sub_longitude": 2.23751210105673, "cfac": 81701355.6133574, "lfac": -81701355.6133574, "coff": 11000.5, "loff": 11000.5, "nominal_satellite_height": 42164000., "earth_equatorial_radius": 6378137., "earth_polar_radius": 6356752.3, "number_of_columns": 22000, "number_of_lines": 22000, "observation_mode": "FD", "channel_spatial_resolution": "0.5", "Radiance_to_Albedo_c": 1, "DN_to_Radiance_Gain": -0.0144806550815701, "DN_to_Radiance_Offset": 118.050903320312, "Teff_to_Tbb_c0": -0.141418528203155, "Teff_to_Tbb_c1": 1.00052232906885, "Teff_to_Tbb_c2": -0.00000036287276076109, "light_speed": 2.9979245800E+08, "Boltzmann_constant_k": 1.3806488000E-23, "Plank_constant_h": 6.6260695700E-34, } ) self.reader = AMIL1bNetCDF('filename', {'platform_shortname': 'gk2a'}, {'file_type': 'ir087'},) class TestAMIL1bNetCDF(TestAMIL1bNetCDFBase): """Test the AMI L1b reader.""" def _check_orbital_parameters(self, orb_params): """Check that orbital parameters match expected values.""" exp_params = { 'projection_altitude': 35785863.0, 'projection_latitude': 0.0, 'projection_longitude': 128.2, 'satellite_actual_altitude': 35782654.56070405, 'satellite_actual_latitude': 0.005364927, 'satellite_actual_longitude': 128.2707, } for key, val in exp_params.items(): self.assertAlmostEqual(val, orb_params[key], places=3) def 
test_filename_grouping(self): """Test that filenames are grouped properly.""" from satpy.readers import group_files filenames = [ 'gk2a_ami_le1b_ir087_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir096_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir105_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir112_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir123_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir133_fd020ge_201909300300.nc', 'gk2a_ami_le1b_nr013_fd020ge_201909300300.nc', 'gk2a_ami_le1b_nr016_fd020ge_201909300300.nc', 'gk2a_ami_le1b_sw038_fd020ge_201909300300.nc', 'gk2a_ami_le1b_vi004_fd010ge_201909300300.nc', 'gk2a_ami_le1b_vi005_fd010ge_201909300300.nc', 'gk2a_ami_le1b_vi006_fd005ge_201909300300.nc', 'gk2a_ami_le1b_vi008_fd010ge_201909300300.nc', 'gk2a_ami_le1b_wv063_fd020ge_201909300300.nc', 'gk2a_ami_le1b_wv069_fd020ge_201909300300.nc', 'gk2a_ami_le1b_wv073_fd020ge_201909300300.nc'] groups = group_files(filenames, reader='ami_l1b') self.assertEqual(len(groups), 1) self.assertEqual(len(groups[0]['ami_l1b']), 16) def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime self.assertEqual(self.reader.start_time, datetime(2019, 9, 30, 3, 0, 31, 957882)) self.assertEqual(self.reader.end_time, datetime(2019, 9, 30, 3, 9, 35, 606133)) def test_get_dataset(self): """Test gettting radiance data.""" from satpy.tests.utils import make_dataid key = make_dataid(name='VI006', calibration='radiance') res = self.reader.get_dataset(key, { 'file_key': 'image_pixel_values', 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', 'units': 'W m-2 um-1 sr-1', }) exp = {'calibration': 'radiance', 'modifiers': (), 'platform_name': 'GEO-KOMPSAT-2A', 'sensor': 'ami', 'units': 'W m-2 um-1 sr-1'} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) self._check_orbital_parameters(res.attrs['orbital_parameters']) def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.tests.utils import make_dataid with self.assertRaises(ValueError): ds_id = make_dataid(name='VI006', calibration='_bad_') ds_info = {'file_key': 'image_pixel_values', 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', 'units': 'W m-2 um-1 sr-1', } self.reader.get_dataset(ds_id, ds_info) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] exp = {'a': 6378137.0, 'b': 6356752.3, 'h': 35785863.0, 'lon_0': 128.2, 'proj': 'geos', 'units': 'm'} for key, val in exp.items(): self.assertIn(key, call_args[3]) self.assertAlmostEqual(val, call_args[3][key]) self.assertEqual(call_args[4], self.reader.nc.attrs['number_of_columns']) self.assertEqual(call_args[5], self.reader.nc.attrs['number_of_lines']) np.testing.assert_allclose(call_args[6], [-5511022.902, -5511022.902, 5511022.902, 5511022.902]) def test_get_dataset_vis(self): """Test get visible calibrated data.""" from satpy.tests.utils import make_dataid key = make_dataid(name='VI006', calibration='reflectance') res = self.reader.get_dataset(key, { 'file_key': 'image_pixel_values', 'standard_name': 'toa_bidirectional_reflectance', 'units': '%', }) exp = {'calibration': 'reflectance', 'modifiers': (), 'platform_name': 'GEO-KOMPSAT-2A', 'sensor': 'ami', 'units': '%'} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) self._check_orbital_parameters(res.attrs['orbital_parameters']) def test_get_dataset_counts(self): """Test 
get counts data.""" from satpy.tests.utils import make_dataid key = make_dataid(name='VI006', calibration='counts') res = self.reader.get_dataset(key, { 'file_key': 'image_pixel_values', 'standard_name': 'counts', 'units': '1', }) exp = {'calibration': 'counts', 'modifiers': (), 'platform_name': 'GEO-KOMPSAT-2A', 'sensor': 'ami', 'units': '1'} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) self._check_orbital_parameters(res.attrs['orbital_parameters']) class TestAMIL1bNetCDFIRCal(TestAMIL1bNetCDFBase): """Test IR specific things about the AMI reader.""" def setUp(self): """Create test data for IR calibration tests.""" from satpy.tests.utils import make_dataid count_data = (np.arange(10).reshape((2, 5))) + 7000 count_data = count_data.astype(np.uint16) count = xr.DataArray( da.from_array(count_data, chunks='auto'), dims=('y', 'x'), attrs={ 'channel_name': "IR087", 'detector_side': 2, 'number_of_total_pixels': 484000000, 'number_of_error_pixels': 113892451, 'max_pixel_value': 32768, 'min_pixel_value': 6, 'average_pixel_value': 8228.98770845248, 'stddev_pixel_value': 13621.130386551, 'number_of_total_bits_per_pixel': 16, 'number_of_data_quality_flag_bits_per_pixel': 2, 'number_of_valid_bits_per_pixel': 13, 'data_quality_flag_meaning': "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", 'ground_sample_distance_ew': 1.4e-05, 'ground_sample_distance_ns': 1.4e-05, } ) self.ds_id = make_dataid(name='IR087', wavelength=[8.415, 8.59, 8.765], calibration='brightness_temperature') self.ds_info = { 'file_key': 'image_pixel_values', 'wavelength': [8.415, 8.59, 8.765], 'standard_name': 'toa_brightness_temperature', 'units': 'K', } super(TestAMIL1bNetCDFIRCal, self).setUp(counts=count) def test_default_calibrate(self): """Test default (pyspectral) IR calibration.""" from satpy.readers.ami_l1b import rad2temp with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_called_once() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True) # make sure the attributes from the file are in the data array self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') def test_infile_calibrate(self): """Test IR calibration using in-file coefficients.""" from satpy.readers.ami_l1b import rad2temp self.reader.calib_mode = 'FILE' with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.04) # make sure the attributes from the file are in the data array self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') def test_gsics_radiance_corr(self): """Test IR radiance adjustment using in-file GSICS coefs.""" from satpy.readers.ami_l1b import rad2temp self.reader.calib_mode = 'GSICS' expected = np.array([[238.036797, 238.007106, 237.977396, 237.947668, 237.91792], [237.888154, 237.85837, 237.828566, 237.798743, 237.768902]]) with 
mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') def test_user_radiance_corr(self): """Test IR radiance adjustment using user-supplied coefs.""" from satpy.readers.ami_l1b import rad2temp self.reader.calib_mode = 'FILE' self.reader.user_calibration = {'IR087': {'slope': 0.99669, 'offset': 0.16907}} expected = np.array([[238.073713, 238.044043, 238.014354, 237.984647, 237.954921], [237.925176, 237.895413, 237.865631, 237.835829, 237.806009]]) with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') satpy-0.34.0/satpy/tests/reader_tests/test_amsr2_l1b.py000066400000000000000000000171231420401153000231110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.amsr2_l1b module.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { '/attr/PlatformShortName': 'GCOM-W1', '/attr/SensorShortName': 'AMSR2', '/attr/StartOrbitNumber': '22210', '/attr/StopOrbitNumber': '22210', } for bt_chan in [ '(10.7GHz,H)', '(10.7GHz,V)', '(18.7GHz,H)', '(18.7GHz,V)', '(23.8GHz,H)', '(23.8GHz,V)', '(36.5GHz,H)', '(36.5GHz,V)', '(6.9GHz,H)', '(6.9GHz,V)', '(7.3GHz,H)', '(7.3GHz,V)', '(89.0GHz-A,H)', '(89.0GHz-A,V)', '(89.0GHz-B,H)', '(89.0GHz-B,V)', ]: k = 'Brightness Temperature {}'.format(bt_chan) file_content[k] = DEFAULT_FILE_DATA[:, ::2] file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) file_content[k + '/attr/UNIT'] = 'K' file_content[k + '/attr/SCALE FACTOR'] = 0.01 for bt_chan in [ '(89.0GHz-A,H)', '(89.0GHz-A,V)', '(89.0GHz-B,H)', '(89.0GHz-B,V)', ]: k = 'Brightness Temperature {}'.format(bt_chan) file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/UNIT'] = 'K' file_content[k + '/attr/SCALE FACTOR'] = 0.01 for nav_chan in ['89A', '89B']: lon_k = 'Longitude of Observation Point for ' + nav_chan lat_k = 'Latitude of Observation Point for ' + nav_chan file_content[lon_k] = DEFAULT_LON_DATA file_content[lon_k + '/shape'] = DEFAULT_FILE_SHAPE file_content[lon_k + '/attr/SCALE FACTOR'] = 1 file_content[lon_k + '/attr/UNIT'] = 'deg' file_content[lat_k] = DEFAULT_LAT_DATA file_content[lat_k + '/shape'] = DEFAULT_FILE_SHAPE file_content[lat_k + '/attr/SCALE FACTOR'] = 1 file_content[lat_k + '/attr/UNIT'] = 'deg' convert_file_content_to_data_array(file_content) return file_content class TestAMSR2L1BReader(unittest.TestCase): """Test AMSR2 L1B Reader.""" yaml_file = "amsr2_l1b.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(AMSR2L1BFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', ]) 
self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_basic(self): """Test loading of basic channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load([ 'btemp_10.7v', 'btemp_10.7h', 'btemp_6.9v', 'btemp_6.9h', 'btemp_7.3v', 'btemp_7.3h', 'btemp_18.7v', 'btemp_18.7h', 'btemp_23.8v', 'btemp_23.8h', 'btemp_36.5v', 'btemp_36.5h', ]) self.assertEqual(len(ds), 12) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'brightness_temperature') self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2))) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertTupleEqual(d.attrs['area'].lons.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) self.assertTupleEqual(d.attrs['area'].lats.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) assert d.attrs['sensor'] == 'amsr2' assert d.attrs['platform_name'] == 'GCOM-W1' def test_load_89ghz(self): """Test loading of 89GHz channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load([ 'btemp_89.0av', 'btemp_89.0ah', 'btemp_89.0bv', 'btemp_89.0bh', ]) self.assertEqual(len(ds), 4) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'brightness_temperature') self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertTupleEqual(d.attrs['area'].lons.shape, DEFAULT_FILE_SHAPE) self.assertTupleEqual(d.attrs['area'].lats.shape, DEFAULT_FILE_SHAPE) satpy-0.34.0/satpy/tests/reader_tests/test_amsr2_l2.py000066400000000000000000000121611420401153000227450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unit tests for AMSR L2 reader.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 30) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { '/attr/PlatformShortName': 'GCOM-W1', '/attr/SensorShortName': 'AMSR2', '/attr/StartOrbitNumber': '22210', '/attr/StopOrbitNumber': '22210', } k = 'Geophysical Data' file_content[k] = DEFAULT_FILE_DATA[:, :] file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content[k + '/attr/UNIT'] = 'K' file_content[k + '/attr/SCALE FACTOR'] = 1 k = 'Latitude of Observation Point' file_content[k] = DEFAULT_FILE_DATA[:, :] file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content[k + '/attr/UNIT'] = 'deg' file_content[k + '/attr/SCALE FACTOR'] = 1 k = 'Longitude of Observation Point' file_content[k] = DEFAULT_FILE_DATA[:, :] file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content[k + '/attr/UNIT'] = 'deg' file_content[k + '/attr/SCALE FACTOR'] = 1 convert_file_content_to_data_array(file_content, dims=('dim_0', 'dim_1')) return file_content class TestAMSR2L2Reader(unittest.TestCase): """Test AMSR2 L2 Reader.""" yaml_file = "amsr2_l2.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler from satpy.readers.amsr2_l2 import AMSR2L2FileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(AMSR2L2FileHandler, '__bases__', (FakeHDF5FileHandler2, AMSR2L1BFileHandler)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_basic(self): """Test loading of basic channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load(['ssw']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1]))) self.assertIn('area', d.attrs) 
self.assertIsNotNone(d.attrs['area']) self.assertTupleEqual(d.attrs['area'].lons.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) self.assertTupleEqual(d.attrs['area'].lats.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) satpy-0.34.0/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py000066400000000000000000000326551420401153000241320ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'amsr2_l2_gaasp' reader.""" import os from datetime import datetime from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr MBT_FILENAME = "AMSR2-MBT_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" PRECIP_FILENAME = "AMSR2-PRECIP_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" OCEAN_FILENAME = "AMSR2-OCEAN_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" SEAICE_NH_FILENAME = "AMSR2-SEAICE-NH_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" SEAICE_SH_FILENAME = "AMSR2-SEAICE-SH_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" SNOW_FILENAME = "AMSR2-SNOW_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" SOIL_FILENAME = "AMSR2-SOIL_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" EXAMPLE_FILENAMES = [ MBT_FILENAME, PRECIP_FILENAME, OCEAN_FILENAME, SEAICE_NH_FILENAME, SEAICE_SH_FILENAME, SNOW_FILENAME, SOIL_FILENAME, ] def _get_shared_global_attrs(filename): attrs = { 'time_coverage_start': '2020-08-12T05:58:31.0Z', 'time_coverage_end': '2020-08-12T06:07:01.0Z', 'platform_name': 'GCOM-W1', 'instrument_name': 'AMSR2', } return attrs def _create_two_res_gaasp_dataset(filename): """Represent files with two resolution of variables in them (ex. 
OCEAN).""" lon_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), attrs={'standard_name': 'longitude'}) lat_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), attrs={'standard_name': 'latitude'}) lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), attrs={'standard_name': 'longitude'}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), attrs={'standard_name': 'latitude'}) swath_var1 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), coords={'some_longitude_hi': lon_var_hi, 'some_latitude_hi': lat_var_hi}, attrs={'_FillValue': -9999., 'scale_factor': 0.5, 'add_offset': 2.0}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), coords={'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo}, attrs={'_FillValue': -9999.}) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), attrs={'_FillValue': 100, 'comment': 'Some comment'}) not_xy_dim_var = xr.DataArray(da.zeros((10, 5), dtype=np.float32), dims=('Number_of_Scans', 'Time_Dimension')) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), dims=('Time_Dimension',)) ds_vars = { 'swath_var_hi': swath_var1, 'swath_var_low': swath_var2, 'swath_var_low_int': swath_int_var, 'some_longitude_hi': lon_var_hi, 'some_latitude_hi': lat_var_hi, 'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo, 'not_xy_dim_var': not_xy_dim_var, 'time_var': time_var, } attrs = _get_shared_global_attrs(filename) ds = xr.Dataset(ds_vars, attrs=attrs) return ds def _create_gridded_gaasp_dataset(filename): """Represent files with gridded products.""" grid_var = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Y_Dimension', 'Number_of_X_Dimension'), attrs={ '_FillValue': -9999., 'scale_factor': 0.5, 'add_offset': 2.0 }) latency_var = xr.DataArray(da.zeros((10, 10), dtype=np.timedelta64), dims=('Number_of_Y_Dimension', 'Number_of_X_Dimension'), attrs={ '_FillValue': -9999, }) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), dims=('Time_Dimension',)) ds_vars = { 'grid_var': grid_var, 'latency_var': latency_var, 'time_var': time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) def _create_one_res_gaasp_dataset(filename): """Represent files with one resolution of variables in them (ex. 
SOIL).""" lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), attrs={'standard_name': 'longitude'}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), attrs={'standard_name': 'latitude'}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), coords={'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo}, attrs={ '_FillValue': -9999., 'scale_factor': 0.5, 'add_offset': 2.0 }) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), attrs={'_FillValue': 100, 'comment': 'Some comment'}) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), dims=('Time_Dimension',)) ds_vars = { 'swath_var': swath_var2, 'swath_var_int': swath_int_var, 'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo, 'time_var': time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) def fake_open_dataset(filename, **kwargs): """Create a Dataset similar to reading an actual file with xarray.open_dataset.""" if filename in [MBT_FILENAME, PRECIP_FILENAME, OCEAN_FILENAME]: return _create_two_res_gaasp_dataset(filename) if filename in [SEAICE_NH_FILENAME, SEAICE_SH_FILENAME]: return _create_gridded_gaasp_dataset(filename) return _create_one_res_gaasp_dataset(filename) class TestGAASPReader: """Tests for the GAASP reader.""" yaml_file = 'amsr2_l2_gaasp.yaml' def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), [ (EXAMPLE_FILENAMES, 7), ([MBT_FILENAME], 1), ([PRECIP_FILENAME], 1), ([OCEAN_FILENAME], 1), ([SEAICE_NH_FILENAME], 1), ([SEAICE_SH_FILENAME], 1), ([SNOW_FILENAME], 1), ([SOIL_FILENAME], 1), ] ) def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) assert len(loadables) == expected_loadables r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers @pytest.mark.parametrize( ("filenames", "expected_datasets"), [ (EXAMPLE_FILENAMES, ['swath_var_hi', 'swath_var_low', 'swath_var_low_int', 'swath_var', 'swath_var_int', 'grid_var_NH', 'grid_var_SH', 'latency_var_NH', 'latency_var_SH']), ([MBT_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), ([PRECIP_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), ([OCEAN_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), ([SEAICE_NH_FILENAME], ['grid_var_NH', 'latency_var_NH']), ([SEAICE_SH_FILENAME], ['grid_var_SH', 'latency_var_SH']), ([SNOW_FILENAME], ['swath_var', 'swath_var_int']), ([SOIL_FILENAME], ['swath_var', 'swath_var_int']), ]) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) avails = 
list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails assert 'not_xy_dim_var' not in expected_datasets @staticmethod def _check_area(data_id, data_arr): from pyresample.geometry import AreaDefinition, SwathDefinition area = data_arr.attrs['area'] if 'grid_var' in data_id['name'] or 'latency_var' in data_id['name']: assert isinstance(area, AreaDefinition) else: assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_id, data_arr): if 'int' in data_id['name']: assert data_arr.attrs['_FillValue'] == 100 assert np.issubdtype(data_arr.dtype, np.integer) else: assert '_FillValue' not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert data_arr.dtype.type == np.float32 @staticmethod def _check_attrs(data_arr): attrs = data_arr.attrs assert 'scale_factor' not in attrs assert 'add_offset' not in attrs assert attrs['platform_name'] == 'GCOM-W1' assert attrs['sensor'] == 'amsr2' assert attrs['start_time'] == datetime(2020, 8, 12, 5, 58, 31) assert attrs['end_time'] == datetime(2020, 8, 12, 6, 7, 1) @pytest.mark.parametrize( ("filenames", "loadable_ids"), [ (EXAMPLE_FILENAMES, ['swath_var_hi', 'swath_var_low', 'swath_var_low_int', 'swath_var', 'swath_var_int', 'grid_var_NH', 'grid_var_SH', 'latency_var_NH', 'latency_var_SH']), ([MBT_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), ([PRECIP_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), ([OCEAN_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), ([SEAICE_NH_FILENAME], ['grid_var_NH', 'latency_var_NH']), ([SEAICE_SH_FILENAME], ['grid_var_SH', 'latency_var_SH']), ([SNOW_FILENAME], ['swath_var', 'swath_var_int']), ([SOIL_FILENAME], ['swath_var', 'swath_var_int']), ]) def test_basic_load(self, filenames, loadable_ids): """Test that variables are loaded properly.""" from satpy.readers import load_reader with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) loaded_data_arrs = r.load(loadable_ids) assert loaded_data_arrs for data_id, data_arr in loaded_data_arrs.items(): self._check_area(data_id, data_arr) self._check_fill(data_id, data_arr) self._check_attrs(data_arr) satpy-0.34.0/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py000066400000000000000000000156471420401153000266640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unittesting the ASCAT SCATTEROMETER SOIL MOISTURE BUFR reader.""" import os import sys import unittest from datetime import datetime import numpy as np # TDB: this test is based on test_seviri_l2_bufr.py and test_iasi_l2.py # This is a test for ASCAT SoilMoisture product message, take from a real # bufr file distributed over EUMETCAST def create_message(): """Create fake message for testing.""" nlat = 10 nlon = 10 samples = nlat*nlon lat, lon = np.meshgrid(np.linspace(63, 65, nlat), np.linspace(-30, -20, nlon)) lat = np.round(np.ravel(lat), 4) lon = np.round(np.ravel(lon), 4) rstate = np.random.RandomState(0) surfaceSoilMoisture = np.round(rstate.rand(samples)*100, 1) surfaceSoilMoisture[0] = -1e+100 retmsg = { 'inputDelayedDescriptorReplicationFactor': [8], 'edition': 4, 'masterTableNumber': 0, 'bufrHeaderCentre': 254, 'bufrHeaderSubCentre': 0, 'updateSequenceNumber': 0, 'dataCategory': 12, 'internationalDataSubCategory': 255, 'dataSubCategory': 190, 'masterTablesVersionNumber': 13, 'localTablesVersionNumber': 0, 'typicalYear': 2020, 'typicalMonth': 12, 'typicalDay': 21, 'typicalHour': 9, 'typicalMinute': 33, 'typicalSecond': 0, 'numberOfSubsets': samples, 'observedData': 1, 'compressedData': 1, 'unexpandedDescriptors': 312061, 'centre': 254, 'subCentre': 0, '#1#softwareIdentification': 1000, 'satelliteIdentifier': 4, 'satelliteInstruments': 190, 'year': 2020, 'month': 12, 'day': 21, 'hour': 9, 'minute': 33, 'second': np.linspace(0, 59, samples), 'latitude': lat, 'longitude': lon, 'surfaceSoilMoisture': surfaceSoilMoisture, 'soilMoistureQuality': np.zeros(samples), } return retmsg MSG = create_message() # the notional filename that would contain the above test message data FILENAME = 'W_XX-EUMETSAT-TEST,SOUNDING+SATELLITE,METOPA+ASCAT_C_EUMC_20201221093300_73545_eps_o_125_ssm_l2.bin' # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { 'reception_location': 'TEST', 'platform': 'METOPA', 'instrument': 'ASCAT', 'start_time': '20201221093300', 'perigee': '73545', 'species': '125_ssm', 'level': 'l2' } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { 'file_type': 'ascat_l2_soilmoisture_bufr', 'file_reader': 'AscatSoilMoistureBufr' } def save_test_data(path): """Save the test file to the indicated directory.""" import eccodes as ec filepath = os.path.join(path, FILENAME) with open(filepath, "wb") as f: for m in [MSG]: buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') for key in m: val = m[key] if np.isscalar(val): ec.codes_set(buf, key, val) else: ec.codes_set_array(buf, key, val) ec.codes_set(buf, 'pack', 1) ec.codes_write(buf, f) ec.codes_release(buf) return filepath class TesitAscatL2SoilmoistureBufr(unittest.TestCase): """Test ASCAT Soil Mosture loader.""" def setUp(self): """Create temporary file to perform tests with.""" import tempfile from satpy.readers.ascat_l2_soilmoisture_bufr import AscatSoilMoistureBufr self.base_dir = tempfile.mkdtemp() self.fname = save_test_data(self.base_dir) self.fname_info = FILENAME_INFO self.ftype_info = FILETYPE_INFO self.reader = AscatSoilMoistureBufr(self.fname, self.fname_info, self.ftype_info) def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = 
os.path.join(self.base_dir, FILENAME) scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) self.assertTrue('scatterometer' in scn.sensor_names) self.assertTrue(datetime(2020, 12, 21, 9, 33, 0) == scn.start_time) self.assertTrue(datetime(2020, 12, 21, 9, 33, 59) == scn.end_time) @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) self.assertTrue('surface_soil_moisture' in scn.available_dataset_names()) scn.load(scn.available_dataset_names()) loaded = [dataset.name for dataset in scn] self.assertTrue(sorted(loaded) == sorted(scn.available_dataset_names())) @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) for name in scn.available_dataset_names(): scn.load([name]) loaded_values = scn[name].values fill_value = scn[name].attrs['fill_value'] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) key = scn[name].attrs['key'] original_values = MSG[key] # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): self.assertTrue(np.allclose(original_values, loaded_values_nan_filled)) satpy-0.34.0/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py000066400000000000000000000213671420401153000241060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the hrpt reader.""" import os import unittest from contextlib import suppress from tempfile import NamedTemporaryFile from unittest import mock import numpy as np import xarray as xr from satpy.readers.hrpt import HRPTFile, dtype from satpy.tests.reader_tests.test_avhrr_l1b_gaclac import PygacPatcher from satpy.tests.utils import make_dataid NUMBER_OF_SCANS = 10 SWATH_WIDTH = 2048 class TestHRPTWithFile(unittest.TestCase): """Test base class with writing a fake file.""" def setUp(self) -> None: """Set up the test case.""" test_data = np.ones(NUMBER_OF_SCANS, dtype=dtype) # Channel 3a test_data["id"]["id"][:5] = 891 # Channel 3b test_data["id"]["id"][5:] = 890 with NamedTemporaryFile(mode='w+', suffix='.hmf', delete=False) as hrpt_file: self.filename = hrpt_file.name test_data.tofile(hrpt_file) def tearDown(self) -> None: """Tear down the test case.""" with suppress(OSError): os.remove(self.filename) def _get_dataset(self, dataset_id): fh = HRPTFile(self.filename, {}, {}) return fh.get_dataset(dataset_id, {}) class TestHRPTReading(TestHRPTWithFile): """Test case for reading hrpt data.""" def test_reading(self): """Test that data is read.""" fh = HRPTFile(self.filename, {}, {}) assert fh._data is not None class TestHRPTGetUncalibratedData(TestHRPTWithFile): """Test case for reading uncalibrated hrpt data.""" def _get_channel_1_counts(self): return self._get_dataset(make_dataid(name='1', calibration='counts')) def test_get_dataset_returns_a_dataarray(self): """Test that get_dataset returns a dataarray.""" result = self._get_channel_1_counts() assert isinstance(result, xr.DataArray) def test_platform_name(self): """Test that the platform name is correct.""" result = self._get_channel_1_counts() assert result.attrs['platform_name'] == 'NOAA 19' def test_no_calibration_values_are_1(self): """Test that the values of non-calibrated data is 1.""" result = self._get_channel_1_counts() assert (result.values == 1).all() def fake_calibrate_solar(data, *args, **kwargs): """Fake calibration.""" del args, kwargs return data * 25.43 + 3 def fake_calibrate_thermal(data, *args, **kwargs): """Fake calibration.""" del args, kwargs return data * 35.43 + 3 class CalibratorPatcher(PygacPatcher): """Patch pygac.""" def setUp(self) -> None: """Patch pygac's calibration.""" super().setUp() # Import things to patch here to make them patchable. Otherwise another function # might import it first which would prevent a successful patch. 
        from pygac.calibration import Calibrator, calibrate_solar, calibrate_thermal
        self.Calibrator = Calibrator
        self.calibrate_thermal = calibrate_thermal
        self.calibrate_thermal.side_effect = fake_calibrate_thermal
        self.calibrate_solar = calibrate_solar
        self.calibrate_solar.side_effect = fake_calibrate_solar


class TestHRPTWithPatchedCalibratorAndFile(CalibratorPatcher, TestHRPTWithFile):
    """Test case with patched calibration routines and a synthetic file."""

    def setUp(self) -> None:
        """Set up the test case."""
        CalibratorPatcher.setUp(self)
        TestHRPTWithFile.setUp(self)

    def tearDown(self):
        """Tear down the test case."""
        CalibratorPatcher.tearDown(self)
        TestHRPTWithFile.tearDown(self)


class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for reading calibrated reflectances from hrpt data."""

    def _get_channel_1_reflectance(self):
        """Get the channel 1 reflectance."""
        dataset_id = make_dataid(name='1', calibration='reflectance')
        return self._get_dataset(dataset_id)

    def test_calibrated_reflectances_values(self):
        """Test the calibrated reflectance values."""
        result = self._get_channel_1_reflectance()
        np.testing.assert_allclose(result.values, 28.43)


class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for reading calibrated brightness temperature from hrpt data."""

    def _get_channel_4_bt(self):
        """Get the channel 4 bt."""
        dataset_id = make_dataid(name='4', calibration='brightness_temperature')
        return self._get_dataset(dataset_id)

    def test_calibrated_bt_values(self):
        """Test the calibrated brightness temperature values."""
        result = self._get_channel_4_bt()
        np.testing.assert_allclose(result.values, 38.43)


class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile):
    """Test case for reading channel 3a and 3b from hrpt data."""

    def _get_channel_3b_bt(self):
        """Get the channel 3b bt."""
        dataset_id = make_dataid(name='3b', calibration='brightness_temperature')
        return self._get_dataset(dataset_id)

    def _get_channel_3a_reflectance(self):
        """Get the channel 3a reflectance."""
        dataset_id = make_dataid(name='3a', calibration='reflectance')
        return self._get_dataset(dataset_id)

    def _get_channel_3a_counts(self):
        """Get the channel 3a counts."""
        dataset_id = make_dataid(name='3a', calibration='counts')
        return self._get_dataset(dataset_id)

    def test_channel_3b_masking(self):
        """Test that channel 3b is split correctly."""
        result = self._get_channel_3b_bt()
        assert np.isnan(result.values[:5]).all()
        assert np.isfinite(result.values[5:]).all()

    def test_channel_3a_masking(self):
        """Test that channel 3a is split correctly."""
        result = self._get_channel_3a_reflectance()
        assert np.isnan(result.values[5:]).all()
        assert np.isfinite(result.values[:5]).all()

    def test_uncalibrated_channel_3a_masking(self):
        """Test that uncalibrated channel 3a is split correctly."""
        result = self._get_channel_3a_counts()
        assert np.isnan(result.values[5:]).all()
        assert np.isfinite(result.values[:5]).all()


class TestHRPTNavigation(TestHRPTWithFile):
    """Test case for computing HRPT navigation."""

    def setUp(self) -> None:
        """Set up the test case."""
        super().setUp()
        self.fake_lons = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH))
        self.fake_lats = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH)) * 2

    def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt):
        """Prepare the mocks."""
        Orbital.return_value.get_position.return_value = mock.MagicMock(), mock.MagicMock()
        get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        SatelliteInterpolator.return_value.interpolate.return_value = self.fake_lons, self.fake_lats

    @mock.patch.multiple('satpy.readers.hrpt',
                         Orbital=mock.DEFAULT,
                         compute_pixels=mock.DEFAULT,
                         get_lonlatalt=mock.DEFAULT,
                         SatelliteInterpolator=mock.DEFAULT)
    def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator):
        """Check that longitudes are returned properly."""
        self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt)
        dataset_id = make_dataid(name='longitude')
        result = self._get_dataset(dataset_id)
        assert (result == self.fake_lons).all()

    @mock.patch.multiple('satpy.readers.hrpt',
                         Orbital=mock.DEFAULT,
                         compute_pixels=mock.DEFAULT,
                         get_lonlatalt=mock.DEFAULT,
                         SatelliteInterpolator=mock.DEFAULT)
    def test_latitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator):
        """Check that latitudes are returned properly."""
        self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt)
        dataset_id = make_dataid(name='latitude')
        result = self._get_dataset(dataset_id)
        assert (result == self.fake_lats).all()
satpy-0.34.0/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py000066400000000000000000000602051420401153000245000ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Pygac interface.""" from datetime import datetime from unittest import TestCase, mock import dask.array as da import numpy as np import pytest import xarray as xr GAC_PATTERN = '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' # noqa GAC_POD_FILENAMES = ['NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI', 'NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI', 'NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI', 'NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI', 'NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC', 'NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI', 'NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', 'NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC', 'NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC', 'NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC', 'NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC'] GAC_KLM_FILENAMES = ['NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC', 'NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC', 'NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC', 'NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI', 'NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV', 'NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV', 'NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV'] LAC_POD_FILENAMES = ['BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB', 'BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB', 'BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB', 'BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB'] LAC_KLM_FILENAMES = ['BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB', 'BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB', 'BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB', 'BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB', 'BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB', 'BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB', 'BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB', 'BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB', 'BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB', 'BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB', 'BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB', 'BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB', 'BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB', 'NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV', 'NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI'] @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) def _get_fh_mocked(init_mock, **attrs): """Create a mocked file handler with the given attributes.""" from satpy.readers.avhrr_l1b_gaclac import GACLACFile fh = GACLACFile() for name, value in attrs.items(): setattr(fh, name, value) return fh def _get_reader_mocked(along_track=3): """Create a mocked reader.""" reader = mock.MagicMock(spacecraft_name='spacecraft_name', meta_data={'foo': 'bar'}) reader.mask = [0, 0] reader.get_times.return_value = np.arange(along_track) reader.get_tle_lines.return_value = 'tle' return reader class PygacPatcher(TestCase): """Patch pygac.""" def setUp(self): """Patch pygac imports.""" self.pygac = mock.MagicMock() self.fhs = mock.MagicMock() modules = { 'pygac': self.pygac, 'pygac.gac_klm': self.pygac.gac_klm, 'pygac.gac_pod': self.pygac.gac_pod, 'pygac.lac_klm': self.pygac.lac_klm, 'pygac.lac_pod': self.pygac.lac_pod, 'pygac.utils': self.pygac.utils, 'pygac.calibration': self.pygac.calibration, } self.module_patcher = mock.patch.dict('sys.modules', modules) self.module_patcher.start() def tearDown(self): """Unpatch the pygac imports.""" self.module_patcher.stop() class GACLACFilePatcher(PygacPatcher): """Patch pygac.""" def setUp(self): """Patch GACLACFile.""" super().setUp() # Import GACLACFile here to make it patchable. 
Otherwise self._get_fh # might import it first which would prevent a successful patch. from satpy.readers.avhrr_l1b_gaclac import GACLACFile self.GACLACFile = GACLACFile class TestGACLACFile(GACLACFilePatcher): """Test the GACLAC file handler.""" def _get_fh(self, filename='NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', **kwargs): """Create a file handler.""" from trollsift import parse filename_info = parse(GAC_PATTERN, filename) return self.GACLACFile(filename, filename_info, {}, **kwargs) def test_init(self): """Test GACLACFile initialization.""" from pygac.gac_klm import GACKLMReader from pygac.gac_pod import GACPODReader from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader kwargs = {'start_line': 1, 'end_line': 2, 'strip_invalid_coords': True, 'interpolate_coords': True, 'adjust_clock_drift': True, 'tle_dir': 'tle_dir', 'tle_name': 'tle_name', 'tle_thresh': 123, 'calibration': 'calibration'} for filenames, reader_cls in zip([GAC_POD_FILENAMES, GAC_KLM_FILENAMES, LAC_POD_FILENAMES, LAC_KLM_FILENAMES], [GACPODReader, GACKLMReader, LACPODReader, LACKLMReader]): for filename in filenames: fh = self._get_fh(filename, **kwargs) self.assertLess(fh.start_time, fh.end_time, "Start time must precede end time.") self.assertIs(fh.reader_class, reader_cls, 'Wrong reader class assigned to {}'.format(filename)) def test_read_raw_data(self): """Test raw data reading.""" fh = _get_fh_mocked(reader=None, interpolate_coords='interpolate_coords', creation_site='creation_site', reader_kwargs={'foo': 'bar'}, filename='myfile') reader = mock.MagicMock(mask=[0]) reader_cls = mock.MagicMock(return_value=reader) fh.reader_class = reader_cls fh.read_raw_data() reader_cls.assert_called_with(interpolate_coords='interpolate_coords', creation_site='creation_site', foo='bar') reader.read.assert_called_with('myfile') # Test exception if all data is masked reader.mask = [1] fh.reader = None with self.assertRaises(ValueError): fh.read_raw_data() @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel') def test_get_dataset_slice(self, get_channel, slc, *mocks): """Get a slice of a dataset.""" from satpy.tests.utils import make_dataid # Test slicing/stripping def slice_patched(data, times): if len(data.shape) == 2: return data[1:3, :], times[1:3] return data[1:3], times[1:3] ch = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12], [13, 14, 15]]) acq = np.array([0, 1, 2, 3, 4]) slc.side_effect = slice_patched get_channel.return_value = ch kwargs_list = [{'strip_invalid_coords': False, 'start_line': 123, 'end_line': 456}, {'strip_invalid_coords': True, 'start_line': None, 'end_line': None}, {'strip_invalid_coords': True, 'start_line': 123, 'end_line': 456}] for kwargs in kwargs_list: fh = _get_fh_mocked( reader=_get_reader_mocked(along_track=len(acq)), chn_dict={'1': 0}, **kwargs ) key = make_dataid(name='1', calibration='reflectance') info = {'name': '1', 'standard_name': 'reflectance'} res = fh.get_dataset(key, info) np.testing.assert_array_equal(res.data, ch[1:3, :]) np.testing.assert_array_equal(res.coords['acq_time'].data, acq[1:3]) np.testing.assert_array_equal(slc.call_args_list[-1][1]['times'], acq) np.testing.assert_array_equal(slc.call_args_list[-1][1]['data'], ch) @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') def test_get_dataset_latlon(self, *mocks): """Test getting the latitudes and longitudes.""" from 
satpy.tests.utils import make_dataid lons = np.ones((3, 3)) lats = 2 * lons reader = _get_reader_mocked() reader.get_lonlat.return_value = lons, lats fh = _get_fh_mocked( reader=reader, start_line=None, end_line=None, strip_invalid_coords=False, interpolate_coords=True ) # With interpolation of coordinates for name, exp_data in zip(['longitude', 'latitude'], [lons, lats]): key = make_dataid(name=name) info = {'name': name, 'standard_name': 'my_standard_name'} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(exp_data, name=res.name, dims=('y', 'x'), coords={'acq_time': ('y', [0, 1, 2])}) xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False for name, _exp_data in zip(['longitude', 'latitude'], [lons, lats]): key = make_dataid(name=name) info = {'name': name, 'standard_name': 'my_standard_name'} res = fh.get_dataset(key=key, info=info) self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle') def test_get_dataset_angles(self, get_angle, *mocks): """Test getting the angles.""" from satpy.readers.avhrr_l1b_gaclac import ANGLES from satpy.tests.utils import make_dataid ones = np.ones((3, 3)) get_angle.return_value = ones reader = _get_reader_mocked() fh = _get_fh_mocked( reader=reader, start_line=None, end_line=None, strip_invalid_coords=False, interpolate_coords=True ) # With interpolation of coordinates for angle in ANGLES: key = make_dataid(name=angle) info = {'name': angle, 'standard_name': 'my_standard_name'} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(ones, name=res.name, dims=('y', 'x'), coords={'acq_time': ('y', [0, 1, 2])}) xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False for angle in ANGLES: key = make_dataid(name=angle) info = {'name': angle, 'standard_name': 'my_standard_name'} res = fh.get_dataset(key=key, info=info) self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') def test_get_dataset_qual_flags(self, *mocks): """Test getting the qualitiy flags.""" from satpy.tests.utils import make_dataid qual_flags = np.ones((3, 7)) reader = _get_reader_mocked() reader.get_qual_flags.return_value = qual_flags fh = _get_fh_mocked( reader=reader, start_line=None, end_line=None, strip_invalid_coords=False, interpolate_coords=True ) key = make_dataid(name='qual_flags') info = {'name': 'qual_flags'} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(qual_flags, name=res.name, dims=('y', 'num_flags'), coords={'acq_time': ('y', [0, 1, 2]), 'num_flags': ['Scan line number', 'Fatal error flag', 'Insufficient data for calibration', 'Insufficient data for calibration', 'Solar contamination of blackbody in channels 3', 'Solar contamination of blackbody in channels 4', 'Solar contamination of blackbody in channels 5']}) xr.testing.assert_equal(res, exp) def test_get_channel(self): """Test getting the channels.""" from satpy.tests.utils import make_dataid counts = np.moveaxis(np.array([[[1, 2, 3], [4, 5, 6]]]), 0, 2) calib_channels = 2 * counts reader = _get_reader_mocked() reader.get_counts.return_value = counts reader.get_calibrated_channels.return_value = calib_channels fh = _get_fh_mocked(reader=reader, counts=None, calib_channels=None, chn_dict={'1': 0}) key = make_dataid(name='1', calibration='counts') # Counts res = fh._get_channel(key=key) 
np.testing.assert_array_equal(res, [[1, 2, 3], [4, 5, 6]]) np.testing.assert_array_equal(fh.counts, counts) # Reflectance and Brightness Temperature for calib in ['reflectance', 'brightness_temperature']: key = make_dataid(name='1', calibration=calib) res = fh._get_channel(key=key) np.testing.assert_array_equal(res, [[2, 4, 6], [8, 10, 12]]) np.testing.assert_array_equal(fh.calib_channels, calib_channels) # Invalid with pytest.raises(ValueError): key = make_dataid(name='7', calibration='coffee') # Buffering reader.get_counts.reset_mock() key = make_dataid(name='1', calibration='counts') fh._get_channel(key=key) reader.get_counts.assert_not_called() reader.get_calibrated_channels.reset_mock() for calib in ['reflectance', 'brightness_temperature']: key = make_dataid(name='1', calibration=calib) fh._get_channel(key) reader.get_calibrated_channels.assert_not_called() def test_get_angle(self): """Test getting the angle.""" from satpy.tests.utils import make_dataid reader = mock.MagicMock() reader.get_angles.return_value = 1, 2, 3, 4, 5 fh = _get_fh_mocked(reader=reader, angles=None) # Test angle readout key = make_dataid(name='sensor_zenith_angle') res = fh._get_angle(key) self.assertEqual(res, 2) self.assertDictEqual(fh.angles, {'sensor_zenith_angle': 2, 'sensor_azimuth_angle': 1, 'solar_zenith_angle': 4, 'solar_azimuth_angle': 3, 'sun_sensor_azimuth_difference_angle': 5}) # Test buffering key = make_dataid(name='sensor_azimuth_angle') fh._get_angle(key) reader.get_angles.assert_called_once() def test_strip_invalid_lat(self): """Test stripping invalid coordinates.""" import pygac.utils reader = mock.MagicMock() reader.get_lonlat.return_value = None, None fh = _get_fh_mocked(reader=reader, first_valid_lat=None) # Test stripping pygac.utils.strip_invalid_lat.return_value = 1, 2 start, end = fh._strip_invalid_lat() self.assertTupleEqual((start, end), (1, 2)) # Test buffering fh._strip_invalid_lat() pygac.utils.strip_invalid_lat.assert_called_once() @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice') def test_slice(self, _slice): """Test slicing.""" def _slice_patched(data): return data[1:3] _slice.side_effect = _slice_patched data = np.zeros((4, 2)) times = np.array([1, 2, 3, 4], dtype='datetime64[us]') fh = _get_fh_mocked(start_line=1, end_line=3, strip_invalid_coords=False) data_slc, times_slc = fh.slice(data, times) np.testing.assert_array_equal(data_slc, data[1:3]) np.testing.assert_array_equal(times_slc, times[1:3]) self.assertEqual(fh.start_time, datetime(1970, 1, 1, 0, 0, 0, 2)) self.assertEqual(fh.end_time, datetime(1970, 1, 1, 0, 0, 0, 3)) @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat') def test__slice(self, strip_invalid_lat, get_qual_flags): """Test slicing.""" import pygac.utils pygac.utils.check_user_scanlines.return_value = 1, 2 pygac.utils.slice_channel.return_value = 'sliced' strip_invalid_lat.return_value = 3, 4 get_qual_flags.return_value = 'qual_flags' data = np.zeros((2, 2)) # a) Only start/end line given fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=False) data_slc = fh._slice(data) self.assertEqual(data_slc, 'sliced') pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=None, last_valid_lat=None, along_track=2) pygac.utils.slice_channel.assert_called_with( data, start_line=1, end_line=2, first_valid_lat=None, last_valid_lat=None) # b) Only strip_invalid_coords=True fh = _get_fh_mocked(start_line=None, 
end_line=None, strip_invalid_coords=True) fh._slice(data) pygac.utils.check_user_scanlines.assert_called_with( start_line=0, end_line=0, first_valid_lat=3, last_valid_lat=4, along_track=2) # c) Both fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=True) fh._slice(data) pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=3, last_valid_lat=4, along_track=2) # Test slicing with older pygac versions pygac.utils.slice_channel.return_value = ('sliced', 'foo', 'bar') data_slc = fh._slice(data) self.assertEqual(data_slc, 'sliced') class TestGetDataset(GACLACFilePatcher): """Test the get_dataset method.""" def setUp(self): """Set up the instance.""" self.exp = xr.DataArray(da.ones((3, 3)), name='1', dims=('y', 'x'), coords={'acq_time': ('y', [0, 1, 2])}, attrs={'name': '1', 'platform_name': 'spacecraft_name', 'orbit_number': 123, 'sensor': 'sensor', 'foo': 'bar', 'standard_name': 'my_standard_name'}) self.exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' super().setUp() @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel', return_value=np.ones((3, 3))) def test_get_dataset_channels(self, get_channel, *mocks): """Test getting the channel datasets.""" pygac_reader = _get_reader_mocked() fh = self._create_file_handler(pygac_reader) # Test calibration to reflectance as well as attributes. key, res = self._get_dataset(fh) exp = self._create_expected(res.name) exp.attrs['orbital_parameters'] = {'tle': 'tle'} xr.testing.assert_identical(res, exp) get_channel.assert_called_with(key) self._check_get_channel_calls(fh, get_channel) @staticmethod def _get_dataset(fh): from satpy.tests.utils import make_dataid key = make_dataid(name='1', calibration='reflectance') info = {'name': '1', 'standard_name': 'my_standard_name'} res = fh.get_dataset(key=key, info=info) return key, res @staticmethod def _create_file_handler(reader): """Mock reader and file handler.""" fh = _get_fh_mocked( reader=reader, chn_dict={'1': 0, '5': 0}, start_line=None, end_line=None, strip_invalid_coords=False, filename_info={'orbit_number': 123}, sensor='sensor', ) return fh @staticmethod def _create_expected(name): exp = xr.DataArray(da.ones((3, 3)), name=name, dims=('y', 'x'), coords={'acq_time': ('y', [0, 1, 2])}, attrs={'name': '1', 'platform_name': 'spacecraft_name', 'orbit_number': 123, 'sensor': 'sensor', 'foo': 'bar', 'standard_name': 'my_standard_name'}) exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' return exp @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel', return_value=np.ones((3, 3))) def test_get_dataset_no_tle(self, get_channel, *mocks): """Test getting the channel datasets when no TLEs are present.""" pygac_reader = _get_reader_mocked() pygac_reader.get_tle_lines = mock.MagicMock() pygac_reader.get_tle_lines.side_effect = RuntimeError() fh = self._create_file_handler(pygac_reader) # Test calibration to reflectance as well as attributes. 
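# --------------------------------------------------------------------------
# Illustrative sketch, not part of the original test module.  The GAC/LAC
# tests above build dataset identifiers with ``satpy.tests.utils.make_dataid``
# and hand them to the file handler's ``_get_channel``/``get_dataset``.  A
# standalone example of that identifier pattern (assuming only that satpy is
# installed):
import pytest

from satpy.tests.utils import make_dataid

# A DataID carries the requested calibration along with the channel name.
counts_id = make_dataid(name="1", calibration="counts")
bt_id = make_dataid(name="5", calibration="brightness_temperature")
assert counts_id["name"] == "1"
assert bt_id["name"] == "5"

# Unknown calibrations are rejected when the identifier is built, which is
# why the test above wraps such a call in ``pytest.raises(ValueError)``.
with pytest.raises(ValueError):
    make_dataid(name="7", calibration="coffee")
# --------------------------------------------------------------------------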
key, res = self._get_dataset(fh) exp = self._create_expected(res.name) xr.testing.assert_identical(res, exp) get_channel.assert_called_with(key) self._check_get_channel_calls(fh, get_channel) @staticmethod def _check_get_channel_calls(fh, get_channel): """Check _get_channel() calls.""" from satpy.tests.utils import make_dataid for key in [make_dataid(name='1', calibration='counts'), make_dataid(name='5', calibration='brightness_temperature')]: fh.get_dataset(key=key, info={'name': 1}) get_channel.assert_called_with(key) satpy-0.34.0/satpy/tests/reader_tests/test_clavrx.py000066400000000000000000000427161420401153000226340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.clavrx module.""" import os import unittest from unittest import mock import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition, SwathDefinition from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF4FileHandlerPolar(FakeHDF4FileHandler): """Swap-in HDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { '/attr/platform': 'SNPP', '/attr/sensor': 'VIIRS', } file_content['longitude'] = xr.DataArray( da.from_array(DEFAULT_LON_DATA, chunks=4096), attrs={ '_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'longitude', }) file_content['longitude/shape'] = DEFAULT_FILE_SHAPE file_content['latitude'] = xr.DataArray( da.from_array(DEFAULT_LAT_DATA, chunks=4096), attrs={ '_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'latitude', }) file_content['latitude/shape'] = DEFAULT_FILE_SHAPE file_content['variable1'] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ '_FillValue': -1, 'scale_factor': 1., 'add_offset': 0., 'units': '1', }) file_content['variable1/shape'] = DEFAULT_FILE_SHAPE # data with fill values file_content['variable2'] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ '_FillValue': -1, 'scale_factor': 1., 'add_offset': 0., 'units': '1', }) file_content['variable2/shape'] = DEFAULT_FILE_SHAPE file_content['variable2'] = 
file_content['variable2'].where( file_content['variable2'] % 2 != 0) # category file_content['variable3'] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.byte), attrs={ 'SCALED': 0, '_FillValue': -128, 'flag_meanings': 'clear water supercooled mixed ice unknown', 'flag_values': [0, 1, 2, 3, 4, 5], 'units': 'none', }) file_content['variable3/shape'] = DEFAULT_FILE_SHAPE return file_content class TestCLAVRXReaderPolar(unittest.TestCase): """Test CLAVR-X Reader with Polar files.""" yaml_file = "clavrx.yaml" def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerPolar,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_available_datasets(self): """Test available_datasets with fake variables from YAML.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) # mimic the YAML file being configured for more datasets fake_dataset_info = [ (None, {'name': 'variable1', 'resolution': None, 'file_type': ['clavrx_hdf4']}), (True, {'name': 'variable2', 'resolution': 742, 'file_type': ['clavrx_hdf4']}), (True, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}), (None, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}), (None, {'name': '_fake1', 'file_type': ['clavrx_hdf4']}), (None, {'name': 'variable1', 'file_type': ['level_fake']}), (True, {'name': 'variable3', 'file_type': ['clavrx_hdf4']}), ] new_ds_infos = list(r.file_handlers['clavrx_hdf4'][0].available_datasets( fake_dataset_info)) self.assertEqual(len(new_ds_infos), 9) # we have this and can provide the resolution self.assertTrue(new_ds_infos[0][0]) self.assertEqual(new_ds_infos[0][1]['resolution'], 742) # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have self.assertTrue(new_ds_infos[1][0]) self.assertEqual(new_ds_infos[1][1]['resolution'], 742) # we have this, but don't want to change the resolution # because a previous handler said it has it self.assertTrue(new_ds_infos[2][0]) self.assertEqual(new_ds_infos[2][1]['resolution'], 1) # even though the previous one was known we can still # produce it at our new resolution self.assertTrue(new_ds_infos[3][0]) self.assertEqual(new_ds_infos[3][1]['resolution'], 742) # we have this and can update the resolution since # no one else has claimed it self.assertTrue(new_ds_infos[4][0]) self.assertEqual(new_ds_infos[4][1]['resolution'], 742) # we don't have this 
variable, don't change it self.assertFalse(new_ds_infos[5][0]) self.assertIsNone(new_ds_infos[5][1].get('resolution')) # we have this, but it isn't supposed to come from our file type self.assertIsNone(new_ds_infos[6][0]) self.assertIsNone(new_ds_infos[6][1].get('resolution')) # we could have loaded this but some other file handler said it has this self.assertTrue(new_ds_infos[7][0]) self.assertIsNone(new_ds_infos[7][1].get('resolution')) # we can add resolution to the previous dataset, so we do self.assertTrue(new_ds_infos[8][0]) self.assertEqual(new_ds_infos[8][1]['resolution'], 742) def test_load_all(self): """Test loading all test datasets.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): loadables = r.select_files_from_pathnames([ 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', ]) r.create_filehandlers(loadables) datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertEqual(v.attrs['units'], '1') self.assertEqual(v.attrs['platform_name'], 'npp') self.assertEqual(v.attrs['sensor'], 'viirs') self.assertIsInstance(v.attrs['area'], SwathDefinition) self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 16) self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 16) self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): """Swap-in HDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { '/attr/platform': 'HIM8', '/attr/sensor': 'AHI', # this is a Level 2 file that came from a L1B file '/attr/L1B': 'clavrx_H08_20180806_1800', } file_content['longitude'] = xr.DataArray( DEFAULT_LON_DATA, dims=('y', 'x'), attrs={ '_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'longitude', }) file_content['longitude/shape'] = DEFAULT_FILE_SHAPE file_content['latitude'] = xr.DataArray( DEFAULT_LAT_DATA, dims=('y', 'x'), attrs={ '_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'latitude', }) file_content['latitude/shape'] = DEFAULT_FILE_SHAPE file_content['variable1'] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), dims=('y', 'x'), attrs={ '_FillValue': -1, 'scale_factor': 1., 'add_offset': 0., 'units': '1', 'valid_range': (-32767, 32767), }) file_content['variable1/shape'] = DEFAULT_FILE_SHAPE # data with fill values file_content['variable2'] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), dims=('y', 'x'), attrs={ '_FillValue': -1, 'scale_factor': 1., 'add_offset': 0., 'units': '1', }) file_content['variable2/shape'] = DEFAULT_FILE_SHAPE file_content['variable2'] = file_content['variable2'].where( file_content['variable2'] % 2 != 0) # category file_content['variable3'] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.byte), dims=('y', 'x'), attrs={ 'SCALED': 0, '_FillValue': -128, 'flag_meanings': 'clear water supercooled mixed ice unknown', 'flag_values': [0, 1, 2, 3, 4, 5], 'units': '1', }) file_content['variable3/shape'] = DEFAULT_FILE_SHAPE return file_content class TestCLAVRXReaderGeo(unittest.TestCase): """Test CLAVR-X Reader with Geo files.""" yaml_file = "clavrx.yaml" def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler self.reader_configs = 
config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerGeo,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'clavrx_H08_20180806_1800.level2.hdf', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_no_nav_donor(self): """Test exception raised when no donor file is available.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): loadables = r.select_files_from_pathnames([ 'clavrx_H08_20180806_1800.level2.hdf', ]) r.create_filehandlers(loadables) self.assertRaises(IOError, r.load, ['variable1', 'variable2', 'variable3']) def test_load_all_old_donor(self): """Test loading all test datasets with old donor.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): loadables = r.select_files_from_pathnames([ 'clavrx_H08_20180806_1800.level2.hdf', ]) r.create_filehandlers(loadables) with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: g.return_value = ['fake_donor.nc'] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( semi_major_axis=6378.137, semi_minor_axis=6356.7523142, perspective_point_height=35791, longitude_of_projection_origin=140.7, sweep_angle_axis='y', ) d.return_value = fake_donor = mock.MagicMock( variables={'Projection': proj, 'x': x, 'y': y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertNotIn('calibration', v.attrs) self.assertEqual(v.attrs['units'], '1') self.assertIsInstance(v.attrs['area'], AreaDefinition) if v.attrs.get("flag_values"): self.assertIn('_FillValue', v.attrs) else: self.assertNotIn('_FillValue', v.attrs) if v.attrs["name"] == 'variable1': self.assertIsInstance(v.attrs["valid_range"], list) else: self.assertNotIn('valid_range', v.attrs) if 'flag_values' in v.attrs: self.assertTrue(np.issubdtype(v.dtype, np.integer)) self.assertIsNotNone(v.attrs.get('flag_meanings')) def test_load_all_new_donor(self): """Test loading all test datasets with new donor.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): loadables = r.select_files_from_pathnames([ 'clavrx_H08_20180806_1800.level2.hdf', ]) r.create_filehandlers(loadables) with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: g.return_value = ['fake_donor.nc'] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( semi_major_axis=6378137, semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, sweep_angle_axis='y', ) d.return_value = fake_donor = mock.MagicMock( 
variables={'goes_imager_projection': proj, 'x': x, 'y': y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertNotIn('calibration', v.attrs) self.assertEqual(v.attrs['units'], '1') self.assertIsInstance(v.attrs['area'], AreaDefinition) self.assertTrue(v.attrs['area'].is_geostationary) self.assertEqual(v.attrs['platform_name'], 'himawari8') self.assertEqual(v.attrs['sensor'], 'ahi') self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) satpy-0.34.0/satpy/tests/reader_tests/test_clavrx_nc.py000066400000000000000000000216251420401153000233100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.clavrx module.""" import os from unittest import mock import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc' def fake_test_content(filename, **kwargs): """Mimic reader input file content.""" attrs = { 'platform': 'HIM8', 'sensor': 'AHI', # this is a Level 2 file that came from a L1B file 'L1B': 'clavrx_H08_20210603_1500_B01_FLDK_R', } longitude = xr.DataArray(DEFAULT_LON_DATA, dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), attrs={'_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'longitude', 'units': 'degrees_east' }) latitude = xr.DataArray(DEFAULT_LAT_DATA, dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), attrs={'_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'latitude', 'units': 'degrees_south' }) variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), attrs={'_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'units': '1', 'valid_range': [-32767, 32767], }) # data with fill values variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), attrs={'_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'units': '1', 'valid_range': [-32767, 
32767], }) variable2 = variable2.where(variable2 % 2 != 0) # category variable3 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.byte), dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), attrs={'SCALED': 0, '_FillValue': -128, 'flag_meanings': 'clear water supercooled mixed ice unknown', 'flag_values': [0, 1, 2, 3, 4, 5], 'units': '1', }) ds_vars = { 'longitude': longitude, 'latitude': latitude, 'variable1': variable1, 'variable2': variable2, 'variable3': variable3 } ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"latitude": latitude, "longitude": longitude}) return ds class TestCLAVRXReaderGeo: """Test CLAVR-X Reader with Geo files.""" yaml_file = "clavrx.yaml" def setup_method(self): """Read fake data.""" from satpy._config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), [([AHI_FILE], 1)] ) def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) assert len(loadables) == expected_loadables r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers @pytest.mark.parametrize( ("filenames", "expected_datasets"), [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ] ) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) avails = list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails @pytest.mark.parametrize( ("filenames", "loadable_ids"), [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ] ) def test_load_all_new_donor(self, filenames, loadable_ids): """Test loading all test datasets with new donor.""" from satpy.readers import load_reader with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) with mock.patch('satpy.readers.clavrx.glob') as g, \ mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: g.return_value = ['fake_donor.nc'] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( semi_major_axis=6378137, semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, sweep_angle_axis='y', ) d.return_value = fake_donor = mock.MagicMock( variables={'goes_imager_projection': proj, 'x': x, 'y': y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(loadable_ids) assert len(datasets) == 3 for v in datasets.values(): assert 'calibration' not in v.attrs assert v.attrs['units'] == '1' assert isinstance(v.attrs['area'], AreaDefinition) assert v.attrs['platform_name'] == 'himawari8' assert v.attrs['sensor'] == 'AHI' assert 'rows_per_scan' not in v.coords.get('longitude').attrs if v.attrs["name"] in ["variable1", "variable2"]: assert isinstance(v.attrs["valid_range"], list) assert v.dtype == 
np.float32 else: assert (datasets['variable3'].attrs.get('flag_meanings')) is not None assert "_FillValue" in v.attrs.keys() assert np.issubdtype(v.dtype, np.integer) satpy-0.34.0/satpy/tests/reader_tests/test_cmsaf_claas.py000066400000000000000000000131261420401153000235620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'cmsaf-claas2_l2_nc' reader.""" import datetime import os from unittest import mock import numpy as np import numpy.testing import pytest import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Class for faking the NetCDF4 Filehandler.""" _nrows = 30 _ncols = 40 def __init__(self, *args, auto_maskandscale, **kwargs): """Init the file handler.""" # make sure that CLAAS2 reader asks NetCDF4FileHandler for having # auto_maskandscale enabled assert auto_maskandscale super().__init__(*args, **kwargs) def _get_global_attributes(self): data = {} attrs = { "CMSAF_proj4_params": "+a=6378169.0 +h=35785831.0 " "+b=6356583.8 +lon_0=0 +proj=geos", "CMSAF_area_extent": np.array( [-5456233.41938636, -5453233.01608472, 5453233.01608472, 5456233.41938636]), "time_coverage_start": "1985-08-13T13:15:00Z", "time_coverage_end": "2085-08-13T13:15:00Z", } for (k, v) in attrs.items(): data["/attr/" + k] = v return data def _get_data(self): data = { "cph": xr.DataArray( np.arange(self._nrows*self._ncols, dtype="i4").reshape( (1, self._nrows, self._ncols))/100, dims=("time", "y", "x")), "ctt": xr.DataArray( np.arange(self._nrows*self._ncols, 0, -1, dtype="i4").reshape( (self._nrows, self._ncols))/100, dims=("y", "x")), "time_bnds": xr.DataArray( [[12436.91666667, 12436.92534722]], dims=("time", "time_bnds"))} for k in set(data.keys()): data[f"{k:s}/dimensions"] = data[k].dims data[f"{k:s}/attr/fruit"] = "apple" data[f"{k:s}/attr/scale_factor"] = np.float32(0.01) return data def _get_dimensions(self): data = { "/dimension/x": self._nrows, "/dimension/y": self._ncols, "/dimension/time": 1, "/dimension/time_bnds": 2, } return data def get_test_content(self, filename, filename_info, filetype_info): """Get the content of the test data.""" # mock global attributes # - root groups global # - other groups global # mock data variables # mock dimensions # # ... but only what satpy is using ... 
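# --------------------------------------------------------------------------
# Illustrative sketch, not part of the original test module.  The fake file
# handlers in these reader tests describe a whole input file as a flat
# dictionary: global attributes under "/attr/<name>", variables under their
# own name (with "<name>/shape", "<name>/dimensions" and "<name>/attr/<key>"
# companions) and dimension sizes under "/dimension/<name>".  A minimal,
# standalone content dictionary in that style (values here are made up):
import numpy as np
import xarray as xr

fake_content = {
    "/attr/platform": "HIM8",                 # global file attribute
    "/dimension/y": 10,                       # dimension sizes
    "/dimension/x": 300,
    "variable1": xr.DataArray(                # the variable itself
        np.zeros((10, 300), dtype=np.float32),
        dims=("y", "x"),
        attrs={"scale_factor": 1.0, "add_offset": 0.0, "units": "1"},
    ),
    "variable1/shape": (10, 300),             # companion entries mimicking
    "variable1/attr/units": "1",              # what the real handlers expose
}
assert fake_content["variable1"].shape == fake_content["variable1/shape"]
# --------------------------------------------------------------------------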
D = {} D.update(self._get_data()) D.update(self._get_dimensions()) D.update(self._get_global_attributes()) return D @pytest.fixture def reader(): """Return reader for CMSAF CLAAS-2.""" from satpy._config import config_search_paths from satpy.readers import load_reader reader_configs = config_search_paths( os.path.join("readers", "cmsaf-claas2_l2_nc.yaml")) reader = load_reader(reader_configs) return reader @pytest.fixture(autouse=True, scope="class") def fake_handler(): """Wrap NetCDF4 FileHandler with our own fake handler.""" # implementation strongly inspired by test_viirs_l1b.py from satpy.readers.cmsaf_claas2 import CLAAS2 p = mock.patch.object( CLAAS2, "__bases__", (FakeNetCDF4FileHandler2,)) with p: p.is_local = True yield p def test_file_pattern(reader): """Test file pattern matching.""" filenames = [ "CTXin20040120091500305SVMSG01MD.nc", "CTXin20040120093000305SVMSG01MD.nc", "CTXin20040120094500305SVMSG01MD.nc", "abcde52034294023489248MVSSG03DD.nc"] files = reader.select_files_from_pathnames(filenames) # only 3 out of 4 above should match assert len(files) == 3 def test_load(reader): """Test loading.""" from satpy.tests.utils import make_dataid # testing two filenames to test correctly combined filenames = [ "CTXin20040120091500305SVMSG01MD.nc", "CTXin20040120093000305SVMSG01MD.nc"] loadables = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(loadables) res = reader.load( [make_dataid(name=name) for name in ["cph", "ctt"]]) assert 2 == len(res) assert reader.start_time == datetime.datetime(1985, 8, 13, 13, 15) assert reader.end_time == datetime.datetime(2085, 8, 13, 13, 15) np.testing.assert_array_almost_equal( res["cph"].data, np.tile(np.arange(0.0, 12.0, 0.01).reshape((30, 40)), [2, 1])) np.testing.assert_array_almost_equal( res["ctt"].data, np.tile(np.arange(12.0, 0.0, -0.01).reshape((30, 40)), [2, 1])) satpy-0.34.0/satpy/tests/reader_tests/test_electrol_hrit.py000066400000000000000000000245631420401153000241740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The HRIT electrol reader tests package.""" import datetime import unittest from unittest import mock import dask.array as da import numpy as np from xarray import DataArray from satpy.readers.electrol_hrit import ( HRITGOMSEpilogueFileHandler, HRITGOMSFileHandler, HRITGOMSPrologueFileHandler, epilogue, image_acquisition, prologue, recarray2dict, satellite_status, ) from satpy.tests.utils import make_dataid # Simplify some type selections f64_t = np.float64 i32_t = np.int32 u32_t = np.uint32 class Testrecarray2dict(unittest.TestCase): """Test the function that converts numpy record arrays into dicts for use within SatPy.""" def test_fun(self): """Test record array.""" inner_st = np.dtype([('test_str', '. 
"""Test the eps l1b format.""" import os from contextlib import suppress from tempfile import mkstemp from unittest import TestCase, mock import numpy as np import pytest import xarray as xr import satpy from satpy._config import get_config_path from satpy.readers import eps_l1b as eps from satpy.tests.utils import make_dataid grh_dtype = np.dtype([("record_class", "|i1"), ("INSTRUMENT_GROUP", "|i1"), ("RECORD_SUBCLASS", "|i1"), ("RECORD_SUBCLASS_VERSION", "|i1"), ("RECORD_SIZE", ">u4"), ("RECORD_START_TIME", "S6"), ("RECORD_STOP_TIME", "S6")]) def create_sections(structure): """Create file sections.""" sections = {} format_fn = get_config_path("eps_avhrrl1b_6.5.xml") form = eps.XMLFormat(format_fn) for count, (rec_class, sub_class) in structure: try: the_dtype = form.dtype((rec_class, sub_class)) except KeyError: continue item_size = the_dtype.itemsize + grh_dtype.itemsize the_dtype = np.dtype(grh_dtype.descr + the_dtype.descr) item = np.zeros(count, the_dtype) item['record_class'] = eps.record_class.index(rec_class) item['RECORD_SUBCLASS'] = sub_class item['RECORD_SIZE'] = item_size sections[(rec_class, sub_class)] = item return sections class BaseTestCaseEPSL1B(TestCase): """Base class for EPS l1b test case.""" def _create_structure(self): structure = [(1, ('mphr', 0)), (1, ('sphr', 0)), (11, ('ipr', 0)), (1, ('geadr', 1)), (1, ('geadr', 2)), (1, ('geadr', 3)), (1, ('geadr', 4)), (1, ('geadr', 5)), (1, ('geadr', 6)), (1, ('geadr', 7)), (1, ('giadr', 1)), (1, ('giadr', 2)), (1, ('veadr', 1)), (self.scan_lines, ('mdr', 2))] sections = create_sections(structure) return sections class TestEPSL1B(BaseTestCaseEPSL1B): """Test the filehandler.""" def setUp(self): """Set up the tests.""" # ipr is not present in the xml format ? self.scan_lines = 1080 self.earth_views = 2048 sections = self._create_structure() sections[('mphr', 0)]['TOTAL_MDR'] = (b'TOTAL_MDR = ' + bytes(str(self.scan_lines), encoding='ascii') + b'\n') sections[('mphr', 0)]['SPACECRAFT_ID'] = b'SPACECRAFT_ID = M03\n' sections[('mphr', 0)]['INSTRUMENT_ID'] = b'INSTRUMENT_ID = AVHR\n' sections[('sphr', 0)]['EARTH_VIEWS_PER_SCANLINE'] = (b'EARTH_VIEWS_PER_SCANLINE = ' + bytes(str(self.earth_views), encoding='ascii') + b'\n') sections[('sphr', 0)]['NAV_SAMPLE_RATE'] = b'NAV_SAMPLE_RATE = 20\n' _fd, fname = mkstemp() fd = open(_fd) self.filename = fname for _, arr in sections.items(): arr.tofile(fd) fd.close() self.fh = eps.EPSAVHRRFile(self.filename, {'start_time': 'now', 'end_time': 'later'}, {}) def test_read_all(self): """Test initialization.""" self.fh._read_all() assert(self.fh.scanlines == 1080) assert(self.fh.pixels == 2048) def test_dataset(self): """Test getting a dataset.""" did = make_dataid(name='1', calibration='reflectance') res = self.fh.get_dataset(did, {}) assert(isinstance(res, xr.DataArray)) assert(res.attrs['platform_name'] == 'Metop-C') assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == '1') assert(res.attrs['calibration'] == 'reflectance') assert(res.attrs['units'] == '%') did = make_dataid(name='4', calibration='brightness_temperature') res = self.fh.get_dataset(did, {}) assert(isinstance(res, xr.DataArray)) assert(res.attrs['platform_name'] == 'Metop-C') assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == '4') assert(res.attrs['calibration'] == 'brightness_temperature') assert(res.attrs['units'] == 'K') def test_navigation(self): """Test the navigation.""" did = make_dataid(name='longitude') res = self.fh.get_dataset(did, {}) assert(isinstance(res, xr.DataArray)) 
assert(res.attrs['platform_name'] == 'Metop-C') assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == 'longitude') def test_angles(self): """Test the navigation.""" did = make_dataid(name='solar_zenith_angle') res = self.fh.get_dataset(did, {}) assert(isinstance(res, xr.DataArray)) assert(res.attrs['platform_name'] == 'Metop-C') assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == 'solar_zenith_angle') @mock.patch('satpy.readers.eps_l1b.EPSAVHRRFile.__getitem__') @mock.patch('satpy.readers.eps_l1b.EPSAVHRRFile.__init__') def test_get_full_angles_twice(self, mock__init__, mock__getitem__): """Test get full angles twice.""" geotiemock = mock.Mock() metop20kmto1km = geotiemock.metop20kmto1km metop20kmto1km.side_effect = lambda x, y: (x.copy(), y.copy()) def mock_getitem(key): data = {"ANGULAR_RELATIONS_FIRST": np.zeros((7, 4)), "ANGULAR_RELATIONS": np.zeros((7, 103, 4)), "ANGULAR_RELATIONS_LAST": np.zeros((7, 4)), "NAV_SAMPLE_RATE": 20} return data[key] mock__init__.return_value = None mock__getitem__.side_effect = mock_getitem avhrr_reader = satpy.readers.eps_l1b.EPSAVHRRFile() avhrr_reader.scanlines = 7 avhrr_reader.pixels = 2048 with mock.patch.dict("sys.modules", geotiepoints=geotiemock): # Get dask arrays sun_azi, sun_zen, sat_azi, sat_zen = avhrr_reader.get_full_angles() # Convert to numpy array sun_zen_np1 = np.array(sun_zen) # Convert to numpy array again sun_zen_np2 = np.array(sun_zen) assert np.allclose(sun_zen_np1, sun_zen_np2) class TestWrongScanlinesEPSL1B(BaseTestCaseEPSL1B): """Test the filehandler on a corrupt file.""" @pytest.fixture(autouse=True) def inject_fixtures(self, caplog): """Inject caplog.""" self._caplog = caplog def setUp(self): """Set up the tests.""" # ipr is not present in the xml format ? 
self.scan_lines = 1080 self.earth_views = 2048 sections = self._create_structure() sections[('mphr', 0)]['TOTAL_MDR'] = (b'TOTAL_MDR = ' + bytes(str(self.scan_lines - 2), encoding='ascii') + b'\n') sections[('mphr', 0)]['SPACECRAFT_ID'] = b'SPACECRAFT_ID = M03\n' sections[('mphr', 0)]['INSTRUMENT_ID'] = b'INSTRUMENT_ID = AVHR\n' sections[('sphr', 0)]['EARTH_VIEWS_PER_SCANLINE'] = (b'EARTH_VIEWS_PER_SCANLINE = ' + bytes(str(self.earth_views), encoding='ascii') + b'\n') sections[('sphr', 0)]['NAV_SAMPLE_RATE'] = b'NAV_SAMPLE_RATE = 20\n' _fd, fname = mkstemp() fd = open(_fd) self.filename = fname for _, arr in sections.items(): arr.tofile(fd) fd.close() self.fh = eps.EPSAVHRRFile(self.filename, {'start_time': 'now', 'end_time': 'later'}, {}) def test_read_all_return_right_number_of_scan_lines(self): """Test scanline assignment.""" self.fh._read_all() assert self.fh.scanlines == self.scan_lines def test_read_all_warns_about_scan_lines(self): """Test scanline assignment.""" self.fh._read_all() assert "scanlines" in self._caplog.records[0].message assert self._caplog.records[0].levelname == 'WARNING' def test_read_all_assigns_int_scan_lines(self): """Test scanline assignment.""" self.fh._read_all() assert isinstance(self.fh.scanlines, int) def test_get_dataset_longitude_shape_is_right(self): """Test that the shape of longitude is 1080.""" key = make_dataid(name="longitude") longitudes = self.fh.get_dataset(key, dict()) assert longitudes.shape == (self.scan_lines, self.earth_views) def tearDown(self): """Tear down the tests.""" with suppress(OSError): os.remove(self.filename) class TestWrongSamplingEPSL1B(BaseTestCaseEPSL1B): """Test the filehandler on a corrupt file.""" @pytest.fixture(autouse=True) def inject_fixtures(self, caplog): """Inject caplog.""" self._caplog = caplog def setUp(self): """Set up the tests.""" self.scan_lines = 1080 self.earth_views = 2048 self.sample_rate = 23 sections = self._create_structure() sections[('mphr', 0)]['TOTAL_MDR'] = (b'TOTAL_MDR = ' + bytes(str(self.scan_lines), encoding='ascii') + b'\n') sections[('mphr', 0)]['SPACECRAFT_ID'] = b'SPACECRAFT_ID = M03\n' sections[('mphr', 0)]['INSTRUMENT_ID'] = b'INSTRUMENT_ID = AVHR\n' sections[('sphr', 0)]['EARTH_VIEWS_PER_SCANLINE'] = (b'EARTH_VIEWS_PER_SCANLINE = ' + bytes(str(self.earth_views), encoding='ascii') + b'\n') sections[('sphr', 0)]['NAV_SAMPLE_RATE'] = (b'NAV_SAMPLE_RATE = ' + bytes(str(self.sample_rate), encoding='ascii') + b'\n') _fd, fname = mkstemp() fd = open(_fd) self.filename = fname for _, arr in sections.items(): arr.tofile(fd) fd.close() self.fh = eps.EPSAVHRRFile(self.filename, {'start_time': 'now', 'end_time': 'later'}, {}) def test_get_dataset_fails_because_of_wrong_sample_rate(self): """Test that lons fail to be interpolate.""" key = make_dataid(name="longitude") with pytest.raises(NotImplementedError): self.fh.get_dataset(key, dict()) satpy-0.34.0/satpy/tests/reader_tests/test_eum_base.py000066400000000000000000000156231420401153000231120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """EUMETSAT base reader tests package.""" import unittest from datetime import datetime import numpy as np from satpy.readers.eum_base import ( get_service_mode, recarray2dict, time_cds, time_cds_expanded, time_cds_short, timecds2datetime, ) from satpy.readers.seviri_base import mpef_product_header class TestMakeTimeCdsDictionary(unittest.TestCase): """Test TestMakeTimeCdsDictionary.""" def test_fun(self): """Test function for TestMakeTimeCdsDictionary.""" # time_cds_short tcds = {'Days': 1, 'Milliseconds': 2} expected = datetime(1958, 1, 2, 0, 0, 0, 2000) self.assertEqual(timecds2datetime(tcds), expected) # time_cds tcds = {'Days': 1, 'Milliseconds': 2, 'Microseconds': 3} expected = datetime(1958, 1, 2, 0, 0, 0, 2003) self.assertEqual(timecds2datetime(tcds), expected) # time_cds_expanded tcds = {'Days': 1, 'Milliseconds': 2, 'Microseconds': 3, 'Nanoseconds': 4} expected = datetime(1958, 1, 2, 0, 0, 0, 2003) self.assertEqual(timecds2datetime(tcds), expected) class TestMakeTimeCdsRecarray(unittest.TestCase): """Test TestMakeTimeCdsRecarray.""" def test_fun(self): """Test function for TestMakeTimeCdsRecarray.""" # time_cds_short tcds = np.array([(1, 2)], dtype=np.dtype(time_cds_short)) expected = datetime(1958, 1, 2, 0, 0, 0, 2000) self.assertEqual(timecds2datetime(tcds), expected) # time_cds tcds = np.array([(1, 2, 3)], dtype=np.dtype(time_cds)) expected = datetime(1958, 1, 2, 0, 0, 0, 2003) self.assertEqual(timecds2datetime(tcds), expected) # time_cds_expanded tcds = np.array([(1, 2, 3, 4)], dtype=np.dtype(time_cds_expanded)) expected = datetime(1958, 1, 2, 0, 0, 0, 2003) self.assertEqual(timecds2datetime(tcds), expected) class TestRecarray2Dict(unittest.TestCase): """Test TestRecarray2Dict.""" def test_timestamps(self): """Test function for TestRecarray2Dict.""" # datatype definition pat_dt = np.dtype([ ('TrueRepeatCycleStart', time_cds_expanded), ('PlanForwardScanEnd', time_cds_expanded), ('PlannedRepeatCycleEnd', time_cds_expanded) ]) # planned acquisition time, add extra dimensions # these should be removed by recarray2dict pat = np.array([[[( (21916, 41409544, 305, 262), (21916, 42160340, 659, 856), (21916, 42309417, 918, 443))]]], dtype=pat_dt) expected = { 'TrueRepeatCycleStart': datetime(2018, 1, 2, 11, 30, 9, 544305), 'PlanForwardScanEnd': datetime(2018, 1, 2, 11, 42, 40, 340660), 'PlannedRepeatCycleEnd': datetime(2018, 1, 2, 11, 45, 9, 417918) } self.assertEqual(recarray2dict(pat), expected) def test_mpef_product_header(self): """Test function for TestRecarray2Dict and mpef product header.""" names = ['ImageLocation', 'GsicsCalMode', 'GsicsCalValidity', 'Padding', 'OffsetToData', 'Padding2'] mpef_header = np.dtype([(name, mpef_product_header.fields[name][0]) for name in names]) mph_struct = np.array([('OPE', True, False, 'XX', 1000, '12345678')], dtype=mpef_header) test_mph = {'ImageLocation': "OPE", 'GsicsCalMode': True, 'GsicsCalValidity': False, 'Padding': 'XX', 'OffsetToData': 1000, 'Padding2': '12345678' } self.assertEqual(recarray2dict(mph_struct), test_mph) class TestGetServiceMode(unittest.TestCase): """Test the get_service_mode function.""" def test_get_seviri_service_mode_fes(self): """Test fetching of SEVIRI service mode information for 
FES."""
        ssp_lon = 0.0
        name = 'fes'
        desc = 'Full Earth Scanning service'
        res = get_service_mode('seviri', ssp_lon)
        self.assertEqual(res['service_name'], name)
        self.assertEqual(res['service_desc'], desc)

    def test_get_seviri_service_mode_rss(self):
        """Test fetching of SEVIRI service mode information for RSS."""
        ssp_lon = 9.5
        name = 'rss'
        desc = 'Rapid Scanning Service'
        res = get_service_mode('seviri', ssp_lon)
        self.assertEqual(res['service_name'], name)
        self.assertEqual(res['service_desc'], desc)

    def test_get_seviri_service_mode_iodc(self):
        """Test fetching of SEVIRI service mode information for IODC."""
        ssp_lon = 41.5
        name = 'iodc'
        desc = 'Indian Ocean Data Coverage service'
        res = get_service_mode('seviri', ssp_lon)
        self.assertEqual(res['service_name'], name)
        self.assertEqual(res['service_desc'], desc)

    def test_get_fci_service_mode_fdss(self):
        """Test fetching of FCI service mode information for FDSS."""
        ssp_lon = 0.0
        name = 'fdss'
        desc = 'Full Disk Scanning Service'
        res = get_service_mode('fci', ssp_lon)
        self.assertEqual(res['service_name'], name)
        self.assertEqual(res['service_desc'], desc)

    def test_get_fci_service_mode_rss(self):
        """Test fetching of FCI service mode information for RSS."""
        ssp_lon = 9.5
        name = 'rss'
        desc = 'Rapid Scanning Service'
        res = get_service_mode('fci', ssp_lon)
        self.assertEqual(res['service_name'], name)
        self.assertEqual(res['service_desc'], desc)

    def test_get_unknown_lon_service_mode(self):
        """Test fetching of service mode information for unknown input longitude."""
        ssp_lon = 13
        name = 'unknown'
        desc = 'unknown'
        res = get_service_mode('fci', ssp_lon)
        self.assertEqual(res['service_name'], name)
        self.assertEqual(res['service_desc'], desc)

    def test_get_unknown_instrument_service_mode(self):
        """Test fetching of service mode information for unknown input instrument."""
        ssp_lon = 0
        name = 'unknown'
        desc = 'unknown'
        res = get_service_mode('test', ssp_lon)
        self.assertEqual(res['service_name'], name)
        self.assertEqual(res['service_desc'], desc)
satpy-0.34.0/satpy/tests/reader_tests/test_fci_l1c_nc.py000066400000000000000000000602141420401153000233060ustar00rootroot00000000000000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
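# --------------------------------------------------------------------------
# Illustrative sketch, not part of the original test module.  The
# TestGetServiceMode cases above pin down the behaviour of
# ``satpy.readers.eum_base.get_service_mode``: given an instrument name and a
# sub-satellite longitude it returns a small dict describing the scanning
# service.  Standalone usage of the same function:
from satpy.readers.eum_base import get_service_mode

fes = get_service_mode("seviri", 0.0)
assert fes["service_name"] == "fes"
assert fes["service_desc"] == "Full Earth Scanning service"

iodc = get_service_mode("seviri", 41.5)
assert iodc["service_name"] == "iodc"

# Longitudes and instruments that are not configured fall back to "unknown".
assert get_service_mode("fci", 13)["service_name"] == "unknown"
# --------------------------------------------------------------------------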
"""Tests for the 'fci_l1c_nc' reader.""" import logging import os from unittest import mock import dask.array as da import numpy as np import numpy.testing import pytest import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Class for faking the NetCDF4 Filehandler.""" def _get_test_calib_for_channel_ir(self, chroot, meas): from pyspectral.blackbody import C_SPEED as c from pyspectral.blackbody import H_PLANCK as h from pyspectral.blackbody import K_BOLTZMANN as k xrda = xr.DataArray data = {} data[meas + "/radiance_to_bt_conversion_coefficient_wavenumber"] = xrda(955) data[meas + "/radiance_to_bt_conversion_coefficient_a"] = xrda(1) data[meas + "/radiance_to_bt_conversion_coefficient_b"] = xrda(0.4) data[meas + "/radiance_to_bt_conversion_constant_c1"] = xrda(1e11 * 2 * h * c ** 2) data[meas + "/radiance_to_bt_conversion_constant_c2"] = xrda(1e2 * h * c / k) return data def _get_test_calib_for_channel_vis(self, chroot, meas): xrda = xr.DataArray data = {} data["state/celestial/earth_sun_distance"] = xrda(da.repeat(da.array([149597870.7]), 6000)) data[meas + "/channel_effective_solar_irradiance"] = xrda(50) return data def _get_test_content_for_channel(self, pat, ch): xrda = xr.DataArray nrows = 200 ncols = 11136 chroot = "data/{:s}" meas = chroot + "/measured" rad = meas + "/effective_radiance" qual = meas + "/pixel_quality" index_map = meas + "/index_map" rad_conv_coeff = meas + "/radiance_unit_conversion_coefficient" pos = meas + "/{:s}_position_{:s}" shp = rad + "/shape" x = meas + "/x" y = meas + "/y" data = {} ch_str = pat.format(ch) ch_path = rad.format(ch_str) common_attrs = { "scale_factor": 5, "add_offset": 10, "long_name": "Effective Radiance", "units": "mW.m-2.sr-1.(cm-1)-1", "ancillary_variables": "pixel_quality" } if ch == 38: fire_line = da.ones((1, ncols), dtype="uint16", chunks=1024) * 5000 data_without_fires = da.ones((nrows - 1, ncols), dtype="uint16", chunks=1024) d = xrda( da.concatenate([fire_line, data_without_fires], axis=0), dims=("y", "x"), attrs={ "valid_range": [0, 8191], "warm_scale_factor": 2, "warm_add_offset": -300, **common_attrs } ) else: d = xrda( da.ones((nrows, ncols), dtype="uint16", chunks=1024), dims=("y", "x"), attrs={ "valid_range": [0, 4095], "warm_scale_factor": 1, "warm_add_offset": 0, **common_attrs } ) data[ch_path] = d data[x.format(ch_str)] = xrda( da.arange(1, ncols + 1, dtype="uint16"), dims=("x",), attrs={ "scale_factor": -5.58877772833e-05, "add_offset": 0.155619515845, } ) data[y.format(ch_str)] = xrda( da.arange(1, nrows + 1, dtype="uint16"), dims=("y",), attrs={ "scale_factor": -5.58877772833e-05, "add_offset": 0.155619515845, } ) data[qual.format(ch_str)] = xrda( da.arange(nrows * ncols, dtype="uint8").reshape(nrows, ncols) % 128, dims=("y", "x")) # add dummy data for index map starting from 1 data[index_map.format(ch_str)] = xrda( (da.arange(nrows * ncols, dtype="uint16").reshape(nrows, ncols) % 6000) + 1, dims=("y", "x")) data[rad_conv_coeff.format(ch_str)] = xrda(1234.56) data[pos.format(ch_str, "start", "row")] = xrda(0) data[pos.format(ch_str, "start", "column")] = xrda(0) data[pos.format(ch_str, "end", "row")] = xrda(nrows) data[pos.format(ch_str, "end", "column")] = xrda(ncols) if pat.startswith("ir") or pat.startswith("wv"): data.update(self._get_test_calib_for_channel_ir(chroot.format(ch_str), meas.format(ch_str))) elif pat.startswith("vis") or pat.startswith("nir"): 
data.update(self._get_test_calib_for_channel_vis(chroot.format(ch_str), meas.format(ch_str))) data[shp.format(ch_str)] = (nrows, ncols) return data def _get_test_content_all_channels(self): chan_patterns = { "vis_{:>02d}": (4, 5, 6, 8, 9), "nir_{:>02d}": (13, 16, 22), "ir_{:>02d}": (38, 87, 97, 105, 123, 133), "wv_{:>02d}": (63, 73), } data = {} for pat in chan_patterns: for ch_num in chan_patterns[pat]: data.update(self._get_test_content_for_channel(pat, ch_num)) return data def _get_test_content_areadef(self): data = {} proj = "data/mtg_geos_projection" attrs = { "sweep_angle_axis": "y", "perspective_point_height": "35786400.0", "semi_major_axis": "6378137.0", "longitude_of_projection_origin": "0.0", "inverse_flattening": "298.257223563", "units": "m"} data[proj] = xr.DataArray( 0, dims=(), attrs=attrs) # also set attributes cached, as this may be how they are accessed with # the NetCDF4FileHandler for (k, v) in attrs.items(): data[proj + "/attr/" + k] = v return data def _get_test_content_aux_data(self): from satpy.readers.fci_l1c_nc import AUX_DATA xrda = xr.DataArray data = {} indices_dim = 6000 for key, value in AUX_DATA.items(): # skip population of earth_sun_distance as this is already defined for reflectance calculation if key == 'earth_sun_distance': continue data[value] = xrda(da.arange(indices_dim, dtype="float32"), dims=("index")) # compute the last data entry to simulate the FCI caching data[list(AUX_DATA.values())[-1]] = data[list(AUX_DATA.values())[-1]].compute() return data def _get_global_attributes(self): data = {} attrs = {"platform": "MTI1"} for (k, v) in attrs.items(): data["/attr/" + k] = v return data def get_test_content(self, filename, filename_info, filetype_info): """Get the content of the test data.""" # mock global attributes # - root groups global # - other groups global # mock data variables # mock dimensions # # ... but only what satpy is using ... 
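# --------------------------------------------------------------------------
# Illustrative sketch, not part of the original test module.  Several of
# these reader tests swap the real NetCDF4/HDF4 base class of a file handler
# for a fake one by patching ``__bases__`` (see the ``fake_handler`` fixture
# below), so that ``get_test_content`` is served from memory instead of from
# a file on disk.  A stripped-down, standalone version of that trick, with
# made-up classes:
from unittest import mock


class FakeBase:
    def get_test_content(self):
        return {"/attr/platform": "fake"}


class RealBase:
    def get_test_content(self):
        raise IOError("would need a real file on disk")


class Handler(RealBase):
    pass


patcher = mock.patch.object(Handler, "__bases__", (FakeBase,))
with patcher:
    # ``is_local`` makes mock restore the original bases on exit instead of
    # trying to delete the attribute (which would fail for ``__bases__``).
    patcher.is_local = True
    assert Handler().get_test_content() == {"/attr/platform": "fake"}
# --------------------------------------------------------------------------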
D = {} D.update(self._get_test_content_all_channels()) D.update(self._get_test_content_areadef()) D.update(self._get_test_content_aux_data()) D.update(self._get_global_attributes()) return D class FakeNetCDF4FileHandler3(FakeNetCDF4FileHandler2): """Mock bad data.""" def _get_test_calib_for_channel_ir(self, chroot, meas): from netCDF4 import default_fillvals v = xr.DataArray(default_fillvals["f4"]) data = {} data[meas + "/radiance_to_bt_conversion_coefficient_wavenumber"] = v data[meas + "/radiance_to_bt_conversion_coefficient_a"] = v data[meas + "/radiance_to_bt_conversion_coefficient_b"] = v data[meas + "/radiance_to_bt_conversion_constant_c1"] = v data[meas + "/radiance_to_bt_conversion_constant_c2"] = v return data def _get_test_calib_for_channel_vis(self, chroot, meas): data = super()._get_test_calib_for_channel_vis(chroot, meas) from netCDF4 import default_fillvals v = xr.DataArray(default_fillvals["f4"]) data[meas + "/channel_effective_solar_irradiance"] = v return data @pytest.fixture def reader_configs(): """Return reader configs for FCI.""" from satpy._config import config_search_paths return config_search_paths( os.path.join("readers", "fci_l1c_nc.yaml")) def _get_reader_with_filehandlers(filenames, reader_configs): from satpy.readers import load_reader reader = load_reader(reader_configs) loadables = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(loadables) return reader class TestFCIL1cNCReader: """Initialize the unittest TestCase for the FCI L1c NetCDF Reader.""" yaml_file = "fci_l1c_nc.yaml" _alt_handler = FakeNetCDF4FileHandler2 @pytest.fixture(autouse=True, scope="class") def fake_handler(self): """Wrap NetCDF4 FileHandler with our own fake handler.""" # implementation strongly inspired by test_viirs_l1b.py from satpy.readers.fci_l1c_nc import FCIL1cNCFileHandler p = mock.patch.object( FCIL1cNCFileHandler, "__bases__", (self._alt_handler,)) with p: p.is_local = True yield p class TestFCIL1cNCReaderGoodData(TestFCIL1cNCReader): """Test FCI L1c NetCDF reader.""" _alt_handler = FakeNetCDF4FileHandler2 def test_file_pattern(self, reader_configs): """Test file pattern matching.""" from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114442_GTT_DEV_" "20170410113934_20170410113942_N__C_0070_0068.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114451_GTT_DEV_" "20170410113942_20170410113951_N__C_0070_0069.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114500_GTT_DEV_" "20170410113951_20170410114000_N__C_0070_0070.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-TRAIL--L2P-NC4E_C_EUMT_20170410114600_GTT_DEV_" "20170410113000_20170410114000_N__C_0070_0071.nc", ] reader = load_reader(reader_configs) files = reader.select_files_from_pathnames(filenames) # only 4 out of 5 above should match assert len(files) == 4 _chans = {"solar": ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", "nir_13", "nir_16", "nir_22"], "terran": ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", "ir_123", "ir_133"]} def test_load_counts(self, reader_configs): """Test loading with counts.""" from satpy.tests.utils import make_dataid # testing two filenames to test correctly combined filenames 
= [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114442_GTT_DEV_" "20170410113934_20170410113942_N__C_0070_0068.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) res = reader.load( [make_dataid(name=name, calibration="counts") for name in self._chans["solar"] + self._chans["terran"]], pad_data=False) assert 16 == len(res) for ch in self._chans["solar"] + self._chans["terran"]: assert res[ch].shape == (200 * 2, 11136) assert res[ch].dtype == np.uint16 assert res[ch].attrs["calibration"] == "counts" assert res[ch].attrs["units"] == "count" if ch == 'ir_38': numpy.testing.assert_array_equal(res[ch][~0], 1) numpy.testing.assert_array_equal(res[ch][0], 5000) else: numpy.testing.assert_array_equal(res[ch], 1) def test_load_radiance(self, reader_configs): """Test loading with radiance.""" from satpy.tests.utils import make_dataid filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) res = reader.load( [make_dataid(name=name, calibration="radiance") for name in self._chans["solar"] + self._chans["terran"]], pad_data=False) assert 16 == len(res) for ch in self._chans["solar"] + self._chans["terran"]: assert res[ch].shape == (200, 11136) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "radiance" assert res[ch].attrs["units"] == 'mW m-2 sr-1 (cm-1)-1' assert res[ch].attrs["radiance_unit_conversion_coefficient"] == 1234.56 if ch == 'ir_38': numpy.testing.assert_array_equal(res[ch][~0], 15) numpy.testing.assert_array_equal(res[ch][0], 9700) else: numpy.testing.assert_array_equal(res[ch], 15) def test_load_reflectance(self, reader_configs): """Test loading with reflectance.""" from satpy.tests.utils import make_dataid filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) res = reader.load( [make_dataid(name=name, calibration="reflectance") for name in self._chans["solar"]], pad_data=False) assert 8 == len(res) for ch in self._chans["solar"]: assert res[ch].shape == (200, 11136) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "reflectance" assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) def test_load_bt(self, reader_configs, caplog): """Test loading with bt.""" from satpy.tests.utils import make_dataid filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) with caplog.at_level(logging.WARNING): res = reader.load( [make_dataid(name=name, calibration="brightness_temperature") for name in self._chans["terran"]], pad_data=False) assert caplog.text == "" for ch in self._chans["terran"]: assert res[ch].shape == (200, 11136) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "brightness_temperature" assert res[ch].attrs["units"] == "K" if ch == 'ir_38': 
numpy.testing.assert_array_almost_equal(res[ch][~0], 209.68274099) numpy.testing.assert_array_almost_equal(res[ch][0], 1888.851296) else: numpy.testing.assert_array_almost_equal(res[ch], 209.68274099) def test_load_index_map(self, reader_configs): """Test loading of index_map.""" filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ] reader = _get_reader_with_filehandlers(filenames, reader_configs) res = reader.load( [name + '_index_map' for name in self._chans["solar"] + self._chans["terran"]], pad_data=False) assert 16 == len(res) for ch in self._chans["solar"] + self._chans["terran"]: assert res[ch + '_index_map'].shape == (200, 11136) numpy.testing.assert_array_equal(res[ch + '_index_map'][1, 1], 5138) def test_load_aux_data(self, reader_configs): """Test loading of auxiliary data.""" from satpy.readers.fci_l1c_nc import AUX_DATA filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ] reader = _get_reader_with_filehandlers(filenames, reader_configs) res = reader.load(['vis_04_' + key for key in AUX_DATA.keys()], pad_data=False) for aux in ['vis_04_' + key for key in AUX_DATA.keys()]: assert res[aux].shape == (200, 11136) if aux == 'vis_04_earth_sun_distance': numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7) else: numpy.testing.assert_array_equal(res[aux][1, 1], 5137) def test_load_composite(self): """Test that composites are loadable.""" # when dedicated composites for FCI are implemented in satpy, # this method should probably move to a dedicated class and module # in the tests.compositor_tests package from satpy.composites.config_loader import load_compositor_configs_for_sensors comps, mods = load_compositor_configs_for_sensors(['fci']) assert len(comps["fci"]) > 0 assert len(mods["fci"]) > 0 def test_load_quality_only(self, reader_configs): """Test that loading quality only works.""" filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) res = reader.load( [name + '_pixel_quality' for name in self._chans["solar"] + self._chans["terran"]], pad_data=False) assert 16 == len(res) for ch in self._chans["solar"] + self._chans["terran"]: assert res[ch + '_pixel_quality'].shape == (200, 11136) numpy.testing.assert_array_equal(res[ch + '_pixel_quality'][1, 1], 1) assert res[ch + '_pixel_quality'].attrs["name"] == ch + '_pixel_quality' def test_platform_name(self, reader_configs): """Test that platform name is exposed. Test that the FCI reader exposes the platform name. Corresponds to GH issue 1014. 
""" filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) res = reader.load(["ir_123"], pad_data=False) assert res["ir_123"].attrs["platform_name"] == "MTG-I1" def test_excs(self, reader_configs): """Test that exceptions are raised where expected.""" from satpy.tests.utils import make_dataid filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) with pytest.raises(ValueError): reader.file_handlers["fci_l1c_fdhsi"][0].get_dataset(make_dataid(name="invalid"), {}) with pytest.raises(ValueError): reader.file_handlers["fci_l1c_fdhsi"][0].get_dataset( make_dataid(name="ir_123", calibration="unknown"), {"units": "unknown"}) def test_area_definition_computation(self, reader_configs): """Test that the geolocation computation is correct.""" filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) res = reader.load(['ir_105', 'vis_06'], pad_data=False) # test that area_ids are harmonisation-conform ___ assert res['vis_06'].attrs['area'].area_id == 'mtg_fci_fdss_1km' assert res['ir_105'].attrs['area'].area_id == 'mtg_fci_fdss_2km' area_def = res['ir_105'].attrs['area'] # test area extents computation np.testing.assert_array_almost_equal(np.array(area_def.area_extent), np.array([-5568062.23065902, 5168057.7600648, 16704186.692027, 5568062.23065902])) # check that the projection is read in properly assert area_def.crs.coordinate_operation.method_name == 'Geostationary Satellite (Sweep Y)' assert area_def.crs.coordinate_operation.params[0].value == 0.0 # projection origin longitude assert area_def.crs.coordinate_operation.params[1].value == 35786400.0 # projection height assert area_def.crs.ellipsoid.semi_major_metre == 6378137.0 assert area_def.crs.ellipsoid.inverse_flattening == 298.257223563 assert area_def.crs.ellipsoid.is_semi_minor_computed class TestFCIL1cNCReaderBadData(TestFCIL1cNCReader): """Test the FCI L1c NetCDF Reader for bad data input.""" _alt_handler = FakeNetCDF4FileHandler3 def test_handling_bad_data_ir(self, reader_configs, caplog): """Test handling of bad IR data.""" from satpy.tests.utils import make_dataid filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) with caplog.at_level("ERROR"): reader.load([make_dataid( name="ir_123", calibration="brightness_temperature")], pad_data=False) assert "cannot produce brightness temperature" in caplog.text def test_handling_bad_data_vis(self, reader_configs, caplog): """Test handling of bad VIS data.""" from satpy.tests.utils import make_dataid filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = _get_reader_with_filehandlers(filenames, reader_configs) with caplog.at_level("ERROR"): reader.load([make_dataid( name="vis_04", calibration="reflectance")], 
pad_data=False) assert "cannot produce reflectance" in caplog.text satpy-0.34.0/satpy/tests/reader_tests/test_fci_l2_nc.py000066400000000000000000000515521420401153000231510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """The fci_cld_l2_nc reader tests package.""" import datetime import os import unittest import uuid from contextlib import suppress from unittest import mock import numpy as np from netCDF4 import Dataset from satpy.readers.fci_l2_nc import PRODUCT_DATA_DURATION_MINUTES, FciL2NCFileHandler, FciL2NCSegmentFileHandler class TestFciL2NCFileHandler(unittest.TestCase): """Test the FciL2NCFileHandler reader.""" def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly # Create unique filenames to prevent race conditions when tests are run in parallel self.test_file = str(uuid.uuid4()) + ".nc" with Dataset(self.test_file, 'w') as nc: # Create dimensions nc.createDimension('number_of_columns', 10) nc.createDimension('number_of_rows', 100) nc.createDimension('maximum_number_of_layers', 2) # add global attributes nc.data_source = 'test_data_source' nc.platform = 'test_platform' nc.time_coverage_start = '20170920173040' nc.time_coverage_end = '20170920174117' # Add datasets x = nc.createVariable('x', np.float32, dimensions=('number_of_columns',)) x.standard_name = 'projection_x_coordinate' x[:] = np.arange(10) y = nc.createVariable('y', np.float32, dimensions=('number_of_rows',)) x.standard_name = 'projection_y_coordinate' y[:] = np.arange(100) one_layer_dataset = nc.createVariable('test_one_layer', np.float32, dimensions=('number_of_rows', 'number_of_columns')) one_layer_dataset[:] = np.ones((100, 10)) one_layer_dataset.test_attr = 'attr' one_layer_dataset.units = 'test_units' two_layers_dataset = nc.createVariable('test_two_layers', np.float32, dimensions=('maximum_number_of_layers', 'number_of_rows', 'number_of_columns')) two_layers_dataset[0, :, :] = np.ones((100, 10)) two_layers_dataset[1, :, :] = 2 * np.ones((100, 10)) mtg_geos_projection = nc.createVariable('mtg_geos_projection', int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 10.0 mtg_geos_projection.semi_major_axis = 6378137. mtg_geos_projection.semi_minor_axis = 6356752. mtg_geos_projection.perspective_point_height = 35786400. 
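        # Note (hedged reading, not stated by the original authors): the synthetic x/y
        # coordinate values above (0..9 and 0..99) appear to be interpreted by the reader
        # as scan angles in radians, since 9 * 180 / pi ~= 515.662 and
        # 99 * 180 / pi ~= 5672.282 match the extents asserted in test_area_definition
        # below. Quick illustrative check (for orientation only):
        #
        #     import numpy as np
        #     np.degrees([9, 99])   # -> array([ 515.662...,  5672.282...])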
self.reader = FciL2NCFileHandler( filename=self.test_file, filename_info={ 'creation_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # First delete the reader, forcing the file to be closed if still open del self.reader # Then we can safely remove the file from the system with suppress(OSError): os.remove(self.test_file) def test_all_basic(self): """Test all basic functionalities.""" self.assertEqual(PRODUCT_DATA_DURATION_MINUTES, 20) self.assertEqual(self.reader._start_time, datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40)) self.assertEqual(self.reader._end_time, datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17)) self.assertEqual(self.reader._spacecraft_name, 'test_platform') self.assertEqual(self.reader._sensor_name, 'test_data_source') self.assertEqual(self.reader.ssp_lon, 10.0) global_attributes = self.reader._get_global_attributes() expected_global_attributes = { 'filename': self.test_file, 'start_time': datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40), 'end_time': datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17), 'spacecraft_name': 'test_platform', 'ssp_lon': 10.0, 'sensor': 'test_data_source', 'creation_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), 'platform_name': 'test_platform' } self.assertEqual(global_attributes, expected_global_attributes) @mock.patch('satpy.readers.fci_l2_nc.get_area_definition') @mock.patch('satpy.readers.fci_l2_nc.make_ext') def test_area_definition(self, me_, gad_): """Test the area definition computation.""" self.reader._compute_area_def() # Asserts that the make_ext function was called with the correct arguments me_.assert_called_once() name, args, kwargs = me_.mock_calls[0] self.assertTrue(np.allclose(args[0], 0.0)) self.assertTrue(np.allclose(args[1], 515.6620)) self.assertTrue(np.allclose(args[2], 0.0)) self.assertTrue(np.allclose(args[3], 5672.28217)) self.assertTrue(np.allclose(args[4], 35786400.)) p_dict = { 'nlines': 100, 'ncols': 10, 'ssp_lon': 10.0, 'a': 6378137., 'b': 6356752., 'h': 35786400., 'a_name': 'FCI Area', 'a_desc': 'Area for FCI instrument', 'p_id': 'geos' } # Asserts that the get_area_definition function was called with the correct arguments gad_.assert_called_once() name, args, kwargs = gad_.mock_calls[0] self.assertEqual(args[0], p_dict) # The second argument must be the return result of the make_ext function self.assertEqual(args[1]._extract_mock_name(), 'make_ext()') def test_dataset(self): """Test the execution of the get_dataset function.""" # Checks the correct execution of the get_dataset function with a valid file_key dataset = self.reader.get_dataset(None, {'file_key': 'test_one_layer', 'fill_value': -999, 'mask_value': 0., 'file_type': 'test_file_type'}) self.assertTrue(np.allclose(dataset.values, np.ones((100, 10)))) self.assertEqual(dataset.attrs['test_attr'], 'attr') self.assertEqual(dataset.attrs['units'], 'test_units') self.assertEqual(dataset.attrs['fill_value'], -999) # Checks the correct execution of the get_dataset function with a valid file_key & layer dataset = self.reader.get_dataset(None, {'file_key': 'test_two_layers', 'layer': 1, 'fill_value': -999, 'mask_value': 0, 'file_type': 'test_file_type'}) self.assertTrue(np.allclose(dataset.values, 2 * np.ones((100, 10)))) self.assertEqual(dataset.attrs['units'], None) 
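        # (The two-layer test variable created in setUp carries no 'units' attribute,
        # which is presumably why the assertion above expects None, unlike the
        # one-layer variable that keeps its 'test_units'.)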
self.assertEqual(dataset.attrs['spacecraft_name'], 'test_platform') # Checks the correct execution of the get_dataset function with an invalid file_key invalid_dataset = self.reader.get_dataset(None, {'file_key': 'test_invalid', 'fill_value': -999, 'mask_value': 0, 'file_type': 'test_file_type'}) # Checks that the function returns None self.assertEqual(invalid_dataset, None) class TestFciL2NCSegmentFileHandler(unittest.TestCase): """Test the FciL2NCSegmentFileHandler reader.""" def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.seg_test_file = str(uuid.uuid4()) + ".nc" with Dataset(self.seg_test_file, 'w') as nc: # Create dimensions nc.createDimension('number_of_FoR_cols', 10) nc.createDimension('number_of_FoR_rows', 100) nc.createDimension('number_of_channels', 8) nc.createDimension('number_of_categories', 6) # add global attributes nc.data_source = 'test_fci_data_source' nc.platform = 'test_fci_platform' nc.time_coverage_start = '20170920173040' nc.time_coverage_end = '20170920174117' # Add datasets x = nc.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',)) x.standard_name = 'projection_x_coordinate' x[:] = np.arange(10) y = nc.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',)) x.standard_name = 'projection_y_coordinate' y[:] = np.arange(100) chans = nc.createVariable('channels', np.float32, dimensions=('number_of_channels',)) chans.standard_name = 'fci_channels' chans[:] = np.arange(8) cats = nc.createVariable('categories', np.float32, dimensions=('number_of_categories',)) cats.standard_name = 'product_categories' cats[:] = np.arange(6) test_dataset = nc.createVariable('test_values', np.float32, dimensions=('number_of_FoR_rows', 'number_of_FoR_cols', 'number_of_channels', 'number_of_categories')) test_dataset[:] = np.ones((100, 10, 8, 6)) test_dataset.test_attr = 'attr' test_dataset.units = 'test_units' self.segment_reader = FciL2NCSegmentFileHandler( filename=self.seg_test_file, filename_info={ 'creation_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # First delete the reader, forcing the file to be closed if still open del self.segment_reader # Then can safely remove it from the system with suppress(OSError): os.remove(self.seg_test_file) def test_all_basic(self): """Test all basic functionalities.""" self.assertEqual(PRODUCT_DATA_DURATION_MINUTES, 20) self.assertEqual(self.segment_reader._start_time, datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40)) self.assertEqual(self.segment_reader._end_time, datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17)) self.assertEqual(self.segment_reader._spacecraft_name, 'test_fci_platform') self.assertEqual(self.segment_reader._sensor_name, 'test_fci_data_source') self.assertEqual(self.segment_reader.ssp_lon, 0.0) global_attributes = self.segment_reader._get_global_attributes() expected_global_attributes = { 'filename': self.seg_test_file, 'start_time': datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40), 'end_time': datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17), 'spacecraft_name': 'test_fci_platform', 'ssp_lon': 0.0, 'sensor': 'test_fci_data_source', 'creation_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), 'platform_name': 
'test_fci_platform' } self.assertEqual(global_attributes, expected_global_attributes) def test_dataset(self): """Test the execution of the get_dataset function.""" # Checks the correct execution of the get_dataset function with a valid file_key dataset = self.segment_reader.get_dataset(None, {'file_key': 'test_values', 'fill_value': -999, 'mask_value': 0, }) self.assertTrue(np.allclose(dataset.values, np.ones((100, 10, 8, 6)))) self.assertEqual(dataset.attrs['test_attr'], 'attr') self.assertEqual(dataset.attrs['units'], 'test_units') self.assertEqual(dataset.attrs['fill_value'], -999) # Checks the correct execution of the get_dataset function with an invalid file_key invalid_dataset = self.segment_reader.get_dataset(None, {'file_key': 'test_invalid', 'fill_value': -999, 'mask_value': 0}) # Checks that the function returns None self.assertEqual(invalid_dataset, None) class TestFciL2NCErrorFileHandler(unittest.TestCase): """Test the FciL2NCFileHandler reader.""" def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.test_error_file = str(uuid.uuid4()) + ".nc" with Dataset(self.test_error_file, 'w') as nc_err: # Create dimensions nc_err.createDimension('number_of_FoR_cols', 10) nc_err.createDimension('number_of_FoR_rows', 100) nc_err.createDimension('number_of_channels', 8) nc_err.createDimension('number_of_categories', 6) # add erroneous global attributes nc_err.data_source = 'test_fci_data_source' # Error in key name nc_err.platform_err = 'test_fci_platform' # Error in key name nc_err.time_coverage_start = '2017092017304000' # Error in time format nc_err.time_coverage_end_err = '20170920174117' # Error in key name # Add datasets x = nc_err.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',)) x.standard_name = 'projection_x_coordinate' x[:] = np.arange(10) y = nc_err.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',)) x.standard_name = 'projection_y_coordinate' y[:] = np.arange(100) chans = nc_err.createVariable('channels', np.float32, dimensions=('number_of_channels',)) chans.standard_name = 'fci_channels' chans[:] = np.arange(8) cats = nc_err.createVariable('categories', np.float32, dimensions=('number_of_categories',)) cats.standard_name = 'product_categories' cats[:] = np.arange(6) test_dataset = nc_err.createVariable('test_values', np.float32, dimensions=('number_of_FoR_rows', 'number_of_FoR_cols', 'number_of_channels', 'number_of_categories')) test_dataset[:] = np.ones((100, 10, 8, 6)) test_dataset.test_attr = 'attr' test_dataset.units = 'test_units' self.error_reader = FciL2NCSegmentFileHandler( filename=self.test_error_file, filename_info={ 'creation_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # First delete the reader, forcing the file to be closed if still open del self.error_reader # Then can safely remove it from the system with suppress(OSError): os.remove(self.test_error_file) def test_errors(self): """Test that certain properties cause errors.""" self.assertRaises(TypeError, self.error_reader._start_time, datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40)) self.assertRaises(TypeError, self.error_reader._end_time, datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17)) self.assertRaises(TypeError, self.error_reader._spacecraft_name) 
self.assertRaises(TypeError, self.error_reader._sensor_name) class TestFciL2NCReadingByteData(unittest.TestCase): """Test the FciL2NCFileHandler when reading and extracting byte data.""" def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.test_byte_file = str(uuid.uuid4()) + ".nc" with Dataset(self.test_byte_file, 'w') as nc_byte: # Create dimensions nc_byte.createDimension('number_of_columns', 1) nc_byte.createDimension('number_of_rows', 1) # Add datasets x = nc_byte.createVariable('x', np.float32, dimensions=('number_of_columns',)) x.standard_name = 'projection_x_coordinate' x[:] = np.arange(1) y = nc_byte.createVariable('y', np.float32, dimensions=('number_of_rows',)) x.standard_name = 'projection_y_coordinate' y[:] = np.arange(1) mtg_geos_projection = nc_byte.createVariable('mtg_geos_projection', int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 10.0 mtg_geos_projection.semi_major_axis = 6378137. mtg_geos_projection.semi_minor_axis = 6356752. mtg_geos_projection.perspective_point_height = 35786400. test_dataset = nc_byte.createVariable('cloud_mask_test_flag', np.float32, dimensions=('number_of_rows', 'number_of_columns',)) # This number was chosen as we know the expected byte values test_dataset[:] = 4544767 self.byte_reader = FciL2NCFileHandler( filename=self.test_byte_file, filename_info={ 'creation_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # First delete the reader, forcing the file to be closed if still open del self.byte_reader # Then can safely remove it from the system with suppress(OSError): os.remove(self.test_byte_file) def test_byte_extraction(self): """Test the execution of the get_dataset function.""" # Value of 1 is expected to be returned for this test dataset = self.byte_reader.get_dataset(None, {'file_key': 'cloud_mask_test_flag', 'fill_value': -999, 'mask_value': 0., 'file_type': 'nc_fci_test_clm', 'extract_byte': 1, }) self.assertEqual(dataset.values, 1) # Value of 0 is expected fto be returned or this test dataset = self.byte_reader.get_dataset(None, {'file_key': 'cloud_mask_test_flag', 'fill_value': -999, 'mask_value': 0., 'file_type': 'nc_fci_test_clm', 'extract_byte': 23, }) self.assertEqual(dataset.values, 0) satpy-0.34.0/satpy/tests/reader_tests/test_generic_image.py000066400000000000000000000310411420401153000241000ustar00rootroot00000000000000#!/usr/bin/python # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
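# Usage sketch, assumed from the tests below (the path is hypothetical): the
# generic_image reader exposes any PNG/GeoTIFF as a dataset named 'image', e.g.
#
#     from satpy import Scene
#     scn = Scene(reader='generic_image',
#                 filenames=['/tmp/20180101_0000_test_la.png'])
#     scn.load(['image'])
#
# Filenames carrying a timestamp provide start_time/end_time, and GeoTIFF files
# additionally supply an 'area' attribute, as exercised by TestGenericImage below.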
"""Unittests for generic image reader.""" import os import unittest import dask.array as da import numpy as np import xarray as xr from satpy.tests.utils import make_dataid class TestGenericImage(unittest.TestCase): """Test generic image reader.""" def setUp(self): """Create temporary images to test on.""" import tempfile from datetime import datetime from pyresample.geometry import AreaDefinition from satpy.scene import Scene self.date = datetime(2018, 1, 1) # Create area definition pcs_id = 'ETRS89 / LAEA Europe' proj4_dict = "EPSG:3035" self.x_size = 100 self.y_size = 100 area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) self.area_def = AreaDefinition('geotiff_area', pcs_id, pcs_id, proj4_dict, self.x_size, self.y_size, area_extent) # Create datasets for L, LA, RGB and RGBA mode images r__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint8) g__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint8) b__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint8) a__ = 255 * np.ones((self.y_size, self.x_size), dtype=np.uint8) a__[:10, :10] = 0 a__ = da.from_array(a__, chunks=(50, 50)) r_nan__ = np.random.uniform(0., 1., size=(self.y_size, self.x_size)) r_nan__[:10, :10] = np.nan r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) ds_l = xr.DataArray(da.stack([r__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_l', 'start_time': self.date}) ds_l['bands'] = ['L'] ds_la = xr.DataArray(da.stack([r__, a__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_la', 'start_time': self.date}) ds_la['bands'] = ['L', 'A'] ds_rgb = xr.DataArray(da.stack([r__, g__, b__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_rgb', 'start_time': self.date}) ds_rgb['bands'] = ['R', 'G', 'B'] ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_rgba', 'start_time': self.date}) ds_rgba['bands'] = ['R', 'G', 'B', 'A'] ds_l_nan = xr.DataArray(da.stack([r_nan__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_l_nan', 'start_time': self.date}) ds_l_nan['bands'] = ['L'] # Temp dir for the saved images self.base_dir = tempfile.mkdtemp() # Put the datasets to Scene for easy saving scn = Scene() scn['l'] = ds_l scn['l'].attrs['area'] = self.area_def scn['la'] = ds_la scn['la'].attrs['area'] = self.area_def scn['rgb'] = ds_rgb scn['rgb'].attrs['area'] = self.area_def scn['rgba'] = ds_rgba scn['rgba'].attrs['area'] = self.area_def scn['l_nan'] = ds_l_nan scn['l_nan'].attrs['area'] = self.area_def # Save the images. 
Two images in PNG and two in GeoTIFF scn.save_dataset('l', os.path.join(self.base_dir, 'test_l.png'), writer='simple_image') scn.save_dataset('la', os.path.join(self.base_dir, '20180101_0000_test_la.png'), writer='simple_image') scn.save_dataset('rgb', os.path.join(self.base_dir, '20180101_0000_test_rgb.tif'), writer='geotiff') scn.save_dataset('rgba', os.path.join(self.base_dir, 'test_rgba.tif'), writer='geotiff') scn.save_dataset('l_nan', os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif'), writer='geotiff', fill_value=0) scn.save_dataset('l_nan', os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif'), writer='geotiff') self.scn = scn def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_png_scene(self): """Test reading PNG images via satpy.Scene().""" from satpy import Scene fname = os.path.join(self.base_dir, 'test_l.png') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) self.assertEqual(scn.sensor_names, {'images'}) self.assertEqual(scn.start_time, None) self.assertEqual(scn.end_time, None) self.assertNotIn('area', scn['image'].attrs) fname = os.path.join(self.base_dir, '20180101_0000_test_la.png') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) data = da.compute(scn['image'].data) self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) self.assertEqual(scn.sensor_names, {'images'}) self.assertEqual(scn.start_time, self.date) self.assertEqual(scn.end_time, self.date) self.assertNotIn('area', scn['image'].attrs) self.assertEqual(np.sum(np.isnan(data)), 100) def test_geotiff_scene(self): """Test reading TIFF images via satpy.Scene().""" from satpy import Scene fname = os.path.join(self.base_dir, '20180101_0000_test_rgb.tif') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) self.assertEqual(scn.sensor_names, {'images'}) self.assertEqual(scn.start_time, self.date) self.assertEqual(scn.end_time, self.date) self.assertEqual(scn['image'].area, self.area_def) fname = os.path.join(self.base_dir, 'test_rgba.tif') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) self.assertEqual(scn.sensor_names, {'images'}) self.assertEqual(scn.start_time, None) self.assertEqual(scn.end_time, None) self.assertEqual(scn['image'].area, self.area_def) def test_geotiff_scene_nan(self): """Test reading TIFF images originally containing NaN values via satpy.Scene().""" from satpy import Scene fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) self.assertEqual(np.sum(scn['image'].data[0][:10, :10].compute()), 0) fname = os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) self.assertTrue(np.all(np.isnan(scn['image'].data[0][:10, :10].compute()))) def test_GenericImageFileHandler(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler fname = os.path.join(self.base_dir, 'test_rgba.tif') fname_info = {'start_time': self.date} 
ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) foo = make_dataid(name='image') self.assertTrue(reader.file_content) self.assertEqual(reader.finfo['filename'], fname) self.assertEqual(reader.finfo['start_time'], self.date) self.assertEqual(reader.finfo['end_time'], self.date) self.assertEqual(reader.area, self.area_def) self.assertEqual(reader.get_area_def(None), self.area_def) self.assertEqual(reader.start_time, self.date) self.assertEqual(reader.end_time, self.date) dataset = reader.get_dataset(foo, {}) self.assertTrue(isinstance(dataset, xr.DataArray)) self.assertIn('crs', dataset.attrs) self.assertIn('transform', dataset.attrs) self.assertTrue(np.all(np.isnan(dataset.data[:, :10, :10].compute()))) def test_GenericImageFileHandler_masking_only_integer(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler class FakeGenericImageFileHandler(GenericImageFileHandler): def __init__(self, filename, filename_info, filetype_info, file_content, **kwargs): """Get fake file content from 'get_test_content'.""" super(GenericImageFileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = file_content self.dataset_name = None self.file_content.update(kwargs) data = self.scn['rgba'] # do nothing if not integer float_data = data / 255. reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) self.assertIs(reader.get_dataset(make_dataid(name='image'), {}), float_data) # masking if integer data = data.astype(np.uint32) self.assertEqual(data.bands.size, 4) reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) ret_data = reader.get_dataset(make_dataid(name='image'), {}) self.assertEqual(ret_data.bands.size, 3) def test_GenericImageFileHandler_datasetid(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler fname = os.path.join(self.base_dir, 'test_rgba.tif') fname_info = {'start_time': self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) foo = make_dataid(name='image-custom') self.assertTrue(reader.file_content, 'file_content should be set') dataset = reader.get_dataset(foo, {}) self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') def test_GenericImageFileHandler_nodata(self): """Test nodata handling with direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif') fname_info = {'start_time': self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) foo = make_dataid(name='image-custom') self.assertTrue(reader.file_content, 'file_content should be set') info = {'nodata_handling': 'nan_mask'} dataset = reader.get_dataset(foo, info) self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') self.assertTrue(np.all(np.isnan(dataset.data[0][:10, :10].compute())), 'values should be np.nan') self.assertTrue(np.isnan(dataset.attrs['_FillValue']), '_FillValue should be np.nan') info = {'nodata_handling': 'fill_value'} dataset = reader.get_dataset(foo, info) self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') self.assertEqual(np.sum(dataset.data[0][:10, :10].compute()), 0) self.assertEqual(dataset.attrs['_FillValue'], 0) # default same as 'nodata_handling': 'fill_value' dataset = reader.get_dataset(foo, {}) self.assertTrue(isinstance(dataset, xr.DataArray), 
'dataset should be a xr.DataArray') self.assertEqual(np.sum(dataset.data[0][:10, :10].compute()), 0) self.assertEqual(dataset.attrs['_FillValue'], 0) satpy-0.34.0/satpy/tests/reader_tests/test_geocat.py000066400000000000000000000210521420401153000225650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.geocat module.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { '/attr/Platform_Name': filename_info['platform_shortname'], '/attr/Element_Resolution': 2., '/attr/Line_Resolution': 2., '/attr/Subsatellite_Longitude': -70.2 if 'GOES' in filename_info['platform_shortname'] else 140.65, 'pixel_longitude': DEFAULT_LON_DATA, 'pixel_longitude/attr/scale_factor': 1., 'pixel_longitude/attr/add_offset': 0., 'pixel_longitude/shape': DEFAULT_FILE_SHAPE, 'pixel_longitude/attr/_FillValue': np.nan, 'pixel_latitude': DEFAULT_LAT_DATA, 'pixel_latitude/attr/scale_factor': 1., 'pixel_latitude/attr/add_offset': 0., 'pixel_latitude/shape': DEFAULT_FILE_SHAPE, 'pixel_latitude/attr/_FillValue': np.nan, } sensor = { 'HIMAWARI-8': 'himawari8', 'GOES-17': 'goesr', 'GOES-16': 'goesr', 'GOES-13': 'goes', 'GOES-14': 'goes', 'GOES-15': 'goes', }[filename_info['platform_shortname']] file_content['/attr/Sensor_Name'] = sensor if filename_info['platform_shortname'] == 'HIMAWARI-8': file_content['pixel_longitude'] = DEFAULT_LON_DATA + 130. file_content['variable1'] = DEFAULT_FILE_DATA.astype(np.float32) file_content['variable1/attr/_FillValue'] = -1 file_content['variable1/attr/scale_factor'] = 1. file_content['variable1/attr/add_offset'] = 0. 
file_content['variable1/attr/units'] = '1' file_content['variable1/shape'] = DEFAULT_FILE_SHAPE # data with fill values file_content['variable2'] = np.ma.masked_array( DEFAULT_FILE_DATA.astype(np.float32), mask=np.zeros_like(DEFAULT_FILE_DATA)) file_content['variable2'].mask[::5, ::5] = True file_content['variable2/attr/_FillValue'] = -1 file_content['variable2/attr/scale_factor'] = 1. file_content['variable2/attr/add_offset'] = 0. file_content['variable2/attr/units'] = '1' file_content['variable2/shape'] = DEFAULT_FILE_SHAPE # category file_content['variable3'] = DEFAULT_FILE_DATA.astype(np.byte) file_content['variable3/attr/_FillValue'] = -128 file_content['variable3/attr/flag_meanings'] = "clear water supercooled mixed ice unknown" file_content['variable3/attr/flag_values'] = [0, 1, 2, 3, 4, 5] file_content['variable3/attr/units'] = '1' file_content['variable3/shape'] = DEFAULT_FILE_SHAPE attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') convert_file_content_to_data_array( file_content, attrs=attrs, dims=('z', 'lines', 'elements')) return file_content class TestGEOCATReader(unittest.TestCase): """Test GEOCAT Reader.""" yaml_file = "geocat.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.geocat import GEOCATFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(GEOCATFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'geocatL2.GOES-13.2015143.234500.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_all_old_goes(self): """Test loading all test datasets from old GOES files.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'geocatL2.GOES-13.2015143.234500.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): assert 'calibration' not in v.attrs self.assertEqual(v.attrs['units'], '1') self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) def test_load_all_himawari8(self): """Test loading all test datasets from H8 NetCDF file.""" import xarray as xr from pyresample.geometry import AreaDefinition from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): assert 'calibration' not in v.attrs self.assertEqual(v.attrs['units'], '1') self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) def 
test_load_all_goes17_hdf4(self): """Test loading all test datasets from GOES-17 HDF4 file.""" import xarray as xr from pyresample.geometry import AreaDefinition from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'geocatL2.GOES-17.CONUS.2020041.163130.hdf', ]) r.create_filehandlers(loadables) datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): assert 'calibration' not in v.attrs self.assertEqual(v.attrs['units'], '1') self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) satpy-0.34.0/satpy/tests/reader_tests/test_geos_area.py000066400000000000000000000162751420401153000232630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Geostationary project utility module tests package.""" import unittest import numpy as np from satpy.readers._geos_area import ( get_area_definition, get_area_extent, get_geos_area_naming, get_resolution_and_unit_strings, get_xy_from_linecol, sampling_to_lfac_cfac, ) class TestGEOSProjectionUtil(unittest.TestCase): """Tests for the area utilities.""" def make_pdict_ext(self, typ, scan): """Create a dictionary and extents to use in testing.""" if typ == 1: # Fulldisk pdict = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'ssp_lon': 0.0, 'nlines': 3712, 'ncols': 3712, 'a_name': 'geostest', 'a_desc': 'test area', 'p_id': 'test_area', 'cfac': -13642337, 'lfac': -13642337, 'coff': 1856} if scan == 'N2S': pdict['scandir'] = 'N2S' pdict['loff'] = 1856 extent = (5567248.28340708, 5567248.28340708, -5570248.686685662, -5570248.686685662) if scan == 'S2N': pdict['scandir'] = 'S2N' pdict['loff'] = -1856 extent = (5567248.28340708, 5570248.686685662, -5570248.686685662, -5567248.28340708) if typ == 2: # One sector pdict = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'ssp_lon': 0.0, 'nlines': 464, 'ncols': 3712, 'a_name': 'geostest', 'a_desc': 'test area', 'p_id': 'test_area', 'cfac': -13642337, 'lfac': -13642337, 'coff': 1856} if scan == 'N2S': pdict['scandir'] = 'N2S' pdict['loff'] = 464 extent = (5567248.28340708, 1390686.9196223018, -5570248.686685662, -1500.2016392905093) if scan == 'S2N': pdict['scandir'] = 'S2N' pdict['loff'] = 464 extent = (5567248.28340708, -1390686.9196223018, -5570248.686685662, -2782874.0408838945) return pdict, extent def test_geos_area(self): """Test area extent calculation with N->S scan then S->N scan.""" # North -> South full disk pdict, extent = self.make_pdict_ext(1, 'N2S') aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North full disk pdict, extent = self.make_pdict_ext(1, 'S2N') aex = get_area_extent(pdict) np.testing.assert_allclose(aex, 
extent) # North -> South one sector pdict, extent = self.make_pdict_ext(2, 'N2S') aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North one sector pdict, extent = self.make_pdict_ext(2, 'S2N') aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) def test_get_xy_from_linecol(self): """Test the scan angle calculation.""" pdict, extent = self.make_pdict_ext(1, 'S2N') good_xy = [0.2690166648133674, -10.837528496767087] factors = (pdict['lfac'], pdict['cfac']) offsets = (pdict['loff'], pdict['coff']) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) pdict, extent = self.make_pdict_ext(2, 'N2S') good_xy = [0.2690166648133674, 0.30744761692956274] factors = (pdict['lfac'], pdict['cfac']) offsets = (pdict['loff'], pdict['coff']) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) def test_get_area_definition(self): """Test the retrieval of the area definition.""" from pyresample.utils import proj4_radius_parameters pdict, extent = self.make_pdict_ext(1, 'N2S') good_res = (-3000.4032785810186, -3000.4032785810186) a_def = get_area_definition(pdict, extent) self.assertEqual(a_def.area_id, pdict['a_name']) self.assertEqual(a_def.resolution, good_res) self.assertEqual(a_def.proj_dict['proj'], 'geos') self.assertEqual(a_def.proj_dict['units'], 'm') a, b = proj4_radius_parameters(a_def.proj_dict) self.assertEqual(a, 6378169) self.assertEqual(b, 6356583.8) self.assertEqual(a_def.proj_dict['h'], 35785831) def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" lfac = 13642337 # SEVIRI LFAC sampling = np.deg2rad(2 ** 16 / lfac) np.testing.assert_allclose(sampling_to_lfac_cfac(sampling), lfac) def test_get_geos_area_naming(self): """Test the geos area naming function.""" input_dict = {'platform_name': 'testplatform', 'instrument_name': 'testinstrument', 'resolution': 1000, 'service_name': 'testservicename', 'service_desc': 'testdesc'} output_dict = get_geos_area_naming(input_dict) self.assertEqual(output_dict['area_id'], 'testplatform_testinstrument_testservicename_1km') self.assertEqual(output_dict['description'], 'TESTPLATFORM TESTINSTRUMENT testdesc area definition' ' with 1 km resolution') def test_get_resolution_and_unit_strings_in_km(self): """Test the resolution and unit strings function for a km resolution.""" out = get_resolution_and_unit_strings(1000) self.assertEqual(out['value'], '1') self.assertEqual(out['unit'], 'km') def test_get_resolution_and_unit_strings_in_m(self): """Test the resolution and unit strings function for a m resolution.""" out = get_resolution_and_unit_strings(500) self.assertEqual(out['value'], '500') self.assertEqual(out['unit'], 'm') satpy-0.34.0/satpy/tests/reader_tests/test_glm_l2.py000066400000000000000000000216701420401153000225050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The glm_l2 reader tests package.""" import os import unittest from unittest import mock import numpy as np import xarray as xr def setup_fake_dataset(): """Create a fake dataset to avoid opening a file.""" # flash_extent_density fed = (np.arange(10.).reshape((2, 5)) + 1.) * 50. fed = (fed + 1.) / 0.5 fed = fed.astype(np.int16) fed = xr.DataArray( fed, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 0, 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min', 'grid_mapping': 'goes_imager_projection', 'standard_name': 'flash_extent_density', 'long_name': 'Flash extent density', } ) dqf = xr.DataArray( fed.data.copy().astype(np.uint8), dims=('y', 'x'), attrs={ '_FillValue': -1, 'units': '1', 'grid_mapping': 'goes_imager_projection', 'standard_name': 'status_flag', 'long_name': 'GLM data quality flags', 'flag_meanings': "valid invalid", } ) # create a variable that won't be configured to test available_datasets not_configured = xr.DataArray( fed.data.copy(), dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 0, 'units': '1', 'grid_mapping': 'goes_imager_projection', 'standard_name': 'test', 'long_name': 'Test', } ) x__ = xr.DataArray( range(5), attrs={'scale_factor': 2., 'add_offset': -1.}, dims=('x',), ) y__ = xr.DataArray( range(2), attrs={'scale_factor': -2., 'add_offset': 1.}, dims=('y',), ) proj = xr.DataArray( [], attrs={ 'semi_major_axis': 1., 'semi_minor_axis': 1., 'perspective_point_height': 1., 'longitude_of_projection_origin': -90., 'latitude_of_projection_origin': 0., 'sweep_angle_axis': u'x' } ) fake_dataset = xr.Dataset( data_vars={ 'flash_extent_density': fed, 'not_configured': not_configured, 'DQF': dqf, 'x': x__, 'y': y__, 'goes_imager_projection': proj, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02) }, attrs={ "time_coverage_start": "2017-09-20T17:30:40Z", "time_coverage_end": "2017-09-20T17:41:17Z", "spatial_resolution": "2km at nadir", } ) return fake_dataset class TestGLML2FileHandler(unittest.TestCase): """Tests for the GLM L2 reader.""" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_): """Create a fake file handler to test.""" from satpy.readers.glm_l2 import NCGriddedGLML2 fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset self.reader = NCGriddedGLML2('filename', {'platform_shortname': 'G16', 'scene_abbr': 'C', 'scan_mode': 'M3'}, {'filetype': 'glm_l2_imagery'}) def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime self.assertEqual(self.reader.start_time, datetime(2017, 9, 20, 17, 30, 40)) self.assertEqual(self.reader.end_time, datetime(2017, 9, 20, 17, 41, 17)) def test_get_dataset(self): """Test the get_dataset method.""" from satpy.tests.utils import make_dataid key = make_dataid(name='flash_extent_density') res = self.reader.get_dataset(key, {'info': 'info'}) exp = {'instrument_ID': None, 'modifiers': (), 'name': 'flash_extent_density', 'orbital_parameters': {'projection_altitude': 1.0, 'projection_latitude': 0.0, 'projection_longitude': -90.0, # 
'satellite_nominal_altitude': 35786.02, 'satellite_nominal_latitude': 0.0, 'satellite_nominal_longitude': -89.5}, 'orbital_slot': None, 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, 'scan_mode': 'M3', 'scene_abbr': 'C', 'scene_id': None, "spatial_resolution": "2km at nadir", 'sensor': 'glm', 'timeline_ID': None, 'grid_mapping': 'goes_imager_projection', 'standard_name': 'flash_extent_density', 'long_name': 'Flash extent density', 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min'} self.assertDictEqual(res.attrs, exp) def test_get_dataset_dqf(self): """Test the get_dataset method with special DQF var.""" from satpy.tests.utils import make_dataid key = make_dataid(name='DQF') res = self.reader.get_dataset(key, {'info': 'info'}) exp = {'instrument_ID': None, 'modifiers': (), 'name': 'DQF', 'orbital_parameters': {'projection_altitude': 1.0, 'projection_latitude': 0.0, 'projection_longitude': -90.0, # 'satellite_nominal_altitude': 35786.02, 'satellite_nominal_latitude': 0.0, 'satellite_nominal_longitude': -89.5}, 'orbital_slot': None, 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, 'scan_mode': 'M3', 'scene_abbr': 'C', 'scene_id': None, "spatial_resolution": "2km at nadir", 'sensor': 'glm', 'timeline_ID': None, 'grid_mapping': 'goes_imager_projection', 'units': '1', '_FillValue': -1, 'standard_name': 'status_flag', 'long_name': 'GLM data quality flags', 'flag_meanings': "valid invalid"} self.assertDictEqual(res.attrs, exp) self.assertTrue(np.issubdtype(res.dtype, np.integer)) class TestGLML2Reader(unittest.TestCase): """Test high-level reading functionality of GLM L2 reader.""" yaml_file = "glm_l2.yaml" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_): """Create a fake reader to test.""" from satpy._config import config_search_paths from satpy.readers import load_reader self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', 'CSPP_CG_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', ]) self.assertEqual(len(loadables), 2) r.create_filehandlers(loadables) self.reader = r def test_available_datasets(self): """Test that resolution is added to YAML configured variables.""" # make sure we have some files self.assertTrue(self.reader.file_handlers) available_datasets = list(self.reader.available_dataset_ids) # flash_extent_density, DQF, and not_configured are available in our tests self.assertEqual(len(available_datasets), 3) for ds_id in available_datasets: self.assertEqual(ds_id['resolution'], 2000) # make sure not_configured was discovered names = [dataid['name'] for dataid in available_datasets] assert 'not_configured' in names satpy-0.34.0/satpy/tests/reader_tests/test_goes_imager_hrit.py000066400000000000000000000171731420401153000246430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The hrit msg reader tests package.""" import datetime import unittest from unittest import mock import numpy as np from xarray import DataArray from satpy.readers.goes_imager_hrit import ( ALTITUDE, HRITGOESFileHandler, HRITGOESPrologueFileHandler, make_gvar_float, make_sgs_time, sgs_time, ) from satpy.tests.utils import make_dataid class TestGVARFloat(unittest.TestCase): """GVAR float tester.""" def test_fun(self): """Test function.""" test_data = [(-1.0, b"\xbe\xf0\x00\x00"), (-0.1640625, b"\xbf\xd6\x00\x00"), (0.0, b"\x00\x00\x00\x00"), (0.1640625, b"\x40\x2a\x00\x00"), (1.0, b"\x41\x10\x00\x00"), (100.1640625, b"\x42\x64\x2a\x00")] for expected, str_val in test_data: val = np.frombuffer(str_val, dtype='>i4') self.assertEqual(expected, make_gvar_float(val)) class TestMakeSGSTime(unittest.TestCase): """SGS Time tester.""" def test_fun(self): """Encode the test time.""" # 2018-129 (may 9th), 21:33:27.999 tcds = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time) expected = datetime.datetime(2018, 5, 9, 21, 33, 27, 999000) self.assertEqual(make_sgs_time(tcds[0]), expected) test_pro = {'TISTR': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TCurr': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TCLMT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'SubSatLongitude': 100.1640625, 'TCHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TLTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TIPFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TISPC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'ReferenceLatitude': 0.0, 'TIIRT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TLHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TIVIT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'SubSatLatitude': 0.0, 'TIECL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'ReferenceLongitude': 100.1640625, 'TCTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TLRAN': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TINFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TIBBC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TIONA': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'ReferenceDistance': 100.1640625, 'SatelliteID': 15} class TestHRITGOESPrologueFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch('satpy.readers.goes_imager_hrit.recarray2dict') @mock.patch('satpy.readers.goes_imager_hrit.np.fromfile') @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') def test_init(self, new_fh_init, fromfile, recarray2dict): """Setup the hrit file handler for testing.""" recarray2dict.side_effect = lambda x: x[0] new_fh_init.return_value.filename = 'filename' HRITGOESPrologueFileHandler.filename = 'filename' HRITGOESPrologueFileHandler.mda = {'total_header_length': 1} ret = {} the_time = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time)[0] for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: ret[key] = the_time ret['SubSatLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] ret['ReferenceLatitude'] = 
np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] ret['SubSatLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] ret['ReferenceLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] ret['ReferenceDistance'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] ret['SatelliteID'] = 15 fromfile.return_value = [ret] m = mock.mock_open() with mock.patch('satpy.readers.goes_imager_hrit.open', m, create=True) as newopen: newopen.return_value.__enter__.return_value.seek.return_value = 1 self.reader = HRITGOESPrologueFileHandler( 'filename', {'platform_shortname': 'GOES15', 'start_time': datetime.datetime(2016, 3, 3, 0, 0), 'service': 'test_service'}, {'filetype': 'info'}) self.assertEqual(test_pro, self.reader.prologue) class TestHRITGOESFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') def setUp(self, new_fh_init): """Set up the hrit file handler for testing.""" blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() mda = {'projection_parameters': {'SSP_longitude': -123.0}, 'spectral_channel_id': 1, 'image_data_function': blob} HRITGOESFileHandler.filename = 'filename' HRITGOESFileHandler.mda = mda self.prologue = mock.MagicMock() self.prologue.prologue = test_pro self.reader = HRITGOESFileHandler('filename', {}, {}, self.prologue) def test_init(self): """Test the init.""" blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() mda = {'spectral_channel_id': 1, 'projection_parameters': {'SSP_longitude': 100.1640625}, 'image_data_function': blob} self.assertEqual(self.reader.mda, mda) @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset') def test_get_dataset(self, base_get_dataset): """Test get_dataset.""" key = make_dataid(name="CH1", calibration='reflectance') base_get_dataset.return_value = DataArray(np.arange(25).reshape(5, 5)) res = self.reader.get_dataset(key, {}) expected = np.array([[np.nan, 0.097752, 0.195503, 0.293255, 0.391007], [0.488759, 0.58651, 0.684262, 0.782014, 0.879765], [0.977517, 1.075269, 1.173021, 1.270772, 1.368524], [1.466276, 1.564027, 1.661779, 1.759531, 1.857283], [1.955034, 2.052786, 2.150538, 2.248289, 2.346041]]) self.assertTrue(np.allclose(res.values, expected, equal_nan=True)) self.assertEqual(res.attrs['units'], '%') self.assertDictEqual(res.attrs['orbital_parameters'], {'projection_longitude': self.reader.mda['projection_parameters']['SSP_longitude'], 'projection_latitude': 0.0, 'projection_altitude': ALTITUDE}) satpy-0.34.0/satpy/tests/reader_tests/test_goes_imager_nc.py000066400000000000000000000600711420401153000242700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
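# The tests below exercise the calibration chain of the GOES Imager netCDF
# reader: raw counts are converted to radiance (_viscounts2radiance,
# _ircounts2radiance) and then to reflectance (_calibrate_vis) or brightness
# temperature (_calibrate_ir). Sector identification and the start/end time
# properties are covered as well.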
"""Tests for the goes imager nc reader.""" import datetime import unittest from unittest import mock import numpy as np import pytest import xarray as xr from satpy.tests.utils import make_dataid class GOESNCBaseFileHandlerTest(unittest.TestCase): """Testing the file handler.""" longMessage = True @mock.patch('satpy.readers.goes_imager_nc.xr') @mock.patch.multiple('satpy.readers.goes_imager_nc.GOESNCBaseFileHandler', __abstractmethods__=set(), _get_sector=mock.MagicMock()) def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCBaseFileHandler self.coefs = CALIB_COEFS['GOES-15'] # Mock file access to return a fake dataset. self.time = datetime.datetime(2018, 8, 16, 16, 7) self.dummy3d = np.zeros((1, 2, 2)) self.dummy2d = np.zeros((2, 2)) self.band = 1 self.nc = xr.Dataset( {'data': xr.DataArray(self.dummy3d, dims=('time', 'yc', 'xc')), 'lon': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), 'lat': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), 'time': xr.DataArray(data=np.array([self.time], dtype='datetime64[ms]'), dims=('time',)), 'bands': xr.DataArray(data=np.array([self.band]))}, attrs={'Satellite Sensor': 'G-15'}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. self.reader = GOESNCBaseFileHandler(filename='dummy', filename_info={}, filetype_info={}) def test_init(self): """Tests reader initialization.""" self.assertEqual(self.reader.nlines, self.dummy2d.shape[0]) self.assertEqual(self.reader.ncols, self.dummy2d.shape[1]) self.assertEqual(self.reader.platform_name, 'GOES-15') self.assertEqual(self.reader.platform_shortname, 'goes15') self.assertEqual(self.reader.gvar_channel, self.band) self.assertIsInstance(self.reader.geo_data, xr.Dataset) def test_get_nadir_pixel(self): """Test identification of the nadir pixel.""" from satpy.readers.goes_imager_nc import FULL_DISC earth_mask = np.array([[0, 0, 0, 0], [0, 1, 0, 0], [1, 1, 1, 0], [0, 1, 0, 0], [0, 0, 0, 0]]) nadir_row, nadir_col = self.reader._get_nadir_pixel( earth_mask=earth_mask, sector=FULL_DISC) self.assertEqual((nadir_row, nadir_col), (2, 1), msg='Incorrect nadir pixel') def test_get_earth_mask(self): """Test identification of earth/space pixels.""" lat = xr.DataArray([-100, -90, -45, 0, 45, 90, 100]) expected = np.array([0, 1, 1, 1, 1, 1, 0]) mask = self.reader._get_earth_mask(lat) self.assertTrue(np.all(mask == expected), msg='Incorrect identification of earth/space pixel') def test_is_yaw_flip(self): """Test yaw flip identification.""" lat_asc = xr.DataArray([[1, 1, 1], [2, 2, 2], [3, 3, 3]]) lat_dsc = xr.DataArray([[3, 3, 3], [2, 2, 3], [1, 1, 1]]) self.assertEqual(self.reader._is_yaw_flip(lat_asc, delta=1), True, msg='Yaw flip not identified') self.assertEqual(self.reader._is_yaw_flip(lat_dsc, delta=1), False, msg='Yaw flip false alarm') def test_viscounts2radiance(self): """Test conversion from VIS counts to radiance.""" # Reference data is for detector #1 slope = self.coefs['00_7']['slope'][0] offset = self.coefs['00_7']['offset'][0] counts = xr.DataArray([0, 100, 200, 500, 1000, 1023]) rad_expected = xr.DataArray( [0., 41.54896, 100.06862, 275.6276, 568.2259, 581.685422]) rad = self.reader._viscounts2radiance(counts=counts, slope=slope, offset=offset) self.assertTrue(np.allclose(rad.data, rad_expected.data, atol=1E-6), msg='Incorrect conversion from VIS counts to ' 'radiance') def test_ircounts2radiance(self): """Test conversion from IR 
counts to radiance.""" # Test counts counts = xr.DataArray([0, 100, 500, 1000, 1023]) # Reference Radiance from NOAA lookup tables (same for detectors 1 and # 2, see [IR]) rad_expected = { '03_9': np.array([0, 0.140, 1.899, 4.098, 4.199]), '06_5': np.array([0, 1.825, 12.124, 24.998, 25.590]), '10_7': np.array([0, 16.126, 92.630, 188.259, 192.658]), '13_3': np.array([0, 15.084, 87.421, 177.842, 182.001]) } # The input counts are exact, but the accuracy of the output radiance is # limited to 3 digits atol = 1E-3 for ch in sorted(rad_expected.keys()): coefs = self.coefs[ch] rad = self.reader._ircounts2radiance( counts=counts, scale=coefs['scale'], offset=coefs['offset']) self.assertTrue(np.allclose(rad.data, rad_expected[ch], atol=atol), msg='Incorrect conversion from IR counts to ' 'radiance in channel {}'.format(ch)) def test_calibrate_vis(self): """Test VIS calibration.""" rad = xr.DataArray([0, 1, 10, 100, 500]) refl_expected = xr.DataArray([0., 0.188852, 1.88852, 18.8852, 94.426]) refl = self.reader._calibrate_vis(radiance=rad, k=self.coefs['00_7']['k']) self.assertTrue(np.allclose(refl.data, refl_expected.data, atol=1E-6), msg='Incorrect conversion from radiance to ' 'reflectance') def test_calibrate_ir(self): """Test IR calibration.""" # Test radiance values and corresponding BT from NOAA lookup tables # rev. H (see [IR]). rad = { '03_9': xr.DataArray([0, 0.1, 2, 3.997, 4.199]), '06_5': xr.DataArray([0, 0.821, 12.201, 25.590, 100]), '10_7': xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), '13_3': xr.DataArray([0, 22.679, 90.133, 182.001, 500]) } bt_expected = { '03_9': np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], [np.nan, 253.213, 319.451, 339.983, np.nan]]), '06_5': np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], [np.nan, 200.308, 267.879, 295.008, np.nan]]), '10_7': np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], [np.nan, 200.097, 294.429, 339.953, np.nan]]), '13_3': np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], [np.nan, 200.014, 267.524, 321.990, np.nan]]) } # first row is for detector 1, second for detector 2. # The accuracy of the input radiance is limited to 3 digits so that # the results differ slightly. 
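# Channel-dependent absolute tolerances (in K). The 3.9 um channel gets the
# loosest bound since its brightness temperature reacts most strongly to the
# rounding of the input radiances.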
atol = {'03_9': 0.04, '06_5': 0.03, '10_7': 0.01, '13_3': 0.01} for ch in sorted(rad.keys()): coefs = self.coefs[ch] for det in [0, 1]: bt = self.reader._calibrate_ir(radiance=rad[ch], coefs={'a': coefs['a'][det], 'b': coefs['b'][det], 'n': coefs['n'][det], 'btmin': coefs['btmin'], 'btmax': coefs['btmax']}) self.assertTrue( np.allclose(bt.data, bt_expected[ch][det], equal_nan=True, atol=atol[ch]), msg='Incorrect conversion from radiance to brightness ' 'temperature in channel {} detector {}'.format(ch, det)) def test_start_time(self): """Test dataset start time stamp.""" self.assertEqual(self.reader.start_time, self.time) def test_end_time(self): """Test dataset end time stamp.""" from satpy.readers.goes_imager_nc import FULL_DISC, SCAN_DURATION, UNKNOWN_SECTOR expected = { UNKNOWN_SECTOR: self.time, FULL_DISC: self.time + SCAN_DURATION[FULL_DISC] } for sector, end_time in expected.items(): self.reader.sector = sector self.assertEqual(self.reader.end_time, end_time) class GOESNCFileHandlerTest(unittest.TestCase): """Test the file handler.""" longMessage = True @mock.patch('satpy.readers.goes_imager_nc.xr') def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCFileHandler self.coefs = CALIB_COEFS['GOES-15'] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not GOESNCFileHandler._is_vis(ch)]) self.vis_channels = sorted([ch for ch in self.channels if GOESNCFileHandler._is_vis(ch)]) # Mock file access to return a fake dataset. Choose a medium count value # (100) to avoid elements being masked due to invalid # radiance/reflectance/BT nrows = ncols = 300 self.counts = 100 * 32 * np.ones((1, nrows, ncols)) # emulate 10-bit self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {'data': xr.DataArray(data=self.counts, dims=('time', 'yc', 'xc')), 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc')), 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), dims=('time',)), 'bands': xr.DataArray(data=np.array([1]))}, attrs={'Satellite Sensor': 'G-15'}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESNCFileHandler(filename='dummy', filename_info={}, filetype_info={}) def test_get_dataset_coords(self): """Test whether coordinates returned by get_dataset() are correct.""" lon = self.reader.get_dataset(key=make_dataid(name='longitude'), info={}) lat = self.reader.get_dataset(key=make_dataid(name='latitude'), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(lat.to_masked_array() == self.lat), msg='get_dataset() returns invalid latitude') self.assertTrue(np.all(lon.to_masked_array() == self.lon), msg='get_dataset() returns invalid longitude') def test_get_dataset_counts(self): """Test whether counts returned by get_dataset() are correct.""" from satpy.readers.goes_imager_nc import ALTITUDE, UNKNOWN_SECTOR self.reader.meta.update({'lon0': -75.0, 'lat0': 0.0, 'sector': UNKNOWN_SECTOR, 'nadir_row': 1, 'nadir_col': 2, 'area_def_uni': 'some_area'}) attrs_exp = {'orbital_parameters': {'projection_longitude': -75.0, 'projection_latitude': 0.0, 'projection_altitude': ALTITUDE, 'yaw_flip': True}, 'satellite_longitude': -75.0, 'satellite_latitude': 0.0, 'satellite_altitude': ALTITUDE, 'platform_name': 'GOES-15', 'sensor': 'goes_imager', 'sector': UNKNOWN_SECTOR, 'nadir_row': 1, 'nadir_col': 2, 'area_def_uniform_sampling': 'some_area'} for ch in self.channels: counts = self.reader.get_dataset( key=make_dataid(name=ch, calibration='counts'), info={}) # ... this only compares the valid (unmasked) elements self.assertTrue(np.all(self.counts/32. == counts.to_masked_array()), msg='get_dataset() returns invalid counts for ' 'channel {}'.format(ch)) # Check attributes self.assertDictEqual(counts.attrs, attrs_exp) def test_get_dataset_masks(self): """Test whether data and coordinates are masked consistently.""" # Requires that no element has been masked due to invalid # radiance/reflectance/BT (see setUp()). lon = self.reader.get_dataset(key=make_dataid(name='longitude'), info={}) lon_mask = lon.to_masked_array().mask for ch in self.channels: for calib in ('counts', 'radiance', 'reflectance', 'brightness_temperature'): try: data = self.reader.get_dataset( key=make_dataid(name=ch, calibration=calib), info={}) except ValueError: continue data_mask = data.to_masked_array().mask self.assertTrue(np.all(data_mask == lon_mask), msg='get_dataset() returns inconsistently ' 'masked {} in channel {}'.format(calib, ch)) def test_get_dataset_invalid(self): """Test handling of invalid calibrations.""" # VIS -> BT args = dict(key=make_dataid(name='00_7', calibration='brightness_temperature'), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # IR -> Reflectance args = dict(key=make_dataid(name='10_7', calibration='reflectance'), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # Unsupported calibration with pytest.raises(ValueError): args = dict(key=make_dataid(name='10_7', calibration='invalid'), info={}) def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if self.reader._is_vis(ch): calibs = {'radiance': '_viscounts2radiance', 'reflectance': '_calibrate_vis'} else: calibs = {'radiance': '_ircounts2radiance', 'brightness_temperature': '_calibrate_ir'} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: self.reader.calibrate(counts=self.reader.nc['data'], calibration=calib, channel=ch) target_func.assert_called() def test_get_sector(self): """Test sector identification.""" from satpy.readers.goes_imager_nc import ( FULL_DISC, NORTH_HEMIS_EAST, NORTH_HEMIS_WEST, SOUTH_HEMIS_EAST, SOUTH_HEMIS_WEST, UNKNOWN_SECTOR, ) shapes_vis = { (10800, 20754): FULL_DISC, (7286, 13900): NORTH_HEMIS_EAST, (2301, 13840): SOUTH_HEMIS_EAST, (5400, 13200): NORTH_HEMIS_WEST, (4300, 11090): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } shapes_ir = { (2700, 5200): FULL_DISC, 
(1850, 3450): NORTH_HEMIS_EAST, (600, 3500): SOUTH_HEMIS_EAST, (1310, 3300): NORTH_HEMIS_WEST, (1099, 2800): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } shapes = shapes_ir.copy() shapes.update(shapes_vis) for (nlines, ncols), sector_ref in shapes.items(): if (nlines, ncols) in shapes_vis: channel = '00_7' else: channel = '10_7' sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) self.assertEqual(sector, sector_ref, msg='Incorrect sector identification') class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase): """Tests for the radiances.""" longMessage = True @mock.patch('satpy.readers.goes_imager_nc.xr') def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler self.coefs = CALIB_COEFS['GOES-15'] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not GOESEUMNCFileHandler._is_vis(ch)]) self.vis_channels = sorted([ch for ch in self.channels if GOESEUMNCFileHandler._is_vis(ch)]) # Mock file access to return a fake dataset. nrows = ncols = 300 self.radiance = np.ones((1, nrows, ncols)) # IR channels self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {'data': xr.DataArray(data=self.radiance, dims=('time', 'yc', 'xc')), 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), dims=('time',)), 'bands': xr.DataArray(data=np.array([1]))}, attrs={'Satellite Sensor': 'G-15'}) geo_data = xr.Dataset( {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, attrs={'Satellite Sensor': 'G-15'}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_radiance(self): """Test getting the radiances.""" for ch in self.channels: if not self.reader._is_vis(ch): radiance = self.reader.get_dataset( key=make_dataid(name=ch, calibration='radiance'), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.radiance == radiance.to_masked_array()), msg='get_dataset() returns invalid radiance for ' 'channel {}'.format(ch)) def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if not self.reader._is_vis(ch): calibs = {'brightness_temperature': '_calibrate_ir'} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: self.reader.calibrate(data=self.reader.nc['data'], calibration=calib, channel=ch) target_func.assert_called() def test_get_sector(self): """Test sector identification.""" from satpy.readers.goes_imager_nc import ( FULL_DISC, NORTH_HEMIS_EAST, NORTH_HEMIS_WEST, SOUTH_HEMIS_EAST, SOUTH_HEMIS_WEST, UNKNOWN_SECTOR, ) shapes = { (2700, 5200): FULL_DISC, (1850, 3450): NORTH_HEMIS_EAST, (600, 3500): SOUTH_HEMIS_EAST, (1310, 3300): NORTH_HEMIS_WEST, (1099, 2800): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } for (nlines, ncols), sector_ref in shapes.items(): for channel in ('00_7', '10_7'): sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) self.assertEqual(sector, sector_ref, msg='Incorrect sector identification') class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): """Testing the reflectances.""" longMessage = True @mock.patch('satpy.readers.goes_imager_nc.xr') def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler self.coefs = CALIB_COEFS['GOES-15'] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not GOESEUMNCFileHandler._is_vis(ch)]) self.vis_channels = sorted([ch for ch in self.channels if GOESEUMNCFileHandler._is_vis(ch)]) # Mock file access to return a fake dataset. nrows = ncols = 300 self.reflectance = 50 * np.ones((1, nrows, ncols)) # Vis channel self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {'data': xr.DataArray(data=self.reflectance, dims=('time', 'yc', 'xc')), 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), dims=('time',)), 'bands': xr.DataArray(data=np.array([1]))}, attrs={'Satellite Sensor': 'G-15'}) geo_data = xr.Dataset( {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, attrs={'Satellite Sensor': 'G-15'}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_reflectance(self): """Test getting the reflectance.""" for ch in self.channels: if self.reader._is_vis(ch): refl = self.reader.get_dataset( key=make_dataid(name=ch, calibration='reflectance'), info={}) # ... this only compares the valid (unmasked) elements self.assertTrue(np.all(self.reflectance == refl.to_masked_array()), msg='get_dataset() returns invalid reflectance for ' 'channel {}'.format(ch)) satpy-0.34.0/satpy/tests/reader_tests/test_gpm_imerg.py000066400000000000000000000116371420401153000233010ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2020 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittests for GPM IMERG reader.""" import os import unittest from datetime import datetime from unittest import mock import dask.array as da import numpy as np import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler DEFAULT_FILE_SHAPE = (3600, 1800) DEFAULT_LAT_DATA = np.linspace(-89.95, 89.95, DEFAULT_FILE_SHAPE[1]).astype(np.float32) DEFAULT_LON_DATA = np.linspace(-179.95, 179.95, DEFAULT_FILE_SHAPE[0]).astype(np.float32) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def _get_geo_data(self, num_rows, num_cols): geo = { 'Grid/lon': xr.DataArray(DEFAULT_LON_DATA, attrs={'units': 'degrees_east', }, dims=('lon')), 'Grid/lat': xr.DataArray(DEFAULT_LAT_DATA, attrs={'units': 'degrees_north', }, dims=('lat')), } return geo def _get_precip_data(self, num_rows, num_cols): selection = { 'Grid/IRprecipitation': xr.DataArray( da.ones((1, num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ '_FillValue': -9999.9, 'units': 'mm/hr', 'Units': 'mm/hr', }, dims=('time', 'lon', 'lat')), } return selection def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" num_rows = 1800 num_cols = 3600 test_content = {} data = {} data = self._get_geo_data(num_rows, num_cols) test_content.update(data) data = self._get_precip_data(num_rows, num_cols) test_content.update(data) return test_content class TestHdf5IMERG(unittest.TestCase): """Test the GPM IMERG reader.""" yaml_file = "gpm_imerg.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.gpm_imerg import Hdf5IMERG self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(Hdf5IMERG, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_load_data(self): """Test loading data.""" from satpy.readers import load_reader # Filename to test, needed for start and end times filenames = [ '3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5', ] # Expected projection in area def pdict = {'proj': 'longlat', 'datum': 'WGS84', 'no_defs': None, 'type': 'crs'} reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) res = reader.load(['IRprecipitation']) self.assertEqual(1, len(res)) self.assertEqual(res['IRprecipitation'].start_time, datetime(2020, 1, 31, 23, 30, 0)) self.assertEqual(res['IRprecipitation'].end_time, datetime(2020, 1, 31, 23, 59, 59)) self.assertEqual(res['IRprecipitation'].resolution, 0.1) 
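# IMERG data are on a global 0.1 degree lat/lon (WGS84) grid, hence the
# 3600 x 1800 pixel area and the near-global extent checked below.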
self.assertEqual(res['IRprecipitation'].area.width, 3600) self.assertEqual(res['IRprecipitation'].area.height, 1800) self.assertEqual(res['IRprecipitation'].area.proj_dict, pdict) np.testing.assert_almost_equal(res['IRprecipitation'].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) satpy-0.34.0/satpy/tests/reader_tests/test_grib.py000066400000000000000000000301101420401153000222410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.grib module.""" import os import sys from unittest import mock import numpy as np import pytest import xarray as xr from satpy.dataset import DataQuery # Parameterized cases TEST_ARGS = ('proj_params', 'lon_corners', 'lat_corners') TEST_PARAMS = ( (None, None, None), # cyl default case ( { 'a': 6371229, 'b': 6371229, 'proj': 'lcc', 'lon_0': 265.0, 'lat_0': 25.0, 'lat_1': 25.0, 'lat_2': 25.0 }, [-133.459, -65.12555139, -152.8786225, -49.41598659], [12.19, 14.34208538, 54.56534318, 57.32843565] ), ) def fake_gribdata(): """Return some faked data for use as grib values.""" return np.arange(25.).reshape((5, 5)) def _round_trip_projection_lonlat_check(area): """Check that X/Y coordinates can be transformed multiple times. Many GRIB files include non-standard projects that work for the initial transformation of X/Y coordinates to longitude/latitude, but may fail in the reverse transformation. For example, an eqc projection that goes from 0 longitude to 360 longitude. The X/Y coordinates may accurately go from the original X/Y metered space to the correct longitude/latitude, but transforming those coordinates back to X/Y space will produce the wrong result. 
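In the eqc example above, longitudes recovered on the far side of the grid may be normalised back into the -180 to 180 degree range, so re-projecting them can yield X values offset by a full period from the originals.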
""" from pyproj import Proj p = Proj(area.crs) x, y = area.get_proj_vectors() lon, lat = p(x, y, inverse=True) x2, y2 = p(lon, lat) np.testing.assert_almost_equal(x, x2) np.testing.assert_almost_equal(y, y2) class FakeMessage(object): """Fake message returned by pygrib.open().message(x).""" def __init__(self, values, proj_params=None, latlons=None, **attrs): """Init the message.""" super(FakeMessage, self).__init__() self.attrs = attrs self.values = values if proj_params is None: proj_params = {'a': 6371229, 'b': 6371229, 'proj': 'cyl'} self.projparams = proj_params self._latlons = latlons def keys(self): """Get message keys.""" return self.attrs.keys() def latlons(self): """Get coordinates.""" return self._latlons def __getitem__(self, item): """Get item.""" return self.attrs[item] def valid_key(self, key): """Validate key.""" return True class FakeGRIB(object): """Fake GRIB file returned by pygrib.open.""" def __init__(self, messages=None, proj_params=None, latlons=None): """Init the grib file.""" super(FakeGRIB, self).__init__() if messages is not None: self._messages = messages else: self._messages = [ FakeMessage( values=fake_gribdata(), name='TEST', shortName='t', level=100, pressureUnits='hPa', cfName='air_temperature', units='K', dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, modelName='notknown', minimum=100., maximum=200., typeOfLevel='isobaricInhPa', jScansPositively=0, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), name='TEST', shortName='t', level=200, pressureUnits='hPa', cfName='air_temperature', units='K', dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, modelName='notknown', minimum=100., maximum=200., typeOfLevel='isobaricInhPa', jScansPositively=1, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), name='TEST', shortName='t', level=300, pressureUnits='hPa', cfName='air_temperature', units='K', dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, minimum=100., maximum=200., typeOfLevel='isobaricInhPa', jScansPositively=0, proj_params=proj_params, latlons=latlons, ), ] self.messages = len(self._messages) def message(self, msg_num): """Get a message.""" return self._messages[msg_num - 1] def seek(self, loc): """Seek.""" return def __iter__(self): """Iterate.""" return iter(self._messages) def __enter__(self): """Enter.""" return self def __exit__(self, exc_type, exc_val, exc_tb): """Exit.""" class TestGRIBReader: """Test GRIB Reader.""" yaml_file = "grib.yaml" def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib sys.modules['pygrib'] = mock.MagicMock() def teardown_method(self): """Re-enable pygrib import.""" sys.modules['pygrib'] = self.orig_pygrib def _get_test_datasets(self, dataids, fake_pygrib=None): from satpy.readers import load_reader if fake_pygrib is None: fake_pygrib = FakeGRIB() with mock.patch('satpy.readers.grib.pygrib') as pg: pg.open.return_value = fake_pygrib r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 
'gfs.t18z.sfluxgrbf106.grib2', ]) r.create_filehandlers(loadables) datasets = r.load(dataids) return datasets @staticmethod def _get_fake_pygrib(proj_params, lon_corners, lat_corners): latlons = None if lon_corners is not None: lats = np.array([ [lat_corners[0], 0, 0, 0, lat_corners[1]], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [lat_corners[2], 0, 0, 0, lat_corners[3]]]) lons = np.array([ [lon_corners[0], 0, 0, 0, lon_corners[1]], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [lon_corners[2], 0, 0, 0, lon_corners[3]]]) latlons = (lats, lons) fake_pygrib = FakeGRIB( proj_params=proj_params, latlons=latlons) return fake_pygrib def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader with mock.patch('satpy.readers.grib.pygrib') as pg: pg.open.return_value = FakeGRIB() r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'gfs.t18z.sfluxgrbf106.grib2', ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_file_pattern(self): """Test matching of file patterns.""" from satpy.readers import load_reader filenames = [ "quinoa.grb", "tempeh.grb2", "tofu.grib2", "falafel.grib", "S_NWC_NWP_1900-01-01T00:00:00Z_999.grib"] r = load_reader(self.reader_configs) files = r.select_files_from_pathnames(filenames) assert len(files) == 4 @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_load_all(self, proj_params, lon_corners, lat_corners): """Test loading all test datasets.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) dataids = [ DataQuery(name='t', level=100, modifiers=tuple()), DataQuery(name='t', level=200, modifiers=tuple()), DataQuery(name='t', level=300, modifiers=tuple()) ] datasets = self._get_test_datasets(dataids, fake_pygrib) assert len(datasets) == 3 for v in datasets.values(): assert v.attrs['units'] == 'K' assert isinstance(v, xr.DataArray) @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_area_def_crs(self, proj_params, lon_corners, lat_corners): """Check that the projection is accurate.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) dataids = [DataQuery(name='t', level=100, modifiers=tuple())] datasets = self._get_test_datasets(dataids, fake_pygrib) area = datasets['t'].attrs['area'] if not hasattr(area, 'crs'): pytest.skip("Can't test with pyproj < 2.0") _round_trip_projection_lonlat_check(area) @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_missing_attributes(self, proj_params, lon_corners, lat_corners): """Check that the grib reader handles missing attributes in the grib file.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has modelName query_contains = DataQuery(name='t', level=100, modifiers=tuple()) # This does not have modelName query_not_contains = DataQuery(name='t', level=300, modifiers=tuple()) dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) assert dataset[query_contains].attrs['modelName'] == 'notknown' assert dataset[query_not_contains].attrs['modelName'] == 'unknown' @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_jscanspositively(self, proj_params, lon_corners, lat_corners): """Check that data is flipped if the jScansPositively is present.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has no jScansPositively query_not_contains = DataQuery(name='t', level=100, modifiers=tuple()) # This contains 
jScansPositively query_contains = DataQuery(name='t', level=200, modifiers=tuple()) dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) np.testing.assert_allclose(fake_gribdata(), dataset[query_not_contains].values) np.testing.assert_allclose(fake_gribdata(), dataset[query_contains].values[::-1]) satpy-0.34.0/satpy/tests/reader_tests/test_hdf4_utils.py000066400000000000000000000114411420401153000233710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.hdf4_utils module.""" import os import unittest import numpy as np import xarray as xr try: from satpy.readers.hdf4_utils import HDF4FileHandler except ImportError: # fake the import so we can at least run the tests in this file HDF4FileHandler = object # type: ignore class FakeHDF4FileHandler(HDF4FileHandler): """Swap-in NetCDF4 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" if HDF4FileHandler is object: raise ImportError("Base 'HDF4FileHandler' could not be " "imported.") super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) self.file_content.update(kwargs) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestHDF4FileHandler(unittest.TestCase): """Test HDF4 File Handler Utility class.""" def setUp(self): """Create a test HDF4 file.""" from pyhdf.SD import SD, SDC h = SD('test.hdf', SDC.WRITE | SDC.CREATE | SDC.TRUNC) data = np.arange(10. 
* 100, dtype=np.float32).reshape((10, 100)) v1 = h.create('ds1_f', SDC.FLOAT32, (10, 100)) v1[:] = data v2 = h.create('ds1_i', SDC.INT16, (10, 100)) v2[:] = data.astype(np.int16) # Add attributes h.test_attr_str = 'test_string' h.test_attr_int = 0 h.test_attr_float = 1.2 # h.test_attr_str_arr = np.array(b"test_string2") for d in [v1, v2]: d.test_attr_str = 'test_string' d.test_attr_int = 0 d.test_attr_float = 1.2 h.end() def tearDown(self): """Remove the previously created test file.""" os.remove('test.hdf') def test_all_basic(self): """Test everything about the HDF4 class.""" from satpy.readers.hdf4_utils import HDF4FileHandler file_handler = HDF4FileHandler('test.hdf', {}, {}) for ds in ('ds1_f', 'ds1_i'): self.assertEqual(file_handler[ds + '/dtype'], np.float32 if ds.endswith('f') else np.int16) self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) # make sure that the dtype is an instance, not the class self.assertEqual(file_handler[ds].dtype.itemsize, 4 if ds.endswith('f') else 2) attrs = file_handler[ds].attrs self.assertEqual(attrs.get('test_attr_str'), 'test_string') self.assertEqual(attrs.get('test_attr_int'), 0) self.assertEqual(attrs.get('test_attr_float'), 1.2) self.assertIsInstance(file_handler['/attr/test_attr_str'], str) self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') # self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') self.assertIsInstance(file_handler['/attr/test_attr_int'], int) self.assertEqual(file_handler['/attr/test_attr_int'], 0) self.assertIsInstance(file_handler['/attr/test_attr_float'], float) self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) self.assertIsInstance(file_handler.get('ds1_f'), xr.DataArray) self.assertIsNone(file_handler.get('fake_ds')) self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') self.assertTrue('ds1_f' in file_handler) self.assertFalse('fake_ds' in file_handler) satpy-0.34.0/satpy/tests/reader_tests/test_hdf5_utils.py000066400000000000000000000144171420401153000234000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
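# FakeHDF5FileHandler below is the in-memory stand-in used throughout the
# reader tests: it bypasses the real h5py-backed loading and instead serves
# whatever content the subclass returns from get_test_content().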
"""Module for testing the satpy.readers.hdf5_utils module.""" import os import unittest import numpy as np try: from satpy.readers.hdf5_utils import HDF5FileHandler except ImportError: # fake the import so we can at least run the tests in this file HDF5FileHandler = object # type: ignore class FakeHDF5FileHandler(HDF5FileHandler): """Swap HDF5 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" if HDF5FileHandler is object: raise ImportError("Base 'HDF5FileHandler' could not be " "imported.") filename = str(filename) super(HDF5FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) self.file_content.update(kwargs) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestHDF5FileHandler(unittest.TestCase): """Test HDF5 File Handler Utility class.""" def setUp(self): """Create a test HDF5 file.""" import h5py h = h5py.File('test.h5', 'w') # Create Group g1 = h.create_group('test_group') # Add datasets ds1_f = g1.create_dataset('ds1_f', shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) ds1_i = g1.create_dataset('ds1_i', shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) ds2_f = h.create_dataset('ds2_f', shape=(10, 100), dtype=np.float32, data=np.arange(10. 
* 100).reshape((10, 100))) ds2_i = h.create_dataset('ds2_i', shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) # Add attributes # shows up as a scalar array of bytes (shape=(), size=1) h.attrs['test_attr_str'] = 'test_string' h.attrs['test_attr_byte'] = b'test_byte' h.attrs['test_attr_int'] = 0 h.attrs['test_attr_float'] = 1.2 # shows up as a numpy bytes object h.attrs['test_attr_str_arr'] = np.array(b"test_string2") g1.attrs['test_attr_str'] = 'test_string' g1.attrs['test_attr_byte'] = b'test_byte' g1.attrs['test_attr_int'] = 0 g1.attrs['test_attr_float'] = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: d.attrs['test_attr_str'] = 'test_string' d.attrs['test_attr_byte'] = b'test_byte' d.attrs['test_attr_int'] = 0 d.attrs['test_attr_float'] = 1.2 d.attrs['test_ref'] = d.ref self.var_attrs = list(d.attrs.keys()) h.close() def tearDown(self): """Remove the previously created test file.""" os.remove('test.h5') def test_all_basic(self): """Test everything about the HDF5 class.""" import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler file_handler = HDF5FileHandler('test.h5', {}, {}) for ds_name in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): ds = file_handler[ds_name] attrs = ds.attrs self.assertEqual(ds.dtype, np.float32 if ds_name.endswith('f') else np.int32) self.assertTupleEqual(file_handler[ds_name + '/shape'], (10, 100)) self.assertEqual(attrs['test_attr_str'], 'test_string') self.assertEqual(attrs['test_attr_byte'], 'test_byte') self.assertEqual(attrs['test_attr_int'], 0) self.assertEqual(attrs['test_attr_float'], 1.2) self.assertEqual(file_handler[ds_name + '/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler[ds_name + '/attr/test_attr_byte'], 'test_byte') self.assertEqual(file_handler[ds_name + '/attr/test_attr_int'], 0) self.assertEqual(file_handler[ds_name + '/attr/test_attr_float'], 1.2) self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler['/attr/test_attr_byte'], 'test_byte') self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') self.assertEqual(file_handler['/attr/test_attr_int'], 0) self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) self.assertIsInstance(file_handler.get('ds2_f'), xr.DataArray) self.assertIsNone(file_handler.get('fake_ds')) self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') self.assertTrue('ds2_f' in file_handler) self.assertFalse('fake_ds' in file_handler) self.assertIsInstance(file_handler['ds2_f/attr/test_ref'], np.ndarray) satpy-0.34.0/satpy/tests/reader_tests/test_hdfeos_base.py000066400000000000000000000552561420401153000236020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
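# The long nrt_mda and metadata_* strings below are EOS core/structure
# metadata in ODL text form, as stored in the global attributes of HDF-EOS
# files; the tests feed them to HDFEOSBaseFileReader.read_mda() and
# HDFEOSGeoReader.read_geo_resolution() and compare the parsed results.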
"""Tests for the HDF-EOS base functionality.""" import unittest nrt_mda = '''GROUP = INVENTORYMETADATA GROUPTYPE = MASTERGROUP GROUP = ECSDATAGRANULE OBJECT = REPROCESSINGPLANNED NUM_VAL = 1 VALUE = "further update is anticipated" END_OBJECT = REPROCESSINGPLANNED OBJECT = REPROCESSINGACTUAL NUM_VAL = 1 VALUE = "Near Real Time" END_OBJECT = REPROCESSINGACTUAL OBJECT = LOCALGRANULEID NUM_VAL = 1 VALUE = "MYD03.A2019051.1225.061.2019051131153.NRT.hdf" END_OBJECT = LOCALGRANULEID OBJECT = PRODUCTIONDATETIME NUM_VAL = 1 VALUE = "2019-02-20T13:11:53.000Z" END_OBJECT = PRODUCTIONDATETIME OBJECT = DAYNIGHTFLAG NUM_VAL = 1 VALUE = "Day" END_OBJECT = DAYNIGHTFLAG OBJECT = LOCALVERSIONID NUM_VAL = 1 VALUE = "6.0.4" END_OBJECT = LOCALVERSIONID END_GROUP = ECSDATAGRANULE GROUP = MEASUREDPARAMETER OBJECT = MEASUREDPARAMETERCONTAINER CLASS = "1" OBJECT = PARAMETERNAME CLASS = "1" NUM_VAL = 1 VALUE = "Geolocation" END_OBJECT = PARAMETERNAME GROUP = QAFLAGS CLASS = "1" OBJECT = AUTOMATICQUALITYFLAG NUM_VAL = 1 CLASS = "1" VALUE = "Passed" END_OBJECT = AUTOMATICQUALITYFLAG OBJECT = AUTOMATICQUALITYFLAGEXPLANATION NUM_VAL = 1 CLASS = "1" VALUE = "Set to 'Failed' if processing error occurred, set to 'Passed' otherwise" END_OBJECT = AUTOMATICQUALITYFLAGEXPLANATION OBJECT = SCIENCEQUALITYFLAG NUM_VAL = 1 VALUE = "Not Investigated" CLASS = "1" END_OBJECT = SCIENCEQUALITYFLAG END_GROUP = QAFLAGS GROUP = QASTATS CLASS = "1" OBJECT = QAPERCENTMISSINGDATA NUM_VAL = 1 CLASS = "1" VALUE = 0 END_OBJECT = QAPERCENTMISSINGDATA OBJECT = QAPERCENTOUTOFBOUNDSDATA NUM_VAL = 1 CLASS = "1" VALUE = 0 END_OBJECT = QAPERCENTOUTOFBOUNDSDATA END_GROUP = QASTATS END_OBJECT = MEASUREDPARAMETERCONTAINER END_GROUP = MEASUREDPARAMETER GROUP = ORBITCALCULATEDSPATIALDOMAIN OBJECT = ORBITCALCULATEDSPATIALDOMAINCONTAINER CLASS = "1" OBJECT = ORBITNUMBER CLASS = "1" NUM_VAL = 1 VALUE = 89393 END_OBJECT = ORBITNUMBER OBJECT = EQUATORCROSSINGLONGITUDE CLASS = "1" NUM_VAL = 1 VALUE = -151.260740805733 END_OBJECT = EQUATORCROSSINGLONGITUDE OBJECT = EQUATORCROSSINGTIME CLASS = "1" NUM_VAL = 1 VALUE = "12:49:52.965727" END_OBJECT = EQUATORCROSSINGTIME OBJECT = EQUATORCROSSINGDATE CLASS = "1" NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = EQUATORCROSSINGDATE END_OBJECT = ORBITCALCULATEDSPATIALDOMAINCONTAINER END_GROUP = ORBITCALCULATEDSPATIALDOMAIN GROUP = COLLECTIONDESCRIPTIONCLASS OBJECT = SHORTNAME NUM_VAL = 1 VALUE = "MYD03" END_OBJECT = SHORTNAME OBJECT = VERSIONID NUM_VAL = 1 VALUE = 61 END_OBJECT = VERSIONID END_GROUP = COLLECTIONDESCRIPTIONCLASS GROUP = INPUTGRANULE OBJECT = INPUTPOINTER NUM_VAL = 8 VALUE = ("MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf", "MYD03LUT.coeff_V6.1.4", "PM1EPHND_NRT.A2019051.1220.061.2019051125628", "PM1EPHND_NRT.A2019051.1225.061.2019051125628", "PM1EPHND_NRT.A2019051.1230.061.2019051125628", " PM1ATTNR_NRT.A2019051.1220.061.2019051125628", "PM1ATTNR_NRT.A2019051.1225.061.2019051125628", "PM1ATTNR_NRT.A2019051.1230.061.2019051125628") END_OBJECT = INPUTPOINTER END_GROUP = INPUTGRANULE GROUP = SPATIALDOMAINCONTAINER GROUP = HORIZONTALSPATIALDOMAINCONTAINER GROUP = GPOLYGON OBJECT = GPOLYGONCONTAINER CLASS = "1" GROUP = GRING CLASS = "1" OBJECT = EXCLUSIONGRINGFLAG NUM_VAL = 1 CLASS = "1" VALUE = "N" END_OBJECT = EXCLUSIONGRINGFLAG END_GROUP = GRING GROUP = GRINGPOINT CLASS = "1" OBJECT = GRINGPOINTLONGITUDE NUM_VAL = 4 CLASS = "1" VALUE = (25.3839329817764, 1.80418778807854, -6.50842421663422, 23.0260060198343) END_OBJECT = GRINGPOINTLONGITUDE OBJECT = GRINGPOINTLATITUDE NUM_VAL = 4 CLASS = "1" 
VALUE = (29.5170117594673, 26.1480434828114, 43.2445462598877, 47.7959787025408) END_OBJECT = GRINGPOINTLATITUDE OBJECT = GRINGPOINTSEQUENCENO NUM_VAL = 4 CLASS = "1" VALUE = (1, 2, 3, 4) END_OBJECT = GRINGPOINTSEQUENCENO END_GROUP = GRINGPOINT END_OBJECT = GPOLYGONCONTAINER END_GROUP = GPOLYGON END_GROUP = HORIZONTALSPATIALDOMAINCONTAINER END_GROUP = SPATIALDOMAINCONTAINER GROUP = RANGEDATETIME OBJECT = RANGEBEGINNINGTIME NUM_VAL = 1 VALUE = "12:25:00.000000" END_OBJECT = RANGEBEGINNINGTIME OBJECT = RANGEENDINGTIME NUM_VAL = 1 VALUE = "12:30:00.000000" END_OBJECT = RANGEENDINGTIME OBJECT = RANGEBEGINNINGDATE NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = RANGEBEGINNINGDATE OBJECT = RANGEENDINGDATE NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = RANGEENDINGDATE END_GROUP = RANGEDATETIME GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER CLASS = "1" OBJECT = ASSOCIATEDSENSORSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "MODIS" END_OBJECT = ASSOCIATEDSENSORSHORTNAME OBJECT = ASSOCIATEDPLATFORMSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "Aqua" END_OBJECT = ASSOCIATEDPLATFORMSHORTNAME OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "MODIS" END_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR GROUP = PGEVERSIONCLASS OBJECT = PGEVERSION NUM_VAL = 1 VALUE = "6.1.4" END_OBJECT = PGEVERSION END_GROUP = PGEVERSIONCLASS GROUP = ADDITIONALATTRIBUTES OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "1" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "1" NUM_VAL = 1 VALUE = "GRANULENUMBER" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "1" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "1" VALUE = "151" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "2" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "2" NUM_VAL = 1 VALUE = "SCI_STATE" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "2" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "2" VALUE = "1" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "3" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "3" NUM_VAL = 1 VALUE = "SCI_ABNORM" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "3" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "3" VALUE = "1" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "5" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "5" NUM_VAL = 1 VALUE = "PROCESSVERSION" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "5" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "5" VALUE = "6.1.0" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "4" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "4" NUM_VAL = 1 VALUE = "GEO_EST_RMS_ERROR" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "4" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "4" VALUE = "75 " END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "6" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "6" NUM_VAL = 1 VALUE = "identifier_product_doi" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = 
INFORMATIONCONTENT CLASS = "6" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "6" VALUE = "10.5067/MODIS/MYD03.NRT.061" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "7" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "7" NUM_VAL = 1 VALUE = "identifier_product_doi_authority" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "7" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "7" VALUE = "http://dx.doi.org" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER END_GROUP = ADDITIONALATTRIBUTES END_GROUP = INVENTORYMETADATA END''' # noqa: E501 nrt_mda_dict = { 'INVENTORYMETADATA': { 'ADDITIONALATTRIBUTES': { 'ADDITIONALATTRIBUTESCONTAINER': { 'ADDITIONALATTRIBUTENAME': { 'VALUE': 'identifier_product_doi_authority' }, 'INFORMATIONCONTENT': { 'PARAMETERVALUE': { 'VALUE': 'http://dx.doi.org' } } } }, 'ASSOCIATEDPLATFORMINSTRUMENTSENSOR': { 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER': { 'ASSOCIATEDINSTRUMENTSHORTNAME': { 'VALUE': 'MODIS' }, 'ASSOCIATEDPLATFORMSHORTNAME': { 'VALUE': 'Aqua' }, 'ASSOCIATEDSENSORSHORTNAME': { 'VALUE': 'MODIS' } } }, 'COLLECTIONDESCRIPTIONCLASS': { 'SHORTNAME': { 'VALUE': 'MYD03' }, 'VERSIONID': { 'VALUE': 61 } }, 'ECSDATAGRANULE': { 'DAYNIGHTFLAG': { 'VALUE': 'Day' }, 'LOCALGRANULEID': { 'VALUE': 'MYD03.A2019051.1225.061.2019051131153.NRT.hdf' }, 'LOCALVERSIONID': { 'VALUE': '6.0.4' }, 'PRODUCTIONDATETIME': { 'VALUE': '2019-02-20T13:11:53.000Z' }, 'REPROCESSINGACTUAL': { 'VALUE': 'Near ' 'Real ' 'Time' }, 'REPROCESSINGPLANNED': { 'VALUE': 'further ' 'update ' 'is ' 'anticipated' } }, 'GROUPTYPE': 'MASTERGROUP', 'INPUTGRANULE': { 'INPUTPOINTER': { 'VALUE': ('MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf', 'MYD03LUT.coeff_V6.1.4', 'PM1EPHND_NRT.A2019051.1220.061.2019051125628', 'PM1EPHND_NRT.A2019051.1225.061.2019051125628', 'PM1EPHND_NRT.A2019051.1230.061.2019051125628', ' ' 'PM1ATTNR_NRT.A2019051.1220.061.2019051125628', 'PM1ATTNR_NRT.A2019051.1225.061.2019051125628', 'PM1ATTNR_NRT.A2019051.1230.061.2019051125628') } }, 'MEASUREDPARAMETER': { 'MEASUREDPARAMETERCONTAINER': { 'PARAMETERNAME': { 'VALUE': 'Geolocation' }, 'QAFLAGS': { 'AUTOMATICQUALITYFLAG': { 'VALUE': 'Passed' }, 'AUTOMATICQUALITYFLAGEXPLANATION': { 'VALUE': 'Set ' 'to ' "'Failed' " 'if ' 'processing ' 'error ' 'occurred, ' 'set ' 'to ' "'Passed' " 'otherwise' }, 'SCIENCEQUALITYFLAG': { 'VALUE': 'Not ' 'Investigated' } }, 'QASTATS': { 'QAPERCENTMISSINGDATA': { 'VALUE': 0 }, 'QAPERCENTOUTOFBOUNDSDATA': { 'VALUE': 0 } } } }, 'ORBITCALCULATEDSPATIALDOMAIN': { 'ORBITCALCULATEDSPATIALDOMAINCONTAINER': { 'EQUATORCROSSINGDATE': { 'VALUE': '2019-02-20' }, 'EQUATORCROSSINGLONGITUDE': { 'VALUE': -151.260740805733 }, 'EQUATORCROSSINGTIME': { 'VALUE': '12:49:52.965727' }, 'ORBITNUMBER': { 'VALUE': 89393 } } }, 'PGEVERSIONCLASS': { 'PGEVERSION': { 'VALUE': '6.1.4' } }, 'RANGEDATETIME': { 'RANGEBEGINNINGDATE': { 'VALUE': '2019-02-20' }, 'RANGEBEGINNINGTIME': { 'VALUE': '12:25:00.000000' }, 'RANGEENDINGDATE': { 'VALUE': '2019-02-20' }, 'RANGEENDINGTIME': { 'VALUE': '12:30:00.000000' } }, 'SPATIALDOMAINCONTAINER': { 'HORIZONTALSPATIALDOMAINCONTAINER': { 'GPOLYGON': { 'GPOLYGONCONTAINER': { 'GRING': { 'EXCLUSIONGRINGFLAG': { 'VALUE': 'N' } }, 'GRINGPOINT': { 'GRINGPOINTLATITUDE': { 'VALUE': (29.5170117594673, 26.1480434828114, 43.2445462598877, 47.7959787025408) }, 'GRINGPOINTLONGITUDE': { 'VALUE': (25.3839329817764, 1.80418778807854, 
-6.50842421663422, 23.0260060198343) }, 'GRINGPOINTSEQUENCENO': { 'VALUE': (1, 2, 3, 4) } } } } } } } } metadata_modisl1b = """ GROUP=SwathStructure GROUP=SWATH_1 SwathName="MODIS_SWATH_Type_L1B" GROUP=DimensionMap OBJECT=DimensionMap_1 GeoDimension="2*nscans" DataDimension="10*nscans" Offset=2 Increment=5 END_OBJECT=DimensionMap_1 OBJECT=DimensionMap_2 GeoDimension="1KM_geo_dim" DataDimension="Max_EV_frames" Offset=2 Increment=5 END_OBJECT=DimensionMap_2 END_GROUP=DimensionMap END_GROUP=SWATH_1 END_GROUP=SwathStructure END """ # noqa: E501 metadata_modisl2 = """ GROUP=SwathStructure GROUP=SWATH_1 SwathName="mod35" GROUP=DimensionMap OBJECT=DimensionMap_1 GeoDimension="Cell_Across_Swath_5km" DataDimension="Cell_Across_Swath_1km" Offset=2 Increment=5 END_OBJECT=DimensionMap_1 OBJECT=DimensionMap_2 GeoDimension="Cell_Along_Swath_5km" DataDimension="Cell_Along_Swath_1km" Offset=2 Increment=5 END_OBJECT=DimensionMap_2 END_GROUP=DimensionMap GROUP=IndexDimensionMap END_GROUP=IndexDimensionMap END_GROUP=SWATH_1 END_GROUP=SwathStructure END """ # noqa: E501 class TestReadMDA(unittest.TestCase): """Test reading metadata.""" def test_read_mda(self): """Test reading basic metadata.""" from satpy.readers.hdfeos_base import HDFEOSBaseFileReader res = HDFEOSBaseFileReader.read_mda(nrt_mda) self.assertDictEqual(res, nrt_mda_dict) def test_read_mda_geo_resolution(self): """Test reading geo resolution.""" from satpy.readers.hdfeos_base import HDFEOSGeoReader resolution_l1b = HDFEOSGeoReader.read_geo_resolution( HDFEOSGeoReader.read_mda(metadata_modisl1b) ) self.assertEqual(resolution_l1b, 1000) resolution_l2 = HDFEOSGeoReader.read_geo_resolution( HDFEOSGeoReader.read_mda(metadata_modisl2) ) self.assertEqual(resolution_l2, 5000) satpy-0.34.0/satpy/tests/reader_tests/test_hrit_base.py000066400000000000000000000150731420401153000232710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
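# These tests cover the on-the-fly xRIT decompression helpers (driven by the
# XRIT_DECOMPRESS_PATH environment variable) and the basic navigation of
# HRITFileHandler, from line/column to projection x/y and the resulting
# area extent.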
"""The HRIT base reader tests package.""" import os import unittest from datetime import datetime from tempfile import NamedTemporaryFile, gettempdir from unittest import mock import numpy as np from satpy.readers.hrit_base import HRITFileHandler, decompress, get_xritdecompress_cmd, get_xritdecompress_outfile class TestHRITDecompress(unittest.TestCase): """Test the on-the-fly decompression.""" def test_xrit_cmd(self): """Test running the xrit decompress command.""" old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) os.environ['XRIT_DECOMPRESS_PATH'] = '/path/to/my/bin' self.assertRaises(IOError, get_xritdecompress_cmd) os.environ['XRIT_DECOMPRESS_PATH'] = gettempdir() self.assertRaises(IOError, get_xritdecompress_cmd) with NamedTemporaryFile() as fd: os.environ['XRIT_DECOMPRESS_PATH'] = fd.name fname = fd.name res = get_xritdecompress_cmd() if old_env is not None: os.environ['XRIT_DECOMPRESS_PATH'] = old_env else: os.environ.pop('XRIT_DECOMPRESS_PATH') self.assertEqual(fname, res) def test_xrit_outfile(self): """Test the right decompression filename is used.""" stdout = [b"Decompressed file: bla.__\n"] outfile = get_xritdecompress_outfile(stdout) self.assertEqual(outfile, b'bla.__') @mock.patch('satpy.readers.hrit_base.Popen') def test_decompress(self, popen): """Test decompression works.""" popen.return_value.returncode = 0 popen.return_value.communicate.return_value = [b"Decompressed file: bla.__\n"] old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) with NamedTemporaryFile() as fd: os.environ['XRIT_DECOMPRESS_PATH'] = fd.name res = decompress('bla.C_') if old_env is not None: os.environ['XRIT_DECOMPRESS_PATH'] = old_env else: os.environ.pop('XRIT_DECOMPRESS_PATH') self.assertEqual(res, os.path.join('.', 'bla.__')) class TestHRITFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch('satpy.readers.hrit_base.np.fromfile') def setUp(self, fromfile): """Set up the hrit file handler for testing.""" m = mock.mock_open() fromfile.return_value = np.array([(1, 2)], dtype=[('total_header_length', int), ('hdr_id', int)]) with mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen: newopen.return_value.__enter__.return_value.tell.return_value = 1 self.reader = HRITFileHandler('filename', {'platform_shortname': 'MSG3', 'start_time': datetime(2016, 3, 3, 0, 0)}, {'filetype': 'info'}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) ncols = 3712 nlines = 464 nbits = 10 self.reader.mda['number_of_bits_per_pixel'] = nbits self.reader.mda['number_of_lines'] = nlines self.reader.mda['number_of_columns'] = ncols self.reader.mda['data_field_length'] = nlines * ncols * nbits self.reader.mda['cfac'] = 5 self.reader.mda['lfac'] = 5 self.reader.mda['coff'] = 10 self.reader.mda['loff'] = 10 self.reader.mda['projection_parameters'] = {} self.reader.mda['projection_parameters']['a'] = 6378169.0 self.reader.mda['projection_parameters']['b'] = 6356583.8 self.reader.mda['projection_parameters']['h'] = 35785831.0 self.reader.mda['projection_parameters']['SSP_longitude'] = 44 def test_get_xy_from_linecol(self): """Test get_xy_from_linecol.""" x__, y__ = self.reader.get_xy_from_linecol(0, 0, (10, 10), (5, 5)) self.assertEqual(-131072, x__) self.assertEqual(-131072, y__) x__, y__ = self.reader.get_xy_from_linecol(10, 10, (10, 10), (5, 5)) self.assertEqual(0, x__) self.assertEqual(0, y__) x__, y__ = self.reader.get_xy_from_linecol(20, 20, (10, 10), (5, 5)) self.assertEqual(131072, x__) self.assertEqual(131072, y__) def test_get_area_extent(self): """Test getting the area 
extent.""" res = self.reader.get_area_extent((20, 20), (10, 10), (5, 5), 33) exp = (-71717.44995740513, -71717.44995740513, 79266.655216079365, 79266.655216079365) self.assertTupleEqual(res, exp) def test_get_area_def(self): """Test getting an area definition.""" from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def('VIS06') proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378169.0) self.assertEqual(b, 6356583.8) self.assertEqual(proj_dict['h'], 35785831.0) self.assertEqual(proj_dict['lon_0'], 44.0) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') self.assertEqual(area.area_extent, (-77771774058.38356, -77771774058.38356, 30310525626438.438, 3720765401003.719)) @mock.patch('satpy.readers.hrit_base.np.memmap') def test_read_band(self, memmap): """Test reading a single band.""" nbits = self.reader.mda['number_of_bits_per_pixel'] memmap.return_value = np.random.randint(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band('VIS006', None) self.assertEqual(res.compute().shape, (464, 3712)) satpy-0.34.0/satpy/tests/reader_tests/test_hsaf_grib.py000066400000000000000000000140221420401153000232460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.grib module.""" import sys import unittest from datetime import datetime from unittest import mock import numpy as np from satpy.tests.utils import make_dataid class FakeMessage(object): """Fake message returned by pygrib.open().message(x).""" def __init__(self, values, proj_params=None, latlons=None, **attrs): """Init the fake message.""" super(FakeMessage, self).__init__() self.attrs = attrs self.values = values if proj_params is None: proj_params = {'a': 6378140.0, 'b': 6356755.0, 'lat_0': 0.0, 'lon_0': 0.0, 'proj': 'geos', 'h': 35785830.098} self.projparams = proj_params self._latlons = latlons def latlons(self): """Get the latlons.""" return self._latlons def __getitem__(self, item): """Get item.""" return self.attrs[item] def valid_key(self, key): """Check if key is valid.""" return True class FakeGRIB(object): """Fake GRIB file returned by pygrib.open.""" def __init__(self, messages=None, proj_params=None, latlons=None): """Init the fake grib file.""" super(FakeGRIB, self).__init__() if messages is not None: self._messages = messages else: self._messages = [ FakeMessage( values=np.arange(25.).reshape((5, 5)), name='Instantaneous rain rate', shortName='irrate', cfName='unknown', units='kg m**-2 s**-1', dataDate=20190603, dataTime=1645, missingValue=9999, modelName='unknown', centreDescription='Rome', minimum=0.0, maximum=0.01475, Nx=3712, Ny=3712, NrInRadiusOfEarth=6.6107, dx=3622, dy=3610, XpInGridLengths=1856.0, YpInGridLengths=1856.0, jScansPositively=0, proj_params=proj_params, latlons=latlons, ) ] self.messages = len(self._messages) def message(self, msg_num): """Fake message.""" return self._messages[msg_num - 1] def seek(self, loc): """Fake seek.""" return def __iter__(self): """Iterate over messages.""" return iter(self._messages) def __enter__(self): """Enter the context.""" return self def __exit__(self, exc_type, exc_val, exc_tb): """Exit the context.""" class TestHSAFFileHandler(unittest.TestCase): """Test HSAF Reader.""" def setUp(self): """Wrap pygrib to read fake data.""" try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib sys.modules['pygrib'] = mock.MagicMock() def tearDown(self): """Re-enable pygrib import.""" sys.modules['pygrib'] = self.orig_pygrib @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) def test_init(self, pg): """Test the init function, ensure that the correct dates and metadata are returned.""" pg.open.return_value = FakeGRIB() correct_dt = datetime(2019, 6, 3, 16, 45, 0) from satpy.readers.hsaf_grib import HSAFFileHandler fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) self.assertEqual(fh._analysis_time, correct_dt) self.assertEqual(fh.metadata['projparams']['lat_0'], 0.0) self.assertEqual(fh.metadata['shortName'], 'irrate') self.assertEqual(fh.metadata['nx'], 3712) @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) def test_get_area_def(self, pg): """Test the area definition setup, checks the size and extent.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) area_def = HSAFFileHandler.get_area_def(fh, 'H03B') self.assertEqual(area_def.width, 3712) self.assertAlmostEqual(area_def.area_extent[0], -5569209.3026, places=3) self.assertAlmostEqual(area_def.area_extent[3], 5587721.9097, places=3) @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) def test_get_dataset(self, pg): """Test 
reading the actual datasets from a grib file.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler # Instantaneous precipitation fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) fh.filename = "H03B" ds_id = make_dataid(name='H03B') data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) # Accumulated precipitation fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) fh.filename = "H05B" ds_id = make_dataid(name='H05B') data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) satpy-0.34.0/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py000066400000000000000000000470661420401153000242070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020, 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.hy2_scat_l2b_h5 module.""" import os import unittest from unittest import mock import dask.array as da import numpy as np import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(np.float32) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(-10, 10, DEFAULT_FILE_SHAPE[1]).astype(np.float32) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def _get_geo_data(self, num_rows, num_cols): geo = { 'wvc_lon': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ 'fill_value': 1.7e+38, 'scale_factor': 1., 'add_offset': 0., 'units': 'degree', 'valid range': [0, 359.99], }, dims=('y', 'x')), 'wvc_lat': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ 'fill_value': 1.7e+38, 'scale_factor': 1., 'add_offset': 0., 'units': 'degree', 'valid range': [-90.0, 90.0], }, dims=('y', 'x')), } return geo def _get_geo_data_nsoas(self, num_rows, num_cols): geo = { 'wvc_lon': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ 'fill_value': 1.7e+38, 'scale_factor': 1., 'add_offset': 0., 'units': 'degree', 'valid_range': [0, 359.99], }, dims=('y', 'x')), 'wvc_lat': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ 'fill_value': 1.7e+38, 'scale_factor': 1., 'add_offset': 0., 'units': 'degree', 'valid_range': [-90.0, 90.0], }, dims=('y', 'x')), } return geo def _get_selection_data(self, num_rows, num_cols): selection = { 'wvc_selection': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, 
dtype=np.int8), attrs={ 'fill_value': 0, 'scale_factor': 1., 'add_offset': 0., 'units': 'count', 'valid range': [1, 8], }, dims=('y', 'x')), 'wind_speed_selection': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ 'fill_value': -32767, 'scale_factor': 0.1, 'add_offset': 0., 'units': 'deg', 'valid range': [0, 3599], }, dims=('y', 'x')), 'wind_dir_selection': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ 'fill_value': -32767, 'scale_factor': 0.01, 'add_offset': 0., 'units': 'm/s', 'valid range': [0, 5000], }, dims=('y', 'x')), 'model_dir': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ 'fill_value': -32767, 'scale_factor': 0.01, 'add_offset': 0., 'units': 'm/s', 'valid range': [0, 5000], }, dims=('y', 'x')), 'model_speed': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ 'fill_value': -32767, 'scale_factor': 0.1, 'add_offset': 0., 'units': 'deg', 'valid range': [0, 3599], }, dims=('y', 'x')), 'num_ambigs': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ 'fill_value': 0, 'scale_factor': 1., 'add_offset': 0., 'units': 'count', 'valid range': [1, 8], }, dims=('y', 'x')), 'num_in_aft': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ 'fill_value': 0, 'scale_factor': 1., 'add_offset': 0., 'units': 'count', 'valid range': [1, 127], }, dims=('y', 'x')), 'num_in_fore': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ 'fill_value': 0, 'scale_factor': 1., 'add_offset': 0., 'units': 'count', 'valid range': [1, 127], }, dims=('y', 'x')), 'num_out_aft': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ 'fill_value': 0, 'scale_factor': 1., 'add_offset': 0., 'units': 'count', 'valid range': [1, 127], }, dims=('y', 'x')), 'num_out_fore': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ 'fill_value': 0, 'scale_factor': 1., 'add_offset': 0., 'units': 'count', 'valid range': [1, 127], }, dims=('y', 'x')), 'wvc_quality_flag': xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'fill_value': 2.14748e+09, 'scale_factor': 1., 'add_offset': 0., 'units': 'na', 'valid range': [1, 2.14748e+09], }, dims=('y', 'x')), } return selection def _get_all_ambiguities_data(self, num_rows, num_cols, num_amb): all_amb = { 'max_likelihood_est': xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ 'fill_value': -32767, 'scale_factor': 1., 'add_offset': 0., 'units': 'na', 'valid range': [0, 32767], }, dims=('y', 'x', 'selection')), 'wind_dir': xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ 'fill_value': -32767, 'scale_factor': 0.1, 'add_offset': 0., 'units': 'deg', 'valid range': [0, 3599], }, dims=('y', 'x', 'selection')), 'wind_speed': xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ 'fill_value': -32767, 'scale_factor': 0.01, 'add_offset': 0., 'units': 'm/s', 'valid range': [0, 5000], }, dims=('y', 'x', 'selection')), } return all_amb def _get_wvc_row_time(self, num_rows): data = ["20200326T01:11:07.639", "20200326T01:11:11.443", "20200326T01:11:15.246", "20200326T01:11:19.049", "20200326T01:11:22.856", "20200326T01:11:26.660", "20200326T01:11:30.464", "20200326T01:11:34.268", "20200326T01:11:38.074", "20200326T01:11:41.887"] wvc_row_time = { 'wvc_row_time': xr.DataArray(data, attrs={ 
'fill_value': "", }, dims=('y',)), } return wvc_row_time def _get_global_attrs(self, num_rows, num_cols): return { '/attr/Equator_Crossing_Longitude': '246.408397', '/attr/Equator_Crossing_Time': '20200326T01:37:15.875', '/attr/HDF_Version_Id': 'HDF5-1.8.16', '/attr/Input_L2A_Filename': 'H2B_OPER_SCA_L2A_OR_20200326T010839_20200326T025757_07076_dps_250_20.h5', '/attr/Instrument_ShorName': 'HSCAT-B', '/attr/L2A_Inputdata_Version': '10', '/attr/L2B_Actual_WVC_Rows': np.int32(num_rows), '/attr/L2B_Algorithm_Descriptor': ('Wind retrieval processing uses the multiple solution scheme (MSS) for ' 'wind inversion with the NSCAT-4 GMF,and a circular median filter ' 'method (CMF) for ambiguity removal. The ECMWF/NCEP forescate data are ' 'used as background winds in the CMF'), '/attr/L2B_Data_Version': '10', '/attr/L2B_Expected_WVC_Rows': np.int32(num_rows), '/attr/L2B_Processing_Type': 'OPER', '/attr/L2B_Processor_Name': 'hy2_sca_l2b_pro', '/attr/L2B_Processor_Version': '01.00', '/attr/Long_Name': 'HY-2B/SCAT Level 2B Ocean Wind Vectors in 25.0 km Swath Grid', '/attr/Orbit_Inclination': np.float32(99.3401), '/attr/Orbit_Number': '07076', '/attr/Output_L2B_Filename': 'H2B_OPER_SCA_L2B_OR_20200326T011107_20200326T025540_07076_dps_250_20_owv.h5', '/attr/Platform_LongName': 'Haiyang 2B Ocean Observing Satellite', '/attr/Platform_ShortName': 'HY-2B', '/attr/Platform_Type': 'spacecraft', '/attr/Producer_Agency': 'Ministry of Natural Resources of the People\'s Republic of China', '/attr/Producer_Institution': 'NSOAS', '/attr/Production_Date_Time': '20200326T06:23:10', '/attr/Range_Beginning_Time': '20200326T01:11:07', '/attr/Range_Ending_Time': '20200326T02:55:40', '/attr/Rev_Orbit_Period': '14 days', '/attr/Short_Name': 'HY-2B SCAT-L2B-25km', '/attr/Sigma0_Granularity': 'whole pulse', '/attr/WVC_Size': '25000m*25000m', } def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" num_rows = 300 num_cols = 10 num_amb = 8 test_content = {} test_content.update(self._get_global_attrs(num_rows, num_cols)) data = {} if 'OPER_SCA_L2B' in filename: test_content.update({'/attr/L2B_Expected_WVC_Cells': np.int32(num_cols)}) data = self._get_geo_data_nsoas(num_rows, num_cols) else: test_content.update({'/attr/L2B_Number_WVC_cells': np.int32(num_cols)}) data = self._get_geo_data(num_rows, num_cols) test_content.update(data) data = self._get_selection_data(num_rows, num_cols) test_content.update(data) data = self._get_all_ambiguities_data(num_rows, num_cols, num_amb) test_content.update(data) data = self._get_wvc_row_time(num_rows) test_content.update(data) return test_content class TestHY2SCATL2BH5Reader(unittest.TestCase): """Test HY2 Scatterometer L2B H5 Reader.""" yaml_file = "hy2_scat_l2b_h5.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.hy2_scat_l2b_h5 import HY2SCATL2BH5FileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(HY2SCATL2BH5FileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_load_geo(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', 
] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) res = reader.load(['wvc_lon', 'wvc_lat']) self.assertEqual(2, len(res)) def test_load_geo_nsoas(self): """Test loading data from nsoas file.""" from satpy.readers import load_reader filenames = [ 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) res = reader.load(['wvc_lon', 'wvc_lat']) self.assertEqual(2, len(res)) def test_load_data_selection(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) res = reader.load(['wind_speed_selection', 'wind_dir_selection', 'wvc_selection']) self.assertEqual(3, len(res)) def test_load_data_all_ambiguities(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) res = reader.load(['wind_speed', 'wind_dir', 'max_likelihood_est', 'model_dir', 'model_speed', 'num_ambigs', 'num_in_aft', 'num_in_fore', 'num_out_aft', 'num_out_fore', 'wvc_quality_flag']) self.assertEqual(11, len(res)) def test_load_data_row_times(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) res = reader.load(['wvc_row_time']) self.assertEqual(1, len(res)) def test_reading_attrs(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files res = reader.load(['wvc_lon']) self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10) with self.assertRaises(KeyError): self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10) def test_reading_attrs_nsoas(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files res = reader.load(['wvc_lon']) with self.assertRaises(KeyError): 
self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10) self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10) satpy-0.34.0/satpy/tests/reader_tests/test_iasi_l2.py000066400000000000000000000327321420401153000226540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for IASI L2 reader.""" import os import unittest import numpy as np SCAN_WIDTH = 120 NUM_LEVELS = 138 NUM_SCANLINES = 1 FNAME = "W_XX-EUMETSAT-kan,iasi,metopb+kan_C_EUMS_20170920103559_IASI_PW3_02_M01_20170920102217Z_20170920102912Z.hdf" # Structure for the test data, to be written to HDF5 file TEST_DATA = { # Not implemented in the reader 'Amsu': { 'FLG_AMSUBAD': {'data': np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), 'attrs': {}} }, # Not implemented in the reader 'INFO': { 'OmC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'long_name': "Cloud signal. Predicted average window channel 'Obs minus Calc", 'units': 'K'}}, 'mdist': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}} }, 'L1C': { 'Latitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees_north'}}, 'Longitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees_north'}}, 'SatAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees'}}, 'SatZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees'}}, 'SensingTime_day': {'data': np.array([6472], dtype=np.uint16), 'attrs': {}}, 'SensingTime_msec': {'data': np.array([37337532], dtype=np.uint32), 'attrs': {}}, 'SunAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees'}}, 'SunZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees'}}, }, # Not implemented in the reader 'Maps': { 'Height': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'm'}}, 'HeightStd': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'm'}}, }, # Not implemented in the reader 'Mhs': { 'FLG_MHSBAD': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), 'attrs': {}} }, 'PWLR': { 'E': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), 'attrs': {'emissivity_wavenumbers': np.array([699.3, 826.4, 925.9, 1075.2, 1204.8, 1315.7, 1724.1, 2000.0, 2325.5, 2702.7], dtype=np.float32)}}, 'O': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), 'attrs': {'long_name': 'Ozone mixing ratio vertical profile', 'units': 'kg/kg'}}, 'OC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'P': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), 'attrs': {'long_name': 
'Atmospheric pressures at which the vertical profiles are given. ' 'Last value is the surface pressure', 'units': 'hpa'}}, 'QE': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QO': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QP': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QT': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QTs': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QW': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'T': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), 'attrs': {'long_name': 'Temperature vertical profile', 'units': 'K'}}, 'Ts': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'long_name': 'Surface skin temperature', 'units': 'K'}}, 'W': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), 'attrs': {'long_name': 'Water vapour mixing ratio vertical profile', 'units': 'kg/kg'}}, 'WC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'long_name': 'Water vapour total columnar amount', 'units': 'mm'}}, } } def save_test_data(path): """Save the test to the indicated directory.""" import h5py with h5py.File(os.path.join(path, FNAME), 'w') as fid: # Create groups for grp in TEST_DATA: fid.create_group(grp) # Write datasets for dset in TEST_DATA[grp]: fid[grp][dset] = TEST_DATA[grp][dset]['data'] # Write dataset attributes for attr in TEST_DATA[grp][dset]['attrs']: fid[grp][dset].attrs[attr] = \ TEST_DATA[grp][dset]['attrs'][attr] class TestIasiL2(unittest.TestCase): """Test IASI L2 reader.""" def setUp(self): """Create temporary data to test on.""" import datetime as dt import tempfile from satpy.readers.iasi_l2 import IASIL2HDF5 self.base_dir = tempfile.mkdtemp() save_test_data(self.base_dir) self.fname = os.path.join(self.base_dir, FNAME) self.fname_info = {'start_time': dt.datetime(2017, 9, 20, 10, 22, 17), 'end_time': dt.datetime(2017, 9, 20, 10, 29, 12), 'processing_time': dt.datetime(2017, 9, 20, 10, 35, 59), 'processing_location': 'kan', 'long_platform_id': 'metopb', 'instrument': 'iasi', 'platform_id': 'M01'} self.ftype_info = {'file_reader': IASIL2HDF5, 'file_patterns': ['{fname}.hdf'], 'file_type': 'iasi_l2_hdf5'} self.reader = IASIL2HDF5(self.fname, self.fname_info, self.ftype_info) def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names assert 'iasi' in scn.sensor_names def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) scn.load(scn.available_dataset_names()) def test_scene_load_pressure(self): """Test loading pressure data.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) scn.load(['pressure']) pres = scn['pressure'].compute() self.check_pressure(pres, scn.attrs) def test_scene_load_emissivity(self): """Test loading emissivity data.""" from satpy import Scene fname = 
os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) scn.load(['emissivity']) emis = scn['emissivity'].compute() self.check_emissivity(emis) def test_scene_load_sensing_times(self): """Test loading sensing times.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) scn.load(['sensing_time']) times = scn['sensing_time'].compute() self.check_sensing_times(times) def test_init(self): """Test reader initialization.""" self.assertEqual(self.reader.filename, self.fname) self.assertEqual(self.reader.finfo, self.fname_info) self.assertTrue(self.reader.lons is None) self.assertTrue(self.reader.lats is None) self.assertEqual(self.reader.mda['platform_name'], 'Metop-B') self.assertEqual(self.reader.mda['sensor'], 'iasi') def test_time_properties(self): """Test time properties.""" import datetime as dt self.assertTrue(isinstance(self.reader.start_time, dt.datetime)) self.assertTrue(isinstance(self.reader.end_time, dt.datetime)) def test_get_dataset(self): """Test get_dataset() for different datasets.""" from satpy.tests.utils import make_dataid info = {'eggs': 'spam'} key = make_dataid(name='pressure') data = self.reader.get_dataset(key, info).compute() self.check_pressure(data) self.assertTrue('eggs' in data.attrs) self.assertEqual(data.attrs['eggs'], 'spam') key = make_dataid(name='emissivity') data = self.reader.get_dataset(key, info).compute() self.check_emissivity(data) key = make_dataid(name='sensing_time') data = self.reader.get_dataset(key, info).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) def check_pressure(self, pres, attrs=None): """Test reading pressure dataset. Helper function. """ self.assertTrue(np.all(pres == 0.0)) self.assertEqual(pres.x.size, SCAN_WIDTH) self.assertEqual(pres.y.size, NUM_SCANLINES) self.assertEqual(pres.level.size, NUM_LEVELS) if attrs: self.assertEqual(pres.attrs['start_time'], attrs['start_time']) self.assertEqual(pres.attrs['end_time'], attrs['end_time']) self.assertTrue('long_name' in pres.attrs) self.assertTrue('units' in pres.attrs) def check_emissivity(self, emis): """Test reading emissivity dataset. Helper function. """ self.assertTrue(np.all(emis == 0.0)) self.assertEqual(emis.x.size, SCAN_WIDTH) self.assertEqual(emis.y.size, NUM_SCANLINES) self.assertTrue('emissivity_wavenumbers' in emis.attrs) def check_sensing_times(self, times): """Test reading sensing times. Helper function. 
""" # Times should be equal in blocks of four, but not beyond, so # there should be SCAN_WIDTH/4 different values for i in range(int(SCAN_WIDTH / 4)): self.assertEqual(np.unique(times[0, i*4:i*4+4]).size, 1) self.assertEqual(np.unique(times[0, :]).size, SCAN_WIDTH / 4) def test_read_dataset(self): """Test read_dataset() function.""" import h5py from satpy.readers.iasi_l2 import read_dataset from satpy.tests.utils import make_dataid with h5py.File(self.fname, 'r') as fid: key = make_dataid(name='pressure') data = read_dataset(fid, key).compute() self.check_pressure(data) key = make_dataid(name='emissivity') data = read_dataset(fid, key).compute() self.check_emissivity(data) # This dataset doesn't have any attributes key = make_dataid(name='ozone_total_column') data = read_dataset(fid, key).compute() self.assertEqual(len(data.attrs), 0) def test_read_geo(self): """Test read_geo() function.""" import h5py from satpy.readers.iasi_l2 import read_geo from satpy.tests.utils import make_dataid with h5py.File(self.fname, 'r') as fid: key = make_dataid(name='sensing_time') data = read_geo(fid, key).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) key = make_dataid(name='latitude') data = read_geo(fid, key).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) def test_form_datetimes(self): """Test _form_datetimes() function.""" from satpy.readers.iasi_l2 import _form_datetimes days = TEST_DATA['L1C']['SensingTime_day']['data'] msecs = TEST_DATA['L1C']['SensingTime_msec']['data'] times = _form_datetimes(days, msecs) self.check_sensing_times(times) satpy-0.34.0/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py000066400000000000000000000551321420401153000244540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unittesting the SEVIRI L2 BUFR reader.""" import os import sys import unittest import numpy as np # TDB: this test is based on test_seviri_l2_bufr.py and test_iasi_l2.py # This is a test IASI level 2 SO2 product message, take from a real # bufr file distributed over EUMETCAST msg = { 'unpack': 1, 'inputDelayedDescriptorReplicationFactor': 5, 'edition': 4, 'masterTableNumber': 0, 'bufrHeaderCentre': 254, 'bufrHeaderSubCentre': 0, 'updateSequenceNumber': 0, 'dataCategory': 3, 'internationalDataSubCategory': 255, 'dataSubCategory': 230, 'masterTablesVersionNumber': 31, 'localTablesVersionNumber': 0, 'typicalYear': 2020, 'typicalMonth': 2, 'typicalDay': 4, 'typicalHour': 8, 'typicalMinute': 59, 'typicalSecond': 0, 'numberOfSubsets': 120, 'observedData': 1, 'compressedData': 1, 'unexpandedDescriptors': np.array([ 1007, 1031, 25060, 2019, 2020, 4001, 4002, 4003, 4004, 4005, 4006, 5040, 201133, 5041, 201000, 5001, 6001, 5043, 7024, 5021, 7025, 5022, 7007, 40068, 7002, 15045, 12080, 102000, 31001, 7007, 15045], dtype=int), '#1#satelliteIdentifier': 4, '#1#centre': 254, '#1#softwareIdentification': 605, '#1#satelliteInstruments': 221, '#1#satelliteClassification': 61, '#1#year': 2020, '#1#month': 2, '#1#day': 4, '#1#hour': 9, '#1#minute': 1, '#1#second': 11, '#1#orbitNumber': 68984, '#1#scanLineNumber': 447, '#1#latitude': np.array([ -33.4055, -33.6659, -33.738, -33.4648, -33.263, -33.5027, -33.5673, -33.3172, -33.1332, -33.3564, -33.4151, -33.1832, -33.0132, -33.2232, -33.2771, -33.0596, -32.903, -33.1021, -33.1522, -32.9466, -32.7982, -32.9884, -33.0354, -32.8395, -32.7005, -32.8832, -32.9276, -32.7399, -32.6061, -32.7826, -32.8251, -32.644, -32.5168, -32.6883, -32.7292, -32.5537, -32.4261, -32.5934, -32.6331, -32.4621, -32.3397, -32.5036, -32.5425, -32.3752, -32.2537, -32.4151, -32.4534, -32.289, -32.1682, -32.3277, -32.3657, -32.2035, -32.0826, -32.2407, -32.2788, -32.1182, -31.9952, -32.1527, -32.1911, -32.0313, -31.9068, -32.0642, -32.1032, -31.9438, -31.8147, -31.9727, -32.0127, -31.8529, -31.7177, -31.8769, -31.9181, -31.7573, -31.6182, -31.7792, -31.8222, -31.6598, -31.5106, -31.674, -31.7191, -31.5545, -31.3962, -31.5628, -31.6107, -31.4431, -31.2727, -31.4434, -31.4947, -31.3233, -31.1375, -31.3131, -31.3686, -31.1926, -30.9867, -31.1684, -31.2293, -31.0476, -30.8201, -31.009, -31.0768, -30.8882, -30.6289, -30.8265, -30.9031, -30.7062, -30.4071, -30.6153, -30.7036, -30.4967, -30.146, -30.3672, -30.4712, -30.2521, -29.8276, -30.0649, -30.1911, -29.9569, -29.4268, -29.6844, -29.8436, -29.5903]), '#1#longitude': np.array([ 2.53790e+00, 2.49440e+00, 3.08690e+00, 3.12690e+00, 1.15600e+00, 1.11230e+00, 1.59640e+00, 1.63750e+00, -3.70000e-03, -4.73000e-02, 3.61900e-01, 4.03500e-01, -1.00010e+00, -1.04340e+00, -6.88300e-01, -6.46600e-01, -1.88040e+00, -1.92340e+00, -1.60890e+00, -1.56730e+00, -2.66750e+00, -2.71020e+00, -2.42680e+00, -2.38520e+00, -3.38640e+00, -3.42890e+00, -3.16970e+00, -3.12830e+00, -4.04920e+00, -4.09150e+00, -3.85140e+00, -3.81000e+00, -4.66850e+00, -4.71080e+00, -4.48590e+00, -4.44450e+00, -5.25210e+00, -5.29440e+00, -5.08140e+00, -5.03990e+00, -5.80970e+00, -5.85220e+00, -5.64840e+00, -5.60670e+00, -6.34640e+00, -6.38920e+00, -6.19250e+00, -6.15060e+00, -6.86700e+00, -6.91020e+00, -6.71870e+00, -6.67640e+00, -7.37770e+00, -7.42140e+00, -7.23330e+00, -7.19050e+00, -7.88100e+00, -7.92530e+00, -7.73920e+00, -7.69570e+00, -8.38370e+00, -8.42900e+00, -8.24320e+00, -8.19890e+00, -8.88730e+00, -8.93360e+00, -8.74660e+00, -8.70130e+00, -9.39480e+00, -9.44230e+00, -9.25260e+00, 
-9.20620e+00, -9.91570e+00, -9.96460e+00, -9.77050e+00, -9.72270e+00, -1.04496e+01, -1.05002e+01, -1.02999e+01, -1.02505e+01, -1.10049e+01, -1.10576e+01, -1.08489e+01, -1.07977e+01, -1.15859e+01, -1.16409e+01, -1.14216e+01, -1.13682e+01, -1.21993e+01, -1.22570e+01, -1.20240e+01, -1.19681e+01, -1.28575e+01, -1.29185e+01, -1.26682e+01, -1.26093e+01, -1.35688e+01, -1.36337e+01, -1.33615e+01, -1.32990e+01, -1.43504e+01, -1.44199e+01, -1.41196e+01, -1.40529e+01, -1.52201e+01, -1.52953e+01, -1.49585e+01, -1.48867e+01, -1.62074e+01, -1.62896e+01, -1.59045e+01, -1.58264e+01, -1.73549e+01, -1.74460e+01, -1.69944e+01, -1.69085e+01, -1.87277e+01, -1.88302e+01, -1.82832e+01, -1.81873e+01]), '#1#fieldOfViewNumber': np.array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120]), '#1#satelliteZenithAngle': np.array([ 56.64, 56.64, 58.38, 58.37, 52.15, 52.15, 53.8, 53.79, 47.84, 47.84, 49.42, 49.42, 43.67, 43.67, 45.21, 45.2, 39.59, 39.59, 41.1, 41.09, 35.59, 35.59, 37.08, 37.07, 31.65, 31.65, 33.11, 33.1, 27.75, 27.75, 29.2, 29.19, 23.89, 23.89, 25.33, 25.32, 20.07, 20.06, 21.49, 21.48, 16.26, 16.26, 17.67, 17.67, 12.47, 12.47, 13.88, 13.87, 8.7, 8.7, 10.1, 10.1, 4.95, 4.95, 6.34, 6.33, 1.33, 1.34, 2.64, 2.63, 2.72, 2.73, 1.43, 1.41, 6.44, 6.45, 5.05, 5.05, 10.19, 10.19, 8.79, 8.79, 13.97, 13.98, 12.57, 12.57, 17.77, 17.77, 16.35, 16.36, 21.58, 21.59, 20.16, 20.17, 25.42, 25.43, 23.99, 24., 29.29, 29.29, 27.84, 27.85, 33.21, 33.21, 31.75, 31.75, 37.16, 37.17, 35.68, 35.69, 41.19, 41.2, 39.69, 39.69, 45.3, 45.31, 43.76, 43.77, 49.52, 49.53, 47.94, 47.94, 53.89, 53.9, 52.25, 52.25, 58.48, 58.48, 56.74, 56.75]), '#1#bearingOrAzimuth': np.array([ 276.93, 278.61, 278.27, 276.61, 277.64, 279.42, 279.14, 277.38, 278.22, 280.11, 279.88, 278.01, 278.69, 280.72, 280.51, 278.51, 279.09, 281.3, 281.11, 278.94, 279.41, 281.83, 281.64, 279.28, 279.68, 282.36, 282.18, 279.58, 279.88, 282.9, 282.71, 279.79, 280.02, 283.49, 283.29, 279.96, 279.98, 284.07, 283.84, 279.96, 279.84, 284.85, 284.57, 279.89, 279.4, 285.9, 285.49, 279.57, 278.31, 287.59, 286.87, 278.78, 275.22, 291.5, 289.61, 276.76, 252.48, 315.67, 299.21, 268.02, 117.92, 88.23, 72.78, 132.31, 109.86, 97.41, 95.43, 111.52, 108.02, 100.14, 99.35, 108.59, 107.2, 101.44, 100.97, 107.44, 106.92, 102.37, 102.04, 107.04, 106.84, 103.07, 102.81, 106.88, 106.87, 103.65, 103.42, 106.87, 107., 104.18, 103.97, 106.97, 107.2, 104.69, 104.49, 107.14, 107.44, 105.16, 104.97, 107.35, 107.74, 105.67, 105.47, 107.64, 108.11, 106.2, 105.99, 107.98, 108.54, 106.76, 106.53, 108.38, 109.06, 107.39, 107.14, 108.87, 109.7, 108.13, 107.83, 109.46]), '#1#solarZenithAngle': np.array([ 44.36, 44.44, 43.98, 43.89, 45.47, 45.54, 45.16, 45.08, 46.4, 46.47, 46.14, 46.07, 47.21, 47.27, 46.99, 46.92, 47.92, 47.98, 47.73, 47.67, 48.56, 48.62, 48.39, 48.33, 49.15, 49.21, 49., 48.94, 49.7, 49.75, 49.55, 49.5, 50.21, 50.26, 50.07, 50.02, 50.69, 50.74, 50.56, 50.51, 51.15, 51.2, 51.03, 50.98, 51.59, 51.64, 51.48, 51.43, 52.02, 52.07, 51.91, 51.87, 52.45, 52.5, 52.34, 52.29, 52.87, 52.92, 52.76, 52.71, 53.29, 53.34, 53.18, 53.14, 53.71, 53.76, 53.6, 53.56, 54.14, 
54.18, 54.03, 53.98, 54.58, 54.62, 54.46, 54.41, 55.03, 55.08, 54.91, 54.86, 55.50, 55.55, 55.37, 55.32, 55.99, 56.04, 55.85, 55.81, 56.51, 56.56, 56.37, 56.32, 57.08, 57.13, 56.91, 56.86, 57.69, 57.74, 57.51, 57.46, 58.36, 58.42, 58.16, 58.1, 59.11, 59.17, 58.88, 58.82, 59.98, 60.04, 59.70, 59.64, 60.98, 61.05, 60.65, 60.59, 62.20, 62.27, 61.78, 61.72]), '#1#solarAzimuth': np.array([ 78.89, 78.66, 78.16, 78.41, 80.00, 79.80, 79.40, 79.62, 80.92, 80.74, 80.40, 80.6, 81.69, 81.53, 81.24, 81.42, 82.36, 82.21, 81.96, 82.12, 82.96, 82.82, 82.60, 82.74, 83.49, 83.36, 83.16, 83.3, 83.98, 83.86, 83.68, 83.80, 84.43, 84.32, 84.15, 84.27, 84.86, 84.75, 84.59, 84.7, 85.26, 85.15, 85., 85.11, 85.64, 85.54, 85.40, 85.5, 86.01, 85.91, 85.77, 85.88, 86.37, 86.28, 86.14, 86.24, 86.73, 86.63, 86.50, 86.59, 87.07, 86.98, 86.85, 86.94, 87.42, 87.33, 87.20, 87.29, 87.77, 87.68, 87.55, 87.64, 88.13, 88.04, 87.90, 87.99, 88.49, 88.41, 88.27, 88.36, 88.87, 88.78, 88.64, 88.73, 89.26, 89.17, 89.02, 89.11, 89.67, 89.59, 89.43, 89.51, 90.11, 90.02, 89.85, 89.94, 90.58, 90.49, 90.31, 90.4, 91.09, 91., 90.81, 90.89, 91.66, 91.57, 91.35, 91.44, 92.29, 92.20, 91.95, 92.04, 93.02, 92.93, 92.64, 92.73, 93.87, 93.79, 93.45, 93.54]), '#1#height': 83270, '#1#generalRetrievalQualityFlagForSo2': 9, '#2#height': -1e+100, '#1#sulphurDioxide': -1e+100, '#1#brightnessTemperatureRealPart': np.array([ 0.11, 0.11, -0.07, 0.08, 0.13, 0.15, 0.10, 0.06, -0.02, -0.03, 0.08, 0.17, -0.05, 0.12, 0.08, -0.06, 0.15, 0.08, -0.04, -0.01, 0.06, 0.17, -0.01, 0.15, 0.18, 0.05, 0.11, -0.03, 0.09, 0.02, 0.04, 0.10, 0.00, 0.00, 0.01, 0.18, -0.20, 0.10, 0.00, 0.13, -0.15, 0.09, 0.09, -0.10, 0.04, 0.06, -0.01, -0.03, -0.07, -0.05, -0.07, -0.09, -0.03, -0.13, -0.01, 0.10, -0.21, -0.23, -0.18, -0.08, -0.09, -0.19, -0.07, -0.08, -0.19, -0.24, -0.24, -0.05, -0.03, -0.08, -0.01, -0.07, -0.03, -0.38, -0.39, -0.22, -0.28, -0.15, -0.10, -0.26, -0.18, -0.11, -0.31, -0.18, -0.19, -0.26, -0.22, -0.19, 0.02, -0.19, -0.01, -0.38, -0.06, -0.34, -0.31, -0.19, 0.08, -0.05, -0.08, 0.41, -0.19, -0.22, -0.03, 0.11, -0.26, -0.33, -0.08, 0.03, -0.05, 0.02, 0.17, -0.10, 0.01, 0.01, 0.05, 0.01, 0.15, -0.06, -0.14, 0.38]), '#3#height': 7000, '#2#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 2.3e+000, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), 
'#4#height': 10000, '#3#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 8.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), '#5#height': 13000, '#4#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 5.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), '#6#height': 16000, '#5#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 
-1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 4.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), '#7#height': 25000, '#6#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 5.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]) } # the notional filename that would contain the above test message data FILENAME = 'W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin' # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { 'reception_location': 'EUMETSAT-Darmstadt', 'platform': 'METOPA', 'instrument': 'IASI', 'start_time': '20200204091455', 'perigee': '68977', 'species': 'so2', 'level': 'l2' } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { 'file_type': 'iasi_l2_so2_bufr', 'file_reader': 'IASIL2SO2BUFR' } # number of cross track samples in one IASI scan SCAN_WIDTH = 120 def save_test_data(path): """Save the test file to the indicated directory.""" import eccodes as ec with open(os.path.join(path, FILENAME), "wb") as f: for m in [msg]: buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') for key in m: val = m[key] if np.isscalar(val): ec.codes_set(buf, key, val) else: ec.codes_set_array(buf, key, val) ec.codes_set(buf, 'pack', 1) ec.codes_write(buf, f) ec.codes_release(buf) class TestIasiL2So2Bufr(unittest.TestCase): """Test IASI l2 SO2 loader.""" def setUp(self): """Create temporary file to perform tests with.""" import tempfile from satpy.readers.iasi_l2_so2_bufr import IASIL2SO2BUFR self.base_dir = tempfile.mkdtemp() save_test_data(self.base_dir) self.fname = os.path.join(self.base_dir, FILENAME) self.fname_info = FILENAME_INFO self.ftype_info = FILETYPE_INFO self.reader = IASIL2SO2BUFR(self.fname, self.fname_info, self.ftype_info) def tearDown(self): 
"""Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names assert 'iasi' in scn.sensor_names @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) scn.load(scn.available_dataset_names()) @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) for name in scn.available_dataset_names(): scn.load([name]) loaded_values = scn[name].values fill_value = scn[name].attrs['fill_value'] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) key = scn[name].attrs['key'] original_values = msg[key] # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): self.assertTrue(np.allclose(original_values, loaded_values_nan_filled)) @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_init(self): """Test reader initialization.""" self.assertTrue(True) satpy-0.34.0/satpy/tests/reader_tests/test_mersi2_l1b.py000066400000000000000000000630571420401153000232750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'mersi2_l1b' reader.""" import os import unittest from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def make_test_data(self, dims): """Make test data.""" return xr.DataArray(da.from_array(np.ones([dim for dim in dims], dtype=np.float32) * 10, [dim for dim in dims])) def _get_calibration(self, num_scans, rows_per_scan): calibration = { 'Calibration/VIS_Cal_Coeff': xr.DataArray( da.ones((19, 3), chunks=1024), attrs={'Slope': np.array([1.] * 19), 'Intercept': np.array([0.] 
* 19)}, dims=('_bands', '_coeffs')), 'Calibration/IR_Cal_Coeff': xr.DataArray( da.ones((6, 4, num_scans), chunks=1024), attrs={'Slope': np.array([1.] * 6), 'Intercept': np.array([0.] * 6)}, dims=('_bands', '_coeffs', '_scans')), } return calibration def _get_1km_data(self, num_scans, rows_per_scan, num_cols): data = { 'Data/EV_1KM_RefSB': xr.DataArray( da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 15), 'Intercept': np.array([0.] * 15), 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], 'long_name': b'1km Earth View Science Data', }, dims=('_ref_bands', '_rows', '_cols')), 'Data/EV_1KM_Emissive': xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 4), 'Intercept': np.array([0.] * 4), 'FillValue': 65535, 'units': 'mW/ (m2 cm-1 sr)', 'valid_range': [0, 25000], 'long_name': b'1km Emissive Bands Earth View ' b'Science Data', }, dims=('_ir_bands', '_rows', '_cols')), 'Data/EV_250_Aggr.1KM_RefSB': xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 4), 'Intercept': np.array([0.] * 4), 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], 'long_name': b'250m Reflective Bands Earth View ' b'Science Data Aggregated to 1 km' }, dims=('_ref250_bands', '_rows', '_cols')), 'Data/EV_250_Aggr.1KM_Emissive': xr.DataArray( da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 2), 'Intercept': np.array([0.] * 2), 'FillValue': 65535, 'units': 'mW/ (m2 cm-1 sr)', 'valid_range': [0, 4095], 'long_name': b'250m Emissive Bands Earth View ' b'Science Data Aggregated to 1 km' }, dims=('_ir250_bands', '_rows', '_cols')), } return data def _get_250m_data(self, num_scans, rows_per_scan, num_cols): data = { 'Data/EV_250_RefSB_b1': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_RefSB_b2': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_RefSB_b3': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_RefSB_b4': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_Emissive_b24': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), 'FillValue': 65535, 'units': 'mW/ (m2 cm-1 sr)', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_Emissive_b25': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] 
* 1), 'FillValue': 65535, 'units': 'mW/ (m2 cm-1 sr)', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), } return data def _get_geo_data(self, num_scans, rows_per_scan, num_cols, prefix='Geolocation/'): geo = { prefix + 'Longitude': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), 'units': 'degree', 'valid_range': [-90, 90], }, dims=('_rows', '_cols')), prefix + 'Latitude': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), 'units': 'degree', 'valid_range': [-180, 180], }, dims=('_rows', '_cols')), prefix + 'SensorZenith': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ 'Slope': np.array([.01] * 1), 'Intercept': np.array([0.] * 1), 'units': 'degree', 'valid_range': [0, 28000], }, dims=('_rows', '_cols')), } return geo def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" rows_per_scan = self.filetype_info.get('rows_per_scan', 10) num_scans = 2 num_cols = 2048 global_attrs = { '/attr/Observing Beginning Date': '2019-01-01', '/attr/Observing Ending Date': '2019-01-01', '/attr/Observing Beginning Time': '18:27:39.720', '/attr/Observing Ending Time': '18:38:36.728', '/attr/Satellite Name': 'FY-3D', '/attr/Sensor Identification Code': 'MERSI', } data = {} if self.filetype_info['file_type'] == 'mersi2_l1b_1000': data = self._get_1km_data(num_scans, rows_per_scan, num_cols) global_attrs['/attr/TBB_Trans_Coefficient_A'] = np.array([1.0] * 6) global_attrs['/attr/TBB_Trans_Coefficient_B'] = np.array([0.0] * 6) elif self.filetype_info['file_type'] == 'mersi2_l1b_250': data = self._get_250m_data(num_scans, rows_per_scan, num_cols * 2) global_attrs['/attr/TBB_Trans_Coefficient_A'] = np.array([0.0] * 6) global_attrs['/attr/TBB_Trans_Coefficient_B'] = np.array([0.0] * 6) elif self.filetype_info['file_type'] == 'mersi2_l1b_1000_geo': data = self._get_geo_data(num_scans, rows_per_scan, num_cols) elif self.filetype_info['file_type'] == 'mersi2_l1b_250_geo': data = self._get_geo_data(num_scans, rows_per_scan, num_cols * 2, prefix='') test_content = {} test_content.update(global_attrs) test_content.update(data) test_content.update(self._get_calibration(num_scans, rows_per_scan)) return test_content class TestMERSI2L1BReader(unittest.TestCase): """Test MERSI2 L1B Reader.""" yaml_file = "mersi2_l1b.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mersi2_l1b import MERSI2L1B self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MERSI2L1B, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_fy3d_all_resolutions(self): """Test loading data when all resolutions are available.""" from satpy.dataset.data_dict import get_key from satpy.readers import load_reader from satpy.tests.utils import make_dataid filenames = [ 'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF', ] reader = load_reader(self.reader_configs) files = 
reader.select_files_from_pathnames(filenames) self.assertEqual(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that we have multiple resolutions for: # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids for band_name in ('1', '2', '3', '4', '24', '25'): if band_name in ('24', '25'): # don't know how to get radiance for IR bands num_results = 2 else: num_results = 3 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(num_results, len(res)) ds_id = make_dataid(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(num_results, len(res)) res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) self.assertEqual(8, len(res)) self.assertEqual((2 * 40, 2048 * 2), res['1'].shape) self.assertEqual('reflectance', res['1'].attrs['calibration']) self.assertEqual('%', res['1'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['2'].shape) self.assertEqual('reflectance', res['2'].attrs['calibration']) self.assertEqual('%', res['2'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['3'].shape) self.assertEqual('reflectance', res['3'].attrs['calibration']) self.assertEqual('%', res['3'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['4'].shape) self.assertEqual('reflectance', res['4'].attrs['calibration']) self.assertEqual('%', res['4'].attrs['units']) self.assertEqual((2 * 10, 2048), res['5'].shape) self.assertEqual('reflectance', res['5'].attrs['calibration']) self.assertEqual('%', res['5'].attrs['units']) self.assertEqual((2 * 10, 2048), res['20'].shape) self.assertEqual('brightness_temperature', res['20'].attrs['calibration']) self.assertEqual('K', res['20'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['24'].shape) self.assertEqual('brightness_temperature', res['24'].attrs['calibration']) self.assertEqual('K', res['24'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['25'].shape) self.assertEqual('brightness_temperature', res['25'].attrs['calibration']) self.assertEqual('K', res['25'].attrs['units']) def test_fy3d_counts_calib(self): """Test loading data at counts calibration.""" from satpy.readers import load_reader from satpy.tests.utils import make_dataid filenames = [ 'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) ds_ids = [] for band_name in ['1', '2', '3', '4', '5', '20', '24', '25']: ds_ids.append(make_dataid(name=band_name, calibration='counts')) ds_ids.append(make_dataid(name='satellite_zenith_angle')) res = reader.load(ds_ids) self.assertEqual(9, len(res)) self.assertEqual((2 * 40, 2048 * 2), res['1'].shape) self.assertEqual('counts', res['1'].attrs['calibration']) self.assertEqual(res['1'].dtype, np.uint16) self.assertEqual('1', res['1'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['2'].shape) self.assertEqual('counts', res['2'].attrs['calibration']) self.assertEqual(res['2'].dtype, np.uint16) self.assertEqual('1', res['2'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), 
res['3'].shape) self.assertEqual('counts', res['3'].attrs['calibration']) self.assertEqual(res['3'].dtype, np.uint16) self.assertEqual('1', res['3'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['4'].shape) self.assertEqual('counts', res['4'].attrs['calibration']) self.assertEqual(res['4'].dtype, np.uint16) self.assertEqual('1', res['4'].attrs['units']) self.assertEqual((2 * 10, 2048), res['5'].shape) self.assertEqual('counts', res['5'].attrs['calibration']) self.assertEqual(res['5'].dtype, np.uint16) self.assertEqual('1', res['5'].attrs['units']) self.assertEqual((2 * 10, 2048), res['20'].shape) self.assertEqual('counts', res['20'].attrs['calibration']) self.assertEqual(res['20'].dtype, np.uint16) self.assertEqual('1', res['20'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['24'].shape) self.assertEqual('counts', res['24'].attrs['calibration']) self.assertEqual(res['24'].dtype, np.uint16) self.assertEqual('1', res['24'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['25'].shape) self.assertEqual('counts', res['25'].attrs['calibration']) self.assertEqual(res['25'].dtype, np.uint16) self.assertEqual('1', res['25'].attrs['units']) def test_fy3d_rad_calib(self): """Test loading data at radiance calibration.""" from satpy.readers import load_reader from satpy.tests.utils import make_dataid filenames = [ 'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) ds_ids = [] for band_name in ['1', '2', '3', '4', '5']: ds_ids.append(make_dataid(name=band_name, calibration='radiance')) res = reader.load(ds_ids) self.assertEqual(5, len(res)) self.assertEqual((2 * 40, 2048 * 2), res['1'].shape) self.assertEqual('radiance', res['1'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['1'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['2'].shape) self.assertEqual('radiance', res['2'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['2'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['3'].shape) self.assertEqual('radiance', res['3'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['3'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['4'].shape) self.assertEqual('radiance', res['4'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['4'].attrs['units']) self.assertEqual((2 * 10, 2048), res['5'].shape) self.assertEqual('radiance', res['5'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['5'].attrs['units']) def test_fy3d_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" from satpy.dataset.data_dict import get_key from satpy.readers import load_reader from satpy.tests.utils import make_dataid filenames = [ 'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(2, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that we have multiple resolutions for: # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids for band_name in ('1', 
'2', '3', '4', '24', '25'): if band_name in ('24', '25'): # don't know how to get radiance for IR bands num_results = 2 else: num_results = 3 ds_id = make_dataid(name=band_name, resolution=250) with pytest.raises(KeyError): res = get_key(ds_id, available_datasets, num_results=num_results, best=False) ds_id = make_dataid(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(num_results, len(res)) res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) self.assertEqual(8, len(res)) self.assertEqual((2 * 10, 2048), res['1'].shape) self.assertEqual('reflectance', res['1'].attrs['calibration']) self.assertEqual('%', res['1'].attrs['units']) self.assertEqual((2 * 10, 2048), res['2'].shape) self.assertEqual('reflectance', res['2'].attrs['calibration']) self.assertEqual('%', res['2'].attrs['units']) self.assertEqual((2 * 10, 2048), res['3'].shape) self.assertEqual('reflectance', res['3'].attrs['calibration']) self.assertEqual('%', res['3'].attrs['units']) self.assertEqual((2 * 10, 2048), res['4'].shape) self.assertEqual('reflectance', res['4'].attrs['calibration']) self.assertEqual('%', res['4'].attrs['units']) self.assertEqual((2 * 10, 2048), res['5'].shape) self.assertEqual('reflectance', res['5'].attrs['calibration']) self.assertEqual('%', res['5'].attrs['units']) self.assertEqual((2 * 10, 2048), res['20'].shape) self.assertEqual('brightness_temperature', res['20'].attrs['calibration']) self.assertEqual('K', res['20'].attrs['units']) self.assertEqual((2 * 10, 2048), res['24'].shape) self.assertEqual('brightness_temperature', res['24'].attrs['calibration']) self.assertEqual('K', res['24'].attrs['units']) self.assertEqual((2 * 10, 2048), res['25'].shape) self.assertEqual('brightness_temperature', res['25'].attrs['calibration']) self.assertEqual('K', res['25'].attrs['units']) def test_fy3d_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" from satpy.dataset.data_dict import get_key from satpy.readers import load_reader from satpy.tests.utils import make_dataid filenames = [ 'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertEqual(2, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that we have multiple resolutions for: # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids for band_name in ('1', '2', '3', '4', '24', '25'): if band_name in ('24', '25'): # don't know how to get radiance for IR bands num_results = 2 else: num_results = 3 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(num_results, len(res)) ds_id = make_dataid(name=band_name, resolution=1000) with pytest.raises(KeyError): res = get_key(ds_id, available_datasets, num_results=num_results, best=False) res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) self.assertEqual(6, len(res)) self.assertRaises(KeyError, res.__getitem__, '5') self.assertRaises(KeyError, res.__getitem__, '20') self.assertEqual((2 * 40, 2048 * 2), res['1'].shape) self.assertEqual('reflectance', res['1'].attrs['calibration']) self.assertEqual('%', res['1'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['2'].shape) self.assertEqual('reflectance', 
res['2'].attrs['calibration']) self.assertEqual('%', res['2'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['3'].shape) self.assertEqual('reflectance', res['3'].attrs['calibration']) self.assertEqual('%', res['3'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['4'].shape) self.assertEqual('reflectance', res['4'].attrs['calibration']) self.assertEqual('%', res['4'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['24'].shape) self.assertEqual('brightness_temperature', res['24'].attrs['calibration']) self.assertEqual('K', res['24'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['25'].shape) self.assertEqual('brightness_temperature', res['25'].attrs['calibration']) self.assertEqual('K', res['25'].attrs['units']) satpy-0.34.0/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py000066400000000000000000000213571420401153000246400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . """Module for testing the satpy.readers.tropomi_l2 module.""" import itertools import os import unittest from datetime import datetime from unittest import mock import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (721, 1440) DEFAULT_DATE = datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(-90, 90, DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(-180, 180, DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_FLOAT_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_DATE_DATA = np.clip(DEFAULT_FILE_FLOAT_DATA, 0, 1049) DEFAULT_FILE_UBYTE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=np.ubyte) float_variables = ['tpwGrid', 'tpwGridPrior', 'tpwGridSubseq', 'footGridPrior', 'footGridSubseq'] date_variables = ['timeAwayGridPrior', 'timeAwayGridSubseq'] ubyte_variables = ['satGridPrior', 'satGridSubseq'] file_content_attr = dict() class FakeNetCDF4FileHandlerMimicLow(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content for lower resolution files.""" dt_s = filename_info.get('start_time', DEFAULT_DATE) dt_e = filename_info.get('end_time', DEFAULT_DATE) if filetype_info['file_type'] == 'mimicTPW2_comp': file_content = { '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), '/attr/platform_shortname': 'aggregated microwave', '/attr/sensor': 'mimic', } file_content['latArr'] = DEFAULT_LAT file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) file_content['latArr/attr/units'] = 'degress_north' file_content['lonArr'] = DEFAULT_LON file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) file_content['lonArr/attr/units'] = 'degrees_east' 
file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] for float_var in float_variables: file_content[float_var] = DEFAULT_FILE_FLOAT_DATA.reshape(DEFAULT_FILE_SHAPE) file_content['{}/shape'.format(float_var)] = DEFAULT_FILE_SHAPE file_content_attr[float_var] = {"units": "mm"} for date_var in date_variables: file_content[date_var] = DEFAULT_FILE_DATE_DATA.reshape(DEFAULT_FILE_SHAPE) file_content['{}/shape'.format(date_var)] = DEFAULT_FILE_SHAPE file_content_attr[date_var] = {"units": "minutes"} for ubyte_var in ubyte_variables: file_content[ubyte_var] = DEFAULT_FILE_UBYTE_DATA.reshape(DEFAULT_FILE_SHAPE) file_content['{}/shape'.format(ubyte_var)] = DEFAULT_FILE_SHAPE file_content_attr[ubyte_var] = {"source_key": "Key: 0: None, 1: NOAA-N, 2: NOAA-P, 3: Metop-A, \ 4: Metop-B, 5: SNPP, 6: SSMI-17, 7: SSMI-18"} # convert to xarrays for key, val in file_content.items(): if key == 'lonArr' or key == 'latArr': file_content[key] = xr.DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: file_content[key] = xr.DataArray(val, dims=('y', 'x'), attrs=file_content_attr[key]) else: file_content[key] = xr.DataArray(val) for key in itertools.chain(float_variables, ubyte_variables): file_content[key].attrs['_FillValue'] = -999.0 file_content[key].attrs['name'] = key file_content[key].attrs['file_key'] = key file_content[key].attrs['file_type'] = self.filetype_info['file_type'] else: msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) raise AssertionError(msg) return file_content class TestMimicTPW2Reader(unittest.TestCase): """Test Mimic Reader.""" yaml_file = "mimicTPW2_comp.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimicLow,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'comp20190619.130000.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_mimic_float(self): """Load TPW mimic float data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'comp20190619.130000.nc', ]) r.create_filehandlers(loadables) ds = r.load(float_variables) self.assertEqual(len(ds), len(float_variables)) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') self.assertEqual(d.attrs['sensor'], 'mimic') self.assertEqual(d.attrs['units'], 'mm') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def test_load_mimic_timedelta(self): """Load TPW mimic timedelta data (data latency variables).""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): loadables = 
r.select_files_from_pathnames([ 'comp20190619.130000.nc', ]) r.create_filehandlers(loadables) ds = r.load(date_variables) self.assertEqual(len(ds), len(date_variables)) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') self.assertEqual(d.attrs['sensor'], 'mimic') self.assertEqual(d.attrs['units'], 'minutes') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertEqual(d.dtype, DEFAULT_FILE_DTYPE) def test_load_mimic_ubyte(self): """Load TPW mimic sensor grids.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'comp20190619.130000.nc', ]) r.create_filehandlers(loadables) ds = r.load(ubyte_variables) self.assertEqual(len(ds), len(ubyte_variables)) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') self.assertEqual(d.attrs['sensor'], 'mimic') self.assertIn('source_key', d.attrs) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertEqual(d.dtype, np.uint8) satpy-0.34.0/satpy/tests/reader_tests/test_mimic_TPW2_nc.py000066400000000000000000000126131420401153000237200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
"""Module for testing the satpy.readers.tropomi_l2 module.""" import os import unittest from datetime import datetime from unittest import mock import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (9001, 18000) DEFAULT_LAT = np.linspace(-90, 90, DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(-180, 180, DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) file_content_units = dict() class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray dt_s = filename_info.get('start_time', datetime(2019, 6, 19, 13, 0)) dt_e = filename_info.get('end_time', datetime(2019, 6, 19, 13, 0)) if filetype_info['file_type'] == 'mimicTPW2_comp': file_content = { '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), '/attr/platform_shortname': 'aggregated microwave', '/attr/sensor': 'mimic', } file_content['latArr'] = DEFAULT_LAT file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) file_content['latArr/attr/units'] = 'degress_north' file_content['lonArr'] = DEFAULT_LON file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) file_content['lonArr/attr/units'] = 'degrees_east' file_content['tpwGrid'] = DEFAULT_FILE_DATA file_content['tpwGrid/shape'] = DEFAULT_FILE_SHAPE file_content_units['tpwGrid'] = 'mm' file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] # convert to xarrays for key, val in file_content.items(): if key == 'lonArr' or key == 'latArr': file_content[key] = DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: file_content[key] = DataArray(val, dims=('y', 'x'), attrs={"units": file_content_units[key]}) else: file_content[key] = DataArray(val) else: msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) raise AssertionError(msg) return file_content class TestMimicTPW2Reader(unittest.TestCase): """Test Mimic Reader.""" yaml_file = "mimicTPW2_comp.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimic,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'comp20190619.130000.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_mimic(self): """Load Mimic data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 
'comp20190619.130000.nc', ]) r.create_filehandlers(loadables) ds = r.load(['tpwGrid']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') self.assertEqual(d.attrs['sensor'], 'mimic') self.assertIn('area', d.attrs) self.assertIn('units', d.attrs) self.assertIsNotNone(d.attrs['area']) satpy-0.34.0/satpy/tests/reader_tests/test_mirs.py000066400000000000000000000350171420401153000223030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . """Module for testing the satpy.readers.tropomi_l2 module.""" import os from datetime import datetime from unittest import mock import numpy as np import pytest import xarray as xr METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc" NPP_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r6_npp_s201702061601000_e201702061607000_c202012201658410.nc" N20_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n20_s201702061601000_e201702061607000_c202012201658410.nc" OTHER_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_gpm_s201702061601000_e201702061607000_c202010080001310.nc" EXAMPLE_FILES = [METOP_FILE, NPP_MIRS_L2_SWATH, OTHER_MIRS_L2_SWATH] N_CHANNEL = 22 N_FOV = 96 N_SCANLINE = 100 DEFAULT_FILE_DTYPE = np.float64 DEFAULT_2D_SHAPE = (N_SCANLINE, N_FOV) DEFAULT_DATE = datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(23.09356, 36.42844, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) FREQ = xr.DataArray([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], dims='Channel', attrs={'description': "Central Frequencies (GHz)"}) POLO = xr.DataArray([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3][:N_CHANNEL], dims='Channel', attrs={'description': "Polarizations"}) DS_IDS = ['RR', 'longitude', 'latitude'] TEST_VARS = ['btemp_88v', 'btemp_165h', 'btemp_23v', 'RR', 'Sfc_type'] DEFAULT_UNITS = {'btemp_88v': 'K', 'btemp_165h': 'K', 'btemp_23v': 'K', 'RR': 'mm/hr', 'Sfc_type': "1"} PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"} SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"} START_TIME = datetime(2017, 2, 6, 16, 1, 0) END_TIME = datetime(2017, 2, 6, 16, 7, 0) def fake_coeff_from_fn(fn): """Create Fake Coefficients.""" ameans = np.random.uniform(261, 267, N_CHANNEL) locations = [ [1, 2], [1, 2], [3, 4, 5], [3, 4, 5], [4, 5, 6], [5, 6, 7], [6, 7, 8], [7, 8], [9, 10, 11], [10, 11], [10, 11, 12], [11, 12, 13], [12, 13], [12, 13, 14], [14, 15], [1, 16], [17, 18], [18, 19], [18, 19, 20], [19, 20, 21], [20, 21, 22], [21, 22], ] all_nchx = [len(loc) for loc in locations] coeff_str = [] for idx in range(1, N_CHANNEL + 1): nx = idx - 1 coeff_str.append('\n') next_line = ' {} {} {}\n'.format(idx, all_nchx[nx], ameans[nx]) 
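# header line for this channel: channel number, number of contributing channels, channel mean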
coeff_str.append(next_line) next_line = ' {}\n'.format(" ".join([str(x) for x in locations[idx - 1]])) coeff_str.append(next_line) for fov in range(1, N_FOV+1): random_coeff = np.random.rand(all_nchx[nx]) random_coeff = np.ones(all_nchx[nx]) str_coeff = ' '.join([str(x) for x in random_coeff]) random_means = np.random.uniform(261, 267, all_nchx[nx]) random_means = np.zeros(all_nchx[nx]) str_means = ' '.join([str(x) for x in random_means]) error_val = np.random.uniform(0, 4) coeffs_line = ' {:>2} {:>2} {} {} {}\n'.format(idx, fov, str_coeff, str_means, error_val) coeff_str.append(coeffs_line) return coeff_str def _get_datasets_with_attributes(**kwargs): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). reshape(N_SCANLINE, N_FOV, N_CHANNEL), attrs={'long_name': "Channel Temperature (K)", 'units': "Kelvin", 'coordinates': "Longitude Latitude Freq", 'scale_factor': 0.01, '_FillValue': -999, 'valid_range': [0, 50000]}, dims=('Scanline', 'Field_of_view', 'Channel')) rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), attrs={'long_name': "Rain Rate (mm/hr)", 'units': "mm/hr", 'coordinates': "Longitude Latitude", 'scale_factor': 0.1, '_FillValue': -999, 'valid_range': [0, 1000]}, dims=('Scanline', 'Field_of_view')) sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), attrs={'description': "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", 'units': "1", 'coordinates': "Longitude Latitude", '_FillValue': -999, 'valid_range': [0, 3] }, dims=('Scanline', 'Field_of_view')) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), attrs={'long_name': "Latitude of the view (-90,90)"}, dims=('Scanline', 'Field_of_view')) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), attrs={'long_name': "Longitude of the view (-180,180)"}, dims=('Scanline', 'Field_of_view')) ds_vars = { 'Freq': FREQ, 'Polo': POLO, 'BT': bt, 'RR': rr, 'Sfc_type': sfc_type, 'Latitude': latitude, 'Longitude': longitude } attrs = {'missing_value': -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds def _get_datasets_with_less_attributes(): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), attrs={'long_name': "Channel Temperature (K)", 'scale_factor': 0.01}, dims=('Scanline', 'Field_of_view', 'Channel')) rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), attrs={'long_name': "Rain Rate (mm/hr)", 'scale_factor': 0.1}, dims=('Scanline', 'Field_of_view')) sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), attrs={'description': "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, dims=('Scanline', 'Field_of_view')) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), attrs={'long_name': "Latitude of the view (-90,90)"}, dims=('Scanline', 'Field_of_view')) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), attrs={"long_name": "Longitude of the view (-180,180)"}, dims=('Scanline', 'Field_of_view')) ds_vars = { 'Freq': FREQ, 'Polo': POLO, 'BT': bt, 'RR': rr, 'Sfc_type': sfc_type, 'Longitude': longitude, 'Latitude': latitude } attrs = {'missing_value': -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds def fake_open_dataset(filename, **kwargs): """Create a Dataset similar to reading an actual file with xarray.open_dataset.""" if filename == METOP_FILE: return _get_datasets_with_less_attributes() return _get_datasets_with_attributes() class TestMirsL2_NcReader: """Test mirs Reader.""" yaml_file = "mirs.yaml" def setup_method(self): """Read fake data.""" from satpy._config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), [ ([METOP_FILE], 1), ([NPP_MIRS_L2_SWATH], 1), ([OTHER_MIRS_L2_SWATH], 1), ] ) def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) assert len(loadables) == expected_loadables r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers @pytest.mark.parametrize( ("filenames", "expected_datasets"), [ ([METOP_FILE], DS_IDS), ([NPP_MIRS_L2_SWATH], DS_IDS), ([OTHER_MIRS_L2_SWATH], DS_IDS), ] ) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) avails = list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails @staticmethod def _check_area(data_arr): from pyresample.geometry import SwathDefinition area = data_arr.attrs['area'] assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_arr): assert '_FillValue' not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert data_arr.dtype.type == np.float64 @staticmethod def _check_valid_range(data_arr, test_valid_range): # valid_range is popped out of data_arr.attrs when it is applied assert 'valid_range' not in data_arr.attrs assert data_arr.data.min() >= test_valid_range[0] assert data_arr.data.max() <= test_valid_range[1] @staticmethod def _check_fill_value(data_arr, test_fill_value): 
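# the declared fill value must be gone from the attrs and masked out of the loaded data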
assert '_FillValue' not in data_arr.attrs assert not (data_arr.data == test_fill_value).any() @staticmethod def _check_attrs(data_arr, platform_name): attrs = data_arr.attrs assert 'scale_factor' not in attrs assert 'platform_name' in attrs assert attrs['platform_name'] == platform_name assert attrs['start_time'] == START_TIME assert attrs['end_time'] == END_TIME @pytest.mark.parametrize( ("filenames", "loadable_ids", "platform_name"), [ ([METOP_FILE], TEST_VARS, "metop-a"), ([NPP_MIRS_L2_SWATH], TEST_VARS, "npp"), ([N20_MIRS_L2_SWATH], TEST_VARS, "noaa-20"), ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), ] ) @pytest.mark.parametrize('reader_kw', [{}, {'limb_correction': False}]) def test_basic_load(self, filenames, loadable_ids, platform_name, reader_kw): """Test that variables are loaded properly.""" from satpy.readers import load_reader with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables, fh_kwargs=reader_kw) with mock.patch('satpy.readers.mirs.read_atms_coeff_to_string') as \ fd, mock.patch('satpy.readers.mirs.retrieve'): fd.side_effect = fake_coeff_from_fn loaded_data_arrs = r.load(loadable_ids) assert len(loaded_data_arrs) == len(loadable_ids) test_data = fake_open_dataset(filenames[0]) for _data_id, data_arr in loaded_data_arrs.items(): data_arr = data_arr.compute() var_name = data_arr.attrs["name"] if var_name not in ['latitude', 'longitude']: self._check_area(data_arr) self._check_fill(data_arr) self._check_attrs(data_arr, platform_name) input_fake_data = test_data['BT'] if "btemp" in var_name \ else test_data[var_name] if "valid_range" in input_fake_data.attrs: valid_range = input_fake_data.attrs["valid_range"] self._check_valid_range(data_arr, valid_range) if "_FillValue" in input_fake_data.attrs: fill_value = input_fake_data.attrs["_FillValue"] self._check_fill_value(data_arr, fill_value) sensor = data_arr.attrs['sensor'] if reader_kw.get('limb_correction', True) and sensor == 'atms': fd.assert_called() else: fd.assert_not_called() assert data_arr.attrs['units'] == DEFAULT_UNITS[var_name] satpy-0.34.0/satpy/tests/reader_tests/test_modis_l1b.py000066400000000000000000000161711420401153000232020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unit tests for MODIS L1b HDF reader.""" from __future__ import annotations import dask import numpy as np import pytest from pytest_lazyfixture import lazy_fixture from satpy import Scene, available_readers from ..utils import CustomScheduler, make_dataid from ._modis_fixtures import ( AVAILABLE_1KM_PRODUCT_NAMES, AVAILABLE_HKM_PRODUCT_NAMES, AVAILABLE_QKM_PRODUCT_NAMES, _shape_for_resolution, ) def _check_shared_metadata(data_arr): assert data_arr.attrs["sensor"] == "modis" assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs['reader'] == 'modis_l1b' def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res, check_callback=_check_shared_metadata): scene.load(["longitude", "latitude"], resolution=resolution) lon_id = make_dataid(name="longitude", resolution=exp_res) lat_id = make_dataid(name="latitude", resolution=exp_res) if has_res: lon_arr = scene[lon_id] lat_arr = scene[lat_id] assert lon_arr.shape == exp_shape assert lat_arr.shape == exp_shape # compute lon/lat at the same time to avoid wasted computation lon_vals, lat_vals = dask.compute(lon_arr, lat_arr) np.testing.assert_array_less(lon_vals, 0) np.testing.assert_array_less(0, lat_vals) check_callback(lon_arr) check_callback(lat_arr) else: pytest.raises(KeyError, scene.__getitem__, lon_id) pytest.raises(KeyError, scene.__getitem__, lat_id) class TestModisL1b: """Test MODIS L1b reader.""" def test_available_reader(self): """Test that MODIS L1b reader is available.""" assert 'modis_l1b' in available_readers() @pytest.mark.parametrize( ('input_files', 'expected_names', 'expected_data_res', 'expected_geo_res'), [ [lazy_fixture('modis_l1b_nasa_mod021km_file'), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [1000], [5000, 1000]], [lazy_fixture('modis_l1b_imapp_1000m_file'), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [1000], [5000, 1000]], [lazy_fixture('modis_l1b_nasa_mod02hkm_file'), AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]], [lazy_fixture('modis_l1b_nasa_mod02qkm_file'), AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]], ] ) def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res): """Test that datasets are available.""" scene = Scene(reader='modis_l1b', filenames=input_files) available_datasets = scene.available_dataset_names() assert len(available_datasets) > 0 assert 'longitude' in available_datasets assert 'latitude' in available_datasets for chan_name in expected_names: assert chan_name in available_datasets available_data_ids = scene.available_dataset_ids() available_datas = {x: [] for x in expected_data_res} available_geos = {x: [] for x in expected_geo_res} # Make sure that every resolution from the reader is what we expect for data_id in available_data_ids: res = data_id['resolution'] if data_id['name'] in ['longitude', 'latitude']: assert res in expected_geo_res available_geos[res].append(data_id) else: assert res in expected_data_res available_datas[res].append(data_id) # Make sure that every resolution we expect has at least one dataset for exp_res, avail_id in available_datas.items(): assert avail_id, f"Missing datasets for data resolution {exp_res}" for exp_res, avail_id in available_geos.items(): assert avail_id, f"Missing geo datasets for geo resolution {exp_res}" @pytest.mark.parametrize( 
('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'), [ [lazy_fixture('modis_l1b_nasa_mod021km_file'), True, False, False, 1000], [lazy_fixture('modis_l1b_imapp_1000m_file'), True, False, False, 1000], [lazy_fixture('modis_l1b_nasa_mod02hkm_file'), False, True, True, 250], [lazy_fixture('modis_l1b_nasa_mod02qkm_file'), False, True, True, 250], [lazy_fixture('modis_l1b_nasa_1km_mod03_files'), True, True, True, 250], ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" scene = Scene(reader='modis_l1b', filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)): _load_and_check_geolocation(scene, "*", default_res, default_shape, True) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km) _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500) _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250) def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): """Test loading satellite zenith angle band.""" scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) dataset_name = 'satellite_zenith_angle' scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 _check_shared_metadata(dataset) def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) dataset_name = '1' scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 _check_shared_metadata(dataset) satpy-0.34.0/satpy/tests/reader_tests/test_modis_l2.py000066400000000000000000000164151420401153000230420ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unit tests for MODIS L2 HDF reader.""" from __future__ import annotations import dask import numpy as np import pytest from pytest_lazyfixture import lazy_fixture from satpy import Scene, available_readers from ..utils import CustomScheduler, make_dataid from ._modis_fixtures import _shape_for_resolution def _check_shared_metadata(data_arr, expect_area=False): assert data_arr.attrs["sensor"] == "modis" assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs['reader'] == 'modis_l2' if expect_area: assert data_arr.attrs.get('area') is not None else: assert 'area' not in data_arr.attrs class TestModisL2: """Test MODIS L2 reader.""" def test_available_reader(self): """Test that MODIS L2 reader is available.""" assert 'modis_l2' in available_readers() def test_scene_available_datasets(self, modis_l2_nasa_mod35_file): """Test that datasets are available.""" scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 assert 'cloud_mask' in available_datasets assert 'latitude' in available_datasets assert 'longitude' in available_datasets @pytest.mark.parametrize( ('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'), [ [lazy_fixture('modis_l2_nasa_mod35_file'), True, False, False, 1000], ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" from .test_modis_l1b import _load_and_check_geolocation scene = Scene(reader='modis_l2', filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)): _load_and_check_geolocation(scene, "*", default_res, default_shape, True, check_callback=_check_shared_metadata) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km, check_callback=_check_shared_metadata) _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500, check_callback=_check_shared_metadata) _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250, check_callback=_check_shared_metadata) def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): """Test loading quality assurance.""" scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file) dataset_name = 'quality_assurance' scene.load([dataset_name]) quality_assurance_id = make_dataid(name=dataset_name, resolution=1000) assert quality_assurance_id in scene quality_assurance = scene[quality_assurance_id] assert quality_assurance.shape == _shape_for_resolution(1000) _check_shared_metadata(quality_assurance, expect_area=True) @pytest.mark.parametrize( ('input_files', 'loadables', 'request_resolution', 'exp_resolution', 'exp_area'), [ [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), ["cloud_mask"], 1000, 1000, True], [lazy_fixture('modis_l2_imapp_mask_byte1_geo_files'), ["cloud_mask", "land_sea_mask", "snow_ice_mask"], None, 1000, True], ] ) def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area): """Test loading category products.""" scene = Scene(reader='modis_l2', filenames=input_files) kwargs = {"resolution": request_resolution} if request_resolution is not None else {} scene.load(loadables, **kwargs) for 
ds_name in loadables: cat_id = make_dataid(name=ds_name, resolution=exp_resolution) assert cat_id in scene cat_data_arr = scene[cat_id] assert cat_data_arr.shape == _shape_for_resolution(exp_resolution) assert cat_data_arr.attrs.get('resolution') == exp_resolution # mask variables should be integers assert np.issubdtype(cat_data_arr.dtype, np.integer) assert cat_data_arr.attrs.get('_FillValue') is not None _check_shared_metadata(cat_data_arr, expect_area=exp_area) @pytest.mark.parametrize( ('input_files', 'exp_area'), [ [lazy_fixture('modis_l2_nasa_mod35_file'), False], [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), True], ] ) def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): """Test loading 250m cloud mask.""" scene = Scene(reader='modis_l2', filenames=input_files) dataset_name = 'cloud_mask' scene.load([dataset_name], resolution=250) cloud_mask_id = make_dataid(name=dataset_name, resolution=250) assert cloud_mask_id in scene cloud_mask = scene[cloud_mask_id] assert cloud_mask.shape == _shape_for_resolution(250) # mask variables should be integers assert np.issubdtype(cloud_mask.dtype, np.integer) assert cloud_mask.attrs.get('_FillValue') is not None _check_shared_metadata(cloud_mask, expect_area=exp_area) @pytest.mark.parametrize( ('input_files', 'loadables', 'exp_resolution', 'exp_area'), [ [lazy_fixture('modis_l2_nasa_mod06_file'), ["surface_pressure"], 5000, True], [lazy_fixture('modis_l2_imapp_snowmask_file'), ["snow_mask"], 1000, False], [lazy_fixture('modis_l2_imapp_snowmask_geo_files'), ["snow_mask"], 1000, True], ] ) def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area): """Load and check an L2 variable.""" scene = Scene(reader='modis_l2', filenames=input_files) scene.load(loadables) for ds_name in loadables: assert ds_name in scene data_arr = scene[ds_name] assert data_arr.shape == _shape_for_resolution(exp_resolution) assert data_arr.attrs.get('resolution') == exp_resolution _check_shared_metadata(data_arr, expect_area=exp_area) satpy-0.34.0/satpy/tests/reader_tests/test_msi_safe.py000066400000000000000000002501311420401153000231130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.msi_safe module.""" import unittest import unittest.mock as mock from io import BytesIO, StringIO import numpy as np import pytest import xarray as xr from satpy.tests.utils import make_dataid mtd_tile_xml = b""" S2B_OPER_MSI_L1C_TL_VGS1_20201001T183541_A018656_T16SEB_N02.09 S2B_OPER_MSI_L1C_DS_VGS1_20201001T183541_S20201001T162735_N02.09 NOMINAL 2020-10-01T16:34:23.153611Z VGS1 2020-10-01T18:55:55.59803Z WGS84 / UTM zone 16N EPSG:32616 10980 10980 5490 5490 1830 1830 499980 3700020 10 -10 499980 3700020 20 -20 499980 3700020 60 -60 5000 5000 39.8824 39.8636 39.8448 39.8261 39.8074 39.7888 39.7702 39.7516 39.7331 39.7145 39.6961 39.6776 39.6592 39.6408 39.6225 39.6042 39.5859 39.5677 39.5495 39.5313 39.5132 39.4951 39.477 39.8404 39.8216 39.8029 39.7841 39.7655 39.7468 39.7282 39.7096 39.691 39.6725 39.654 39.6355 39.6171 39.5987 39.5804 39.5621 39.5438 39.5255 39.5073 39.4891 39.471 39.4529 39.4348 39.7985 39.7797 39.7609 39.7422 39.7235 39.7048 39.6862 39.6675 39.649 39.6304 39.6119 39.5935 39.575 39.5566 39.5383 39.5199 39.5016 39.4834 39.4651 39.4469 39.4288 39.4107 39.3926 39.7566 39.7377 39.719 39.7002 39.6815 39.6628 39.6441 39.6255 39.6069 39.5884 39.5699 39.5514 39.533 39.5145 39.4962 39.4778 39.4595 39.4412 39.423 39.4048 39.3866 39.3685 39.3504 39.7146 39.6958 39.677 39.6582 39.6395 39.6208 39.6021 39.5835 39.5649 39.5464 39.5278 39.5093 39.4909 39.4724 39.4541 39.4357 39.4174 39.3991 39.3808 39.3626 39.3444 39.3263 39.3082 39.6727 39.6539 39.635 39.6163 39.5975 39.5788 39.5601 39.5415 39.5229 39.5043 39.4858 39.4673 39.4488 39.4304 39.412 39.3936 39.3752 39.3569 39.3387 39.3204 39.3023 39.2841 39.266 39.6308 39.6119 39.5931 39.5743 39.5556 39.5368 39.5181 39.4995 39.4809 39.4623 39.4437 39.4252 39.4067 39.3883 39.3699 39.3515 39.3331 39.3148 39.2965 39.2783 39.2601 39.2419 39.2238 39.5889 39.57 39.5512 39.5324 39.5136 39.4949 39.4762 39.4575 39.4389 39.4203 39.4017 39.3832 39.3647 39.3462 39.3278 39.3094 39.291 39.2727 39.2544 39.2361 39.2179 39.1997 39.1816 39.547 39.5281 39.5092 39.4904 39.4716 39.4529 39.4342 39.4155 39.3968 39.3782 39.3596 39.3411 39.3226 39.3041 39.2857 39.2673 39.2489 39.2306 39.2123 39.194 39.1758 39.1576 39.1394 39.5051 39.4862 39.4673 39.4485 39.4297 39.4109 39.3922 39.3735 39.3548 39.3362 39.3176 39.2991 39.2805 39.2621 39.2436 39.2252 39.2068 39.1884 39.1701 39.1518 39.1336 39.1154 39.0972 39.4632 39.4442 39.4254 39.4065 39.3877 39.3689 39.3502 39.3315 39.3128 39.2942 39.2756 39.257 39.2385 39.22 39.2015 39.1831 39.1647 39.1463 39.128 39.1097 39.0914 39.0732 39.055 39.4213 39.4023 39.3834 39.3646 39.3458 39.327 39.3082 39.2895 39.2708 39.2522 39.2336 39.215 39.1964 39.1779 39.1594 39.141 39.1226 39.1042 39.0859 39.0676 39.0493 39.0311 39.0129 39.3794 39.3604 39.3415 39.3227 39.3038 39.285 39.2663 39.2475 39.2288 39.2102 39.1915 39.1729 39.1544 39.1359 39.1174 39.0989 39.0805 39.0621 39.0438 39.0254 39.0072 38.9889 38.9707 39.3375 39.3185 39.2996 39.2807 39.2619 39.2431 39.2243 39.2056 39.1868 39.1682 39.1495 39.1309 39.1123 39.0938 39.0753 39.0568 39.0384 39.02 39.0016 38.9833 38.965 38.9468 38.9285 39.2956 39.2766 39.2577 39.2388 39.22 39.2011 39.1823 39.1636 39.1449 39.1262 39.1075 39.0889 39.0703 39.0518 39.0332 39.0148 38.9963 38.9779 38.9595 38.9412 38.9229 38.9046 38.8864 39.2537 39.2348 39.2158 39.1969 39.178 39.1592 39.1404 39.1216 39.1029 39.0842 39.0655 39.0469 39.0283 39.0097 38.9912 38.9727 38.9542 38.9358 38.9174 38.8991 38.8807 38.8625 38.8442 39.2119 39.1929 39.1739 39.155 39.1361 39.1173 39.0984 
39.0797 39.0609 39.0422 39.0235 39.0049 38.9862 38.9677 38.9491 38.9306 38.9122 38.8937 38.8753 38.8569 38.8386 38.8203 38.8021 39.17 39.151 39.132 39.1131 39.0942 39.0753 39.0565 39.0377 39.0189 39.0002 38.9815 38.9628 38.9442 38.9256 38.9071 38.8886 38.8701 38.8516 38.8332 38.8148 38.7965 38.7782 38.7599 39.1281 39.1091 39.0901 39.0712 39.0523 39.0334 39.0145 38.9957 38.977 38.9582 38.9395 38.9208 38.9022 38.8836 38.865 38.8465 38.828 38.8095 38.7911 38.7727 38.7544 38.736 38.7178 39.0863 39.0672 39.0482 39.0293 39.0104 38.9915 38.9726 38.9538 38.935 38.9162 38.8975 38.8788 38.8602 38.8416 38.823 38.8045 38.7859 38.7675 38.749 38.7306 38.7122 38.6939 38.6756 39.0444 39.0254 39.0064 38.9874 38.9685 38.9496 38.9307 38.9118 38.893 38.8743 38.8555 38.8368 38.8182 38.7996 38.781 38.7624 38.7439 38.7254 38.7069 38.6885 38.6701 38.6518 38.6335 39.0026 38.9835 38.9645 38.9455 38.9266 38.9076 38.8888 38.8699 38.8511 38.8323 38.8136 38.7949 38.7762 38.7575 38.7389 38.7204 38.7018 38.6833 38.6649 38.6464 38.628 38.6097 38.5913 38.9607 38.9417 38.9226 38.9036 38.8847 38.8657 38.8468 38.828 38.8091 38.7903 38.7716 38.7529 38.7342 38.7155 38.6969 38.6783 38.6598 38.6413 38.6228 38.6043 38.5859 38.5676 38.5492 5000 5000 154.971 155.049 155.126 155.204 155.282 155.359 155.437 155.515 155.593 155.671 155.749 155.827 155.905 155.983 156.061 156.14 156.218 156.296 156.375 156.453 156.532 156.61 156.689 154.953 155.03 155.108 155.186 155.263 155.341 155.419 155.497 155.575 155.653 155.731 155.809 155.887 155.965 156.043 156.122 156.2 156.278 156.357 156.435 156.514 156.592 156.671 154.934 155.012 155.09 155.167 155.245 155.323 155.401 155.478 155.556 155.634 155.712 155.79 155.869 155.947 156.025 156.103 156.182 156.26 156.338 156.417 156.495 156.574 156.653 154.916 154.994 155.071 155.149 155.227 155.304 155.382 155.46 155.538 155.616 155.694 155.772 155.85 155.928 156.007 156.085 156.163 156.242 156.32 156.399 156.477 156.556 156.634 154.897 154.975 155.053 155.13 155.208 155.286 155.364 155.442 155.52 155.598 155.676 155.754 155.832 155.91 155.988 156.067 156.145 156.223 156.302 156.38 156.459 156.538 156.616 154.879 154.956 155.034 155.112 155.19 155.267 155.345 155.423 155.501 155.579 155.657 155.735 155.814 155.892 155.97 156.048 156.127 156.205 156.284 156.362 156.441 156.519 156.598 154.86 154.938 155.015 155.093 155.171 155.249 155.327 155.405 155.483 155.561 155.639 155.717 155.795 155.873 155.952 156.03 156.108 156.187 156.265 156.344 156.422 156.501 156.58 154.841 154.919 154.997 155.075 155.152 155.23 155.308 155.386 155.464 155.542 155.62 155.698 155.777 155.855 155.933 156.012 156.09 156.168 156.247 156.325 156.404 156.483 156.561 154.823 154.9 154.978 155.056 155.134 155.212 155.289 155.367 155.445 155.524 155.602 155.68 155.758 155.836 155.915 155.993 156.071 156.15 156.228 156.307 156.386 156.464 156.543 154.804 154.882 154.959 155.037 155.115 155.193 155.271 155.349 155.427 155.505 155.583 155.661 155.739 155.818 155.896 155.974 156.053 156.131 156.21 156.289 156.367 156.446 156.525 154.785 154.863 154.941 155.018 155.096 155.174 155.252 155.33 155.408 155.486 155.564 155.643 155.721 155.799 155.878 155.956 156.034 156.113 156.191 156.27 156.349 156.427 156.506 154.766 154.844 154.922 155 155.077 155.155 155.233 155.311 155.389 155.467 155.546 155.624 155.702 155.78 155.859 155.937 156.016 156.094 156.173 156.251 156.33 156.409 156.488 154.747 154.825 154.903 154.981 155.059 155.136 155.214 155.292 155.371 155.449 155.527 155.605 155.683 155.762 155.84 155.919 155.997 156.076 156.154 
156.233 156.312 156.39 156.469 154.728 154.806 154.884 154.962 155.04 155.118 155.196 155.274 155.352 155.43 155.508 155.586 155.665 155.743 155.821 155.9 155.978 156.057 156.136 156.214 156.293 156.372 156.451 154.709 154.787 154.865 154.943 155.021 155.099 155.177 155.255 155.333 155.411 155.489 155.568 155.646 155.724 155.803 155.881 155.96 156.038 156.117 156.196 156.274 156.353 156.432 154.69 154.768 154.846 154.924 155.002 155.08 155.158 155.236 155.314 155.392 155.47 155.549 155.627 155.705 155.784 155.862 155.941 156.019 156.098 156.177 156.256 156.334 156.413 154.671 154.749 154.827 154.905 154.983 155.061 155.139 155.217 155.295 155.373 155.451 155.53 155.608 155.686 155.765 155.843 155.922 156.001 156.079 156.158 156.237 156.316 156.394 154.652 154.73 154.808 154.886 154.964 155.042 155.12 155.198 155.276 155.354 155.432 155.511 155.589 155.668 155.746 155.825 155.903 155.982 156.06 156.139 156.218 156.297 156.376 154.633 154.711 154.789 154.866 154.944 155.022 155.101 155.179 155.257 155.335 155.413 155.492 155.57 155.649 155.727 155.806 155.884 155.963 156.042 156.12 156.199 156.278 156.357 154.614 154.691 154.769 154.847 154.925 155.003 155.081 155.16 155.238 155.316 155.394 155.473 155.551 155.63 155.708 155.787 155.865 155.944 156.023 156.101 156.18 156.259 156.338 154.594 154.672 154.75 154.828 154.906 154.984 155.062 155.14 155.219 155.297 155.375 155.454 155.532 155.61 155.689 155.768 155.846 155.925 156.004 156.082 156.161 156.24 156.319 154.575 154.653 154.731 154.809 154.887 154.965 155.043 155.121 155.199 155.278 155.356 155.434 155.513 155.591 155.67 155.748 155.827 155.906 155.985 156.063 156.142 156.221 156.3 154.556 154.633 154.711 154.789 154.867 154.945 155.024 155.102 155.18 155.258 155.337 155.415 155.494 155.572 155.651 155.729 155.808 155.887 155.965 156.044 156.123 156.202 156.281 39.2158335161115 155.62398389104 5000 5000 NaN 11.7128 11.3368 10.9601 10.5837 10.2053 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.6285 11.2531 10.8763 10.4977 10.1207 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.9203 11.5439 11.1676 10.79 10.4135 10.036 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.8359 11.4595 11.0825 10.7054 10.3284 9.95143 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.751 11.3743 10.9977 10.6209 10.2437 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.6664 11.2901 10.9134 10.5362 10.1591 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.5818 11.2061 10.8293 10.4518 10.0747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.4976 11.121 10.7439 10.3664 9.98937 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN 111.269 111.67 112.096 112.551 113.041 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.354 111.759 112.192 112.657 113.152 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.053 111.44 111.852 112.292 112.762 113.266 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.136 111.529 111.946 112.392 112.869 113.381 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.219 111.618 112.042 112.494 112.978 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.305 111.708 112.138 112.597 113.089 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.391 111.799 112.235 112.702 113.201 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.478 111.893 112.336 112.809 113.317 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN 9.82039 9.4373 9.05284 8.66805 8.28339 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.73454 9.35159 8.96724 8.58182 8.19763 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0315 9.64827 9.26401 8.87996 8.49572 8.11079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.94572 9.56205 9.17796 8.79367 8.4095 8.02451 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.85977 9.47669 9.09189 8.70763 8.32282 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.77437 9.38968 9.00597 8.62183 8.23655 NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.68751 9.30377 8.91958 8.53514 8.15057 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.98449 9.60143 9.21746 8.83286 8.4486 8.06421 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.51494 9.13074 8.74664 8.3621 7.97741 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN 92.2969 91.9939 91.6606 91.294 90.8911 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.2267 91.9172 91.5775 91.2031 90.7918 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.4452 92.1553 91.8379 91.4911 91.1101 90.6885 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.3781 92.0825 91.7591 91.4043 91.0144 90.5834 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.31 92.0089 91.6783 91.3163 90.9166 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.2413 91.9324 91.5954 91.2255 90.8166 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.1696 91.8556 91.5111 91.1322 90.7147 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.391 92.0976 91.7769 91.4248 91.0382 90.611 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.0248 91.6966 91.3373 90.9417 90.5043 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.94194 7.56511 7.19038 6.81626 6.44423 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8569 7.48093 7.10605 6.73281 6.36089 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.7724 7.39658 7.02215 6.64892 6.27782 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.06455 7.688 7.31247 6.93823 6.56551 6.19477 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97983 7.60366 7.2287 6.85441 6.48197 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8952 7.51946 7.14517 6.77071 6.39873 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.81105 7.43489 7.0603 6.68714 6.31558 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.72611 7.35074 6.97674 6.60389 6.23289 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.01804 7.64172 7.26672 6.89282 6.52025 6.14959 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.55748 7.18239 6.80886 6.43657 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.798 117.613 118.509 119.504 120.609 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.97 117.802 118.719 119.735 120.87 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.146 117.996 118.934 119.975 121.137 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.538 117.326 118.194 119.155 120.222 121.414 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.706 117.511 118.397 119.38 120.474 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.877 117.699 118.604 119.612 120.733 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.05 117.892 118.82 119.85 120.998 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN 117.231 118.089 119.037 120.092 121.27 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.616 117.414 118.291 119.262 120.343 121.552 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.601 118.499 119.492 120.6 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.98397 5.60436 5.22629 4.85051 4.47749 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.89902 5.51979 5.14214 4.76699 4.39482 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.81385 5.43495 5.05811 4.68338 4.31176 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.10878 5.72912 5.35071 4.97413 4.59998 4.22933 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02356 5.64376 5.26618 4.88984 4.51664 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.938 5.55897 5.1813 4.80571 4.43316 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.85332 5.47505 5.09703 4.72192 4.35017 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14847 5.76823 5.38949 5.01237 4.63811 4.26692 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.06275 5.68347 5.30458 4.92804 4.55459 4.18407 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9774 5.59788 5.21981 4.84402 4.47086 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.51271 5.13498 4.7597 4.38749 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.1015 86.1123 84.974 83.6538 82.1077 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.8857 85.8663 84.6903 83.3238 81.7192 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.6628 85.6105 84.3968 82.9801 81.3118 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.3809 86.4344 85.3483 84.0925 82.6251 80.8924 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.1741 86.1977 85.0768 83.7748 82.2541 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.9589 85.9527 84.7944 83.4481 81.8691 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.7408 85.7045 84.5049 83.1103 81.4708 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.4494 86.5139 85.442 84.2014 82.7561 81.052 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.244 86.2812 85.1729 83.8909 82.3929 80.6189 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.0343 86.0398 84.8955 83.5691 82.0132 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 85.7916 84.6089 83.2341 81.6205 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.27277 3.93031 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.19493 3.85385 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.11765 3.77876 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.38681 4.04091 3.70407 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.30823 3.96401 3.63007 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.22988 3.88788 3.55663 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.15225 3.8125 3.48381 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.4219 4.07481 3.73746 3.41201 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN 4.34311 3.998 3.66286 3.34095 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.26482 3.92174 3.58929 3.27063 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.18686 3.84597 3.51627 3.20131 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 3.7708 3.44395 3.13291 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.971 133.734 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.547 134.423 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 132.144 135.129 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.141 132.763 135.869 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.69 133.411 136.637 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.26 134.084 137.44 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.852 134.784 138.279 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 129.884 132.465 135.512 139.15 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.424 133.101 136.272 140.06 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.987 133.764 137.059 141.008 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.568 134.453 137.883 142.001 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 135.169 138.743 143.037 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.18024135920399 106.255157702848 7.22336464325122 106.346078097961 6.98729381785528 105.765382381968 6.92446640065506 105.09376719949 6.95791117837005 105.44993173891 6.99577507894955 105.692478311655 7.0231940118902 105.844309043016 7.05361232723533 105.969629461909 7.0871211819946 106.101277617057 6.93953882104395 105.275888180279 7.12343057570894 106.21247453177 7.04938612963508 106.030547019406 7.13282515906901 106.31610702063 0 0 GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B8A.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B8A.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B8A.gml 
GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B8A.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B8A.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_CLOUDS_B00.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/T16SEB_20201001T162019_PVI.jp2 """ # noqa mtd_l1c_old_xml = """ 2021-05-17T10:36:19.024Z 2021-05-17T10:36:19.024Z S2B_MSIL1C_20210517T103619_N7990_R008_T30QVE_20210929T075738.SAFE Level-1C S2MSI1C 79.90 https://doi.org/10.5270/S2_-742ikth 2021-09-29T07:57:38.000000Z Not applicable Not applicable Sentinel-2B INS-NOBS 2021-05-17T10:36:19.024Z 8 DESCENDING SAFE_COMPACT GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B01 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B02 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B03 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B04 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B05 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B06 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B07 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B08 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B8A GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B09 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B10 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B11 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B12 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_TCI NODATA 0 SATURATED 65535 3 2 1 10000 0.979428313059035 1874.3 1959.75 1824.93 1512.79 1425.78 1291.13 1175.57 1041.28 953.93 817.58 365.41 247.08 87.75 60 411 456 442.3 1 0.0062411 0.01024045 0.00402983 0.00642179 0.00552753 0.0065525 0.00409887 0.006297 0.00436742 0.00233356 0.00058162 0.00202276 0.00294328 0.00485362 0.00317041 0.00237657 0.00234612 0.00440152 0.01292397 0.05001678 0.18650104 0.45441623 
0.72307877 0.83999211 0.86456334 0.87472096 0.89215296 0.91090814 0.92588017 0.93924094 0.94491826 0.95078529 0.96803023 0.99939195 1 0.97548364 0.96148351 0.94986211 0.91841452 0.87989802 0.80383677 0.59752075 0.30474132 0.10798014 0.0304465 0.00885119 10 456 532 492.3 1 0.05529541 0.12005068 0.25199051 0.4623617 0.65162379 0.77642171 0.82319091 0.83083116 0.83382106 0.837526 0.86304286 0.88226141 0.90486326 0.92043837 0.93602675 0.930533 0.92714067 0.9161479 0.90551724 0.89745515 0.90266694 0.90854264 0.92047913 0.92417935 0.91845025 0.90743244 0.89733983 0.88646415 0.87189983 0.85643973 0.84473414 0.84190734 0.85644111 0.87782724 0.90261174 0.91840544 0.94585847 0.96887192 0.99336135 0.99927899 1 0.99520325 0.98412711 0.97947473 0.97808297 0.97213439 0.96277794 0.95342234 0.93802376 0.92460144 0.90932642 0.90192251 0.89184298 0.88963556 0.89146958 0.89877911 0.91056869 0.92427362 0.93823555 0.95311791 0.97150808 0.98737003 0.99658514 0.99367959 0.98144714 0.95874415 0.89291635 0.73566218 0.52060373 0.3322804 0.19492197 0.11732617 0.07507304 0.05094154 0.03213016 0.01510217 0.00447984 60 1339 1415 1376.9 1 2.472e-05 0.00013691 0.00012558 8.901e-05 0.00012425 9.941e-05 0.00013952 0.00015816 0.00019272 0.00025959 0.00032221 0.00034719 0.0003699 0.00054874 0.00105434 0.00218813 0.00480743 0.01135252 0.02671185 0.05776022 0.11176337 0.19587518 0.31418191 0.46188068 0.62292578 0.7709851 0.88086652 0.9448941 0.97405066 0.98616696 0.99306955 0.99775441 1 0.99942348 0.99616891 0.99082045 0.9842131 0.97708513 0.97013647 0.96374366 0.95755001 0.95127438 0.94546638 0.94069659 0.93759595 0.93624612 0.93510206 0.93054472 0.91630845 0.88530334 0.83129653 0.74856466 0.63524397 0.49733159 0.34907723 0.21259735 0.10971453 0.04789269 0.01853013 0.00716776 0.0031533 0.00157017 0.00084901 0.00053006 0.00033171 0.00019447 0.00022104 0.00022646 0.00018156 0.00016063 0.00015475 0.00014734 0.00014776 0.00017405 0.00023619 0.00012007 4.337e-05 3.97083657 3.81081866 4.21881648 4.7545091 5.16489535 5.06418355 4.7429031 6.789537 5.73223234 9.32447797 56.36387909 37.15464608 108.67071783 3 """ # noqa mtd_l1c_xml = """ 2021-05-17T10:36:19.024Z 2021-05-17T10:36:19.024Z S2B_MSIL1C_20210517T103619_N7990_R008_T30QVE_20210929T075738.SAFE Level-1C S2MSI1C 79.90 https://doi.org/10.5270/S2_-742ikth 2021-09-29T07:57:38.000000Z Not applicable Not applicable Sentinel-2B INS-NOBS 2021-05-17T10:36:19.024Z 8 DESCENDING SAFE_COMPACT GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B01 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B02 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B03 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B04 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B05 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B06 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B07 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B08 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B8A GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B09 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B10 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B11 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B12 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_TCI NODATA 0 
SATURATED 65535 3 2 1 10000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -2000 -1000 -1000 0.979428313059035 1874.3 1959.75 1824.93 1512.79 1425.78 1291.13 1175.57 1041.28 953.93 817.58 365.41 247.08 87.75 60 411 456 442.3 1 0.0062411 0.01024045 0.00402983 0.00642179 0.00552753 0.0065525 0.00409887 0.006297 0.00436742 0.00233356 0.00058162 0.00202276 0.00294328 0.00485362 0.00317041 0.00237657 0.00234612 0.00440152 0.01292397 0.05001678 0.18650104 0.45441623 0.72307877 0.83999211 0.86456334 0.87472096 0.89215296 0.91090814 0.92588017 0.93924094 0.94491826 0.95078529 0.96803023 0.99939195 1 0.97548364 0.96148351 0.94986211 0.91841452 0.87989802 0.80383677 0.59752075 0.30474132 0.10798014 0.0304465 0.00885119 10 456 532 492.3 1 0.05529541 0.12005068 0.25199051 0.4623617 0.65162379 0.77642171 0.82319091 0.83083116 0.83382106 0.837526 0.86304286 0.88226141 0.90486326 0.92043837 0.93602675 0.930533 0.92714067 0.9161479 0.90551724 0.89745515 0.90266694 0.90854264 0.92047913 0.92417935 0.91845025 0.90743244 0.89733983 0.88646415 0.87189983 0.85643973 0.84473414 0.84190734 0.85644111 0.87782724 0.90261174 0.91840544 0.94585847 0.96887192 0.99336135 0.99927899 1 0.99520325 0.98412711 0.97947473 0.97808297 0.97213439 0.96277794 0.95342234 0.93802376 0.92460144 0.90932642 0.90192251 0.89184298 0.88963556 0.89146958 0.89877911 0.91056869 0.92427362 0.93823555 0.95311791 0.97150808 0.98737003 0.99658514 0.99367959 0.98144714 0.95874415 0.89291635 0.73566218 0.52060373 0.3322804 0.19492197 0.11732617 0.07507304 0.05094154 0.03213016 0.01510217 0.00447984 60 1339 1415 1376.9 1 2.472e-05 0.00013691 0.00012558 8.901e-05 0.00012425 9.941e-05 0.00013952 0.00015816 0.00019272 0.00025959 0.00032221 0.00034719 0.0003699 0.00054874 0.00105434 0.00218813 0.00480743 0.01135252 0.02671185 0.05776022 0.11176337 0.19587518 0.31418191 0.46188068 0.62292578 0.7709851 0.88086652 0.9448941 0.97405066 0.98616696 0.99306955 0.99775441 1 0.99942348 0.99616891 0.99082045 0.9842131 0.97708513 0.97013647 0.96374366 0.95755001 0.95127438 0.94546638 0.94069659 0.93759595 0.93624612 0.93510206 0.93054472 0.91630845 0.88530334 0.83129653 0.74856466 0.63524397 0.49733159 0.34907723 0.21259735 0.10971453 0.04789269 0.01853013 0.00716776 0.0031533 0.00157017 0.00084901 0.00053006 0.00033171 0.00019447 0.00022104 0.00022646 0.00018156 0.00016063 0.00015475 0.00014734 0.00014776 0.00017405 0.00023619 0.00012007 4.337e-05 3.97083657 3.81081866 4.21881648 4.7545091 5.16489535 5.06418355 4.7429031 6.789537 5.73223234 9.32447797 56.36387909 37.15464608 108.67071783 3 """ # noqa class TestMTDXML(unittest.TestCase): """Test the SAFE MTD XML file handler.""" def setUp(self): """Set up the test case.""" from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") self.xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_tile_xml), filename_info, mock.MagicMock()) self.old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) def test_satellite_zenith_array(self): """Test reading the satellite zenith array.""" info = dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith") expected_data = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, 7.35181457, 6.44647222, 5.46144173, 
4.56625547, 3.86638233], [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]) res = self.xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle", resolution=60), info)[::200, ::200] np.testing.assert_allclose(res, expected_data) def test_old_xml_calibration(self): """Test the calibration of older data formats (no offset).""" fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) result = self.old_xml_fh.calibrate_to_reflectances(fake_data, "B01") np.testing.assert_allclose(result, [[[np.nan, 0.01, 0.02, 0.03], [0.04, 10, 655.34, np.inf]]]) def test_xml_calibration(self): """Test the calibration with radiometric offset.""" fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], [0.04 - 10, 0, 655.34 - 10, np.inf]]]) def test_xml_calibration_unmasked_saturated(self): """Test the calibration with radiometric offset but unmasked saturated pixels.""" from satpy.readers.msi_safe import SAFEMSIMDXML filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False) fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], [0.04 - 10, 0, 655.34 - 10, 655.35 - 10]]]) def test_xml_calibration_with_different_offset(self): """Test the calibration with a different offset.""" fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) result = self.xml_fh.calibrate_to_reflectances(fake_data, "B10") np.testing.assert_allclose(result, [[[np.nan, 0.01 - 20, 0.02 - 20, 0.03 - 20], [0.04 - 20, -10, 655.34 - 20, np.inf]]]) def test_xml_calibration_to_radiance(self): """Test the calibration with a different offset.""" fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) result = self.xml_fh.calibrate_to_radiances(fake_data, "B01") expected = np.array([[[np.nan, -251.584265, -251.332429, -251.080593], [-250.828757, 0., 16251.99095, np.inf]]]) np.testing.assert_allclose(result, expected) def test_xml_navigation(self): """Test the navigation.""" from pyproj import CRS crs = CRS('EPSG:32616') dsid = make_dataid(name="B01", resolution=60) result = self.xml_tile_fh.get_area_def(dsid) area_extents = (499980.0, 3590220.0, 609780.0, 3700020.0) assert result.crs == crs np.testing.assert_allclose(result.area_extent, area_extents) class TestSAFEMSIL1C: """Test case for image reading (jp2k).""" def 
setup(self): """Set up the test.""" from satpy.readers.msi_safe import SAFEMSITileMDXML self.filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None) self.fake_data = xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"]) self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), self.filename_info, mock.MagicMock()) @pytest.mark.parametrize("mask_saturated,calibration,expected", [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]])]) def test_calibration_and_masking(self, mask_saturated, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock(), mask_saturated=mask_saturated) self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), mda, self.tile_mda) with mock.patch("satpy.readers.msi_safe.rioxarray.open_rasterio", return_value=self.fake_data): res = self.jp2_fh.get_dataset(make_dataid(name="B01", calibration=calibration), info=dict()) np.testing.assert_allclose(res, expected) satpy-0.34.0/satpy/tests/reader_tests/test_msu_gsa_l1b.py000066400000000000000000000161221420401153000235210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'msu_gsa_l1b' reader.""" import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import make_dataid SOLCONST = '273.59' class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def _get_data(self, num_scans, num_cols): data = { 'Data/resolution_1km/Solar_Zenith_Angle': xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ 'scale': 0.01, 'offset': 0., 'fill_value': -999. }, dims=('x', 'y')), 'Geolocation/resolution_1km/Latitude': xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ 'scale': 0.01, 'offset': 0., 'fill_value': -999. }, dims=('x', 'y')), 'Geolocation/resolution_1km/Longitude': xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ 'scale': 0.01, 'offset': 0., 'fill_value': -999. 
}, dims=('x', 'y')), 'Data/resolution_1km/Radiance_01': xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ 'scale': 0.01, 'offset': 0., 'fill_value': -999., 'F_solar_constant': SOLCONST }, dims=('x', 'y')), 'Data/resolution_4km/Solar_Zenith_Angle': xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'scale': 0.01, 'offset': 0., 'fill_value': -999. }, dims=('x', 'y')), 'Geolocation/resolution_4km/Latitude': xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'scale': 0.01, 'offset': 0., 'fill_value': -999. }, dims=('x', 'y')), 'Geolocation/resolution_4km/Longitude': xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'scale': 0.01, 'offset': 0., 'fill_value': -999. }, dims=('x', 'y')), 'Data/resolution_4km/Brightness_Temperature_09': xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'scale': 0.01, 'offset': 0., 'fill_value': -999. }, dims=('x', 'y')), } return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" num_scans = 20 num_cols = 2048 global_attrs = { '/attr/timestamp_without_timezone': '2022-01-13T12:45:00', '/attr/satellite_observation_point_height': '38500.0', '/attr/satellite_observation_point_latitude': '71.25', '/attr/satellite_observation_point_longitude': '21.44', } data = self._get_data(num_scans, num_cols) test_content = {} test_content.update(global_attrs) test_content.update(data) return test_content class TestMSUGSABReader: """Test MSU GS/A L1B Reader.""" yaml_file = "msu_gsa_l1b.yaml" def setup(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers import load_reader from satpy.readers.msu_gsa_l1b import MSUGSAFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MSUGSAFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True filenames = ['ArcticaM1_202201131245.h5'] self.reader = load_reader(self.reader_configs) files = self.reader.select_files_from_pathnames(filenames) self.reader.create_filehandlers(files) def teardown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_irbt(self): """Test retrieval in brightness temperature.""" ds_ids = [make_dataid(name='C09', calibration='brightness_temperature')] res = self.reader.load(ds_ids) assert 'C09' in res assert res['C09'].attrs['calibration'] == 'brightness_temperature' assert res['C09'].attrs['platform_name'] == 'Arctica-M N1' assert res['C09'].attrs['sat_latitude'] == 71.25 assert res['C09'].attrs['sat_longitude'] == 21.44 assert res['C09'].attrs['sat_altitude'] == 38500. 
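# Note: the sat_latitude, sat_longitude and sat_altitude values asserted above are
# expected to be parsed from the 'satellite_observation_point_*' global attributes
# defined in FakeHDF5FileHandler2.get_test_content.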
assert res['C09'].attrs['resolution'] == 4000 def test_nocounts(self): """Test we can't get IR or VIS data as counts.""" ds_ids = [make_dataid(name='C01', calibration='counts')] with pytest.raises(KeyError): self.reader.load(ds_ids) ds_ids = [make_dataid(name='C09', calibration='counts')] with pytest.raises(KeyError): self.reader.load(ds_ids) def test_vis_cal(self): """Test that we can retrieve VIS data as both radiance and reflectance.""" ds_ids = [make_dataid(name='C01', calibration='radiance')] res = self.reader.load(ds_ids) rad = res['C01'].data ds_ids = [make_dataid(name='C01', calibration='reflectance')] res = self.reader.load(ds_ids) refl = res['C01'].data # Check the RAD->REFL conversion np.testing.assert_allclose(100 * np.pi * rad / float(SOLCONST), refl) satpy-0.34.0/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py000066400000000000000000000451761420401153000254020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for the FIDUCEO MVIRI FCDR Reader.""" from __future__ import annotations import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_radius_parameters from satpy.readers.mviri_l1b_fiduceo_nc import ( ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, DatasetWrapper, FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler, ) from satpy.tests.utils import make_dataid attrs_exp: dict = { 'platform': 'MET7', 'raw_metadata': {'foo': 'bar'}, 'sensor': 'MVIRI', 'orbital_parameters': { 'projection_longitude': 57.0, 'projection_latitude': 0.0, 'projection_altitude': 35785860.0, 'satellite_actual_longitude': 57.1, 'satellite_actual_latitude': 0.1, } } attrs_refl_exp = attrs_exp.copy() attrs_refl_exp.update( {'sun_earth_distance_correction_applied': True, 'sun_earth_distance_correction_factor': 1.} ) acq_time_vis_exp = [np.datetime64('1970-01-01 00:30'), np.datetime64('1970-01-01 00:30'), np.datetime64('1970-01-01 02:30'), np.datetime64('1970-01-01 02:30')] vis_counts_exp = xr.DataArray( np.array( [[0., 17., 34., 51.], [68., 85., 102., 119.], [136., 153., np.nan, 187.], [204., 221., 238., 255]], dtype=np.float32 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_vis_exp), }, attrs=attrs_exp ) vis_rad_exp = xr.DataArray( np.array( [[np.nan, 18.56, 38.28, 58.], [77.72, 97.44, 117.16, 136.88], [156.6, 176.32, np.nan, 215.76], [235.48, 255.2, 274.92, 294.64]], dtype=np.float32 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_vis_exp), }, attrs=attrs_exp ) vis_refl_exp = xr.DataArray( np.array( [[np.nan, 23.440929, np.nan, np.nan], [40.658744, 66.602233, 147.970867, np.nan], [75.688217, 92.240733, np.nan, np.nan], [np.nan, np.nan, np.nan, np.nan]], dtype=np.float32 ), # (0, 0) and (2, 2) are NaN because radiance is NaN # (0, 2) is NaN because SZA >= 90 degrees # 
Last row/col is NaN due to SZA interpolation dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_vis_exp), }, attrs=attrs_refl_exp ) u_vis_refl_exp = xr.DataArray( np.array( [[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8], [0.9, 1.0, 1.1, 1.2], [1.3, 1.4, 1.5, 1.6]], dtype=np.float32 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_vis_exp), }, attrs=attrs_exp ) acq_time_ir_wv_exp = [np.datetime64('1970-01-01 00:30'), np.datetime64('1970-01-01 02:30')] wv_counts_exp = xr.DataArray( np.array( [[0, 85], [170, 255]], dtype=np.uint8 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_ir_wv_exp), }, attrs=attrs_exp ) wv_rad_exp = xr.DataArray( np.array( [[np.nan, 3.75], [8, 12.25]], dtype=np.float32 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_ir_wv_exp), }, attrs=attrs_exp ) wv_bt_exp = xr.DataArray( np.array( [[np.nan, 230.461366], [252.507448, 266.863289]], dtype=np.float32 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_ir_wv_exp), }, attrs=attrs_exp ) ir_counts_exp = xr.DataArray( np.array( [[0, 85], [170, 255]], dtype=np.uint8 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_ir_wv_exp), }, attrs=attrs_exp ) ir_rad_exp = xr.DataArray( np.array( [[np.nan, 80], [165, 250]], dtype=np.float32 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_ir_wv_exp), }, attrs=attrs_exp ) ir_bt_exp = xr.DataArray( np.array( [[np.nan, 178.00013189], [204.32955838, 223.28709913]], dtype=np.float32 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_ir_wv_exp), }, attrs=attrs_exp ) quality_pixel_bitmask_exp = xr.DataArray( np.array( [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]], dtype=np.uint8 ), dims=('y', 'x'), coords={ 'acq_time': ('y', acq_time_vis_exp), }, attrs=attrs_exp ) sza_vis_exp = xr.DataArray( np.array( [[45., 67.5, 90., np.nan], [22.5, 45., 67.5, np.nan], [0., 22.5, 45., np.nan], [np.nan, np.nan, np.nan, np.nan]], dtype=np.float32 ), dims=('y', 'x'), attrs=attrs_exp ) sza_ir_wv_exp = xr.DataArray( np.array( [[45, 90], [0, 45]], dtype=np.float32 ), dims=('y', 'x'), attrs=attrs_exp ) area_vis_exp = AreaDefinition( area_id='geos_mviri_4x4', proj_id='geos_mviri_4x4', description='MVIRI Geostationary Projection', projection={ 'proj': 'geos', 'lon_0': 57.0, 'h': ALTITUDE, 'a': EQUATOR_RADIUS, 'b': POLE_RADIUS }, width=4, height=4, area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392] ) area_ir_wv_exp = area_vis_exp.copy( area_id='geos_mviri_2x2', proj_id='geos_mviri_2x2', width=2, height=2 ) @pytest.fixture(name='fake_dataset') def fixture_fake_dataset(): """Create fake dataset.""" count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) count_wv = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) count_vis = da.linspace(0, 255, 16, dtype=np.uint8).reshape(4, 4) sza = da.from_array( np.array( [[45, 90], [0, 45]], dtype=np.float32 ) ) mask = da.from_array( np.array( [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], # 1 = "invalid" [0, 0, 0, 0]], dtype=np.uint8 ) ) time = np.arange(4).astype('datetime64[h]').reshape(2, 2) ds = xr.Dataset( data_vars={ 'count_vis': (('y', 'x'), count_vis), 'count_wv': (('y_ir_wv', 'x_ir_wv'), count_wv), 'count_ir': (('y_ir_wv', 'x_ir_wv'), count_ir), 'toa_bidirectional_reflectance_vis': vis_refl_exp / 100, 'u_independent_toa_bidirectional_reflectance': u_vis_refl_exp / 100, 'quality_pixel_bitmask': (('y', 'x'), mask), 'solar_zenith_angle': (('y_tie', 'x_tie'), sza), 'time_ir_wv': (('y_ir_wv', 'x_ir_wv'), time), 'a_ir': -5.0, 'b_ir': 1.0, 'bt_a_ir': 10.0, 'bt_b_ir': -1000.0, 'a_wv': -0.5, 'b_wv': 
0.05, 'bt_a_wv': 10.0, 'bt_b_wv': -2000.0, 'years_since_launch': 20.0, 'a0_vis': 1.0, 'a1_vis': 0.01, 'a2_vis': -0.0001, 'mean_count_space_vis': 1.0, 'distance_sun_earth': 1.0, 'solar_irradiance_vis': 650.0, 'sub_satellite_longitude_start': 57.1, 'sub_satellite_longitude_end': np.nan, 'sub_satellite_latitude_start': np.nan, 'sub_satellite_latitude_end': 0.1, }, coords={ 'y': [1, 2, 3, 4], 'x': [1, 2, 3, 4], 'y_ir_wv': [1, 2], 'x_ir_wv': [1, 2], 'y_tie': [1, 2], 'x_tie': [1, 2] }, attrs={'foo': 'bar'} ) ds['count_ir'].attrs['ancillary_variables'] = 'a_ir b_ir' ds['count_wv'].attrs['ancillary_variables'] = 'a_wv b_wv' return ds @pytest.fixture( name='file_handler', params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) def fixture_file_handler(fake_dataset, request): """Create mocked file handler.""" marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True if marker: mask_bad_quality = marker.kwargs['mask_bad_quality'] fh_class = request.param with mock.patch('satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset') as open_dataset: open_dataset.return_value = fake_dataset return fh_class( filename='filename', filename_info={'platform': 'MET7', 'sensor': 'MVIRI', 'projection_longitude': '57.0'}, filetype_info={'foo': 'bar'}, mask_bad_quality=mask_bad_quality ) @pytest.fixture(name='reader') def fixture_reader(): """Return MVIRI FIDUCEO FCDR reader.""" from satpy._config import config_search_paths from satpy.readers import load_reader reader_configs = config_search_paths( os.path.join("readers", "mviri_l1b_fiduceo_nc.yaml")) reader = load_reader(reader_configs) return reader class TestFiduceoMviriFileHandlers: """Unit tests for FIDUCEO MVIRI file handlers.""" def test_init(self, file_handler): """Test file handler initialization.""" assert file_handler.projection_longitude == 57.0 assert file_handler.mask_bad_quality is True @pytest.mark.parametrize( ('name', 'calibration', 'resolution', 'expected'), [ ('VIS', 'counts', 2250, vis_counts_exp), ('VIS', 'radiance', 2250, vis_rad_exp), ('VIS', 'reflectance', 2250, vis_refl_exp), ('WV', 'counts', 4500, wv_counts_exp), ('WV', 'radiance', 4500, wv_rad_exp), ('WV', 'brightness_temperature', 4500, wv_bt_exp), ('IR', 'counts', 4500, ir_counts_exp), ('IR', 'radiance', 4500, ir_rad_exp), ('IR', 'brightness_temperature', 4500, ir_bt_exp), ('quality_pixel_bitmask', None, 2250, quality_pixel_bitmask_exp), ('solar_zenith_angle', None, 2250, sza_vis_exp), ('solar_zenith_angle', None, 4500, sza_ir_wv_exp), ('u_independent_toa_bidirectional_reflectance', None, 4500, u_vis_refl_exp) ] ) def test_get_dataset(self, file_handler, name, calibration, resolution, expected): """Test getting datasets.""" id_keys = {'name': name, 'resolution': resolution} if calibration: id_keys['calibration'] = calibration dataset_id = make_dataid(**id_keys) dataset_info = {'platform': 'MET7'} is_easy = isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler) is_vis = name == 'VIS' is_refl = calibration == 'reflectance' if is_easy and is_vis and not is_refl: # VIS counts/radiance not available in easy FCDR with pytest.raises(ValueError): file_handler.get_dataset(dataset_id, dataset_info) else: ds = file_handler.get_dataset(dataset_id, dataset_info) xr.testing.assert_allclose(ds, expected) assert ds.dtype == expected.dtype assert ds.attrs == expected.attrs def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" # Time may have different names and satellite position might be missing 
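# Renaming 'time_ir_wv' to 'time' and dropping 'sub_satellite_longitude_start' below
# emulates such files; the handler should still return the VIS reflectance, only
# leaving the actual satellite position out of the 'orbital_parameters' attribute.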
file_handler.nc.nc = file_handler.nc.nc.rename( {'time_ir_wv': 'time'} ) file_handler.nc.nc = file_handler.nc.nc.drop_vars( ['sub_satellite_longitude_start'] ) dataset_id = make_dataid( name='VIS', calibration='reflectance', resolution=2250 ) ds = file_handler.get_dataset(dataset_id, {'platform': 'MET7'}) assert 'actual_satellite_longitude' not in ds.attrs['orbital_parameters'] assert 'actual_satellite_latitude' not in ds.attrs['orbital_parameters'] xr.testing.assert_allclose(ds, vis_refl_exp) @mock.patch( 'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time' ) def test_time_cache(self, interp_acq_time, file_handler): """Test caching of acquisition times.""" dataset_id = make_dataid( name='VIS', resolution=2250, calibration='reflectance' ) info = {} interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims='y') # Cache init file_handler.get_dataset(dataset_id, info) interp_acq_time.assert_called() # Cache hit interp_acq_time.reset_mock() file_handler.get_dataset(dataset_id, info) interp_acq_time.assert_not_called() # Cache miss interp_acq_time.return_value = xr.DataArray([1, 2], dims='y') another_id = make_dataid( name='IR', resolution=4500, calibration='brightness_temperature' ) interp_acq_time.reset_mock() file_handler.get_dataset(another_id, info) interp_acq_time.assert_called() @mock.patch( 'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints' ) def test_angle_cache(self, interp_tiepoints, file_handler): """Test caching of angle datasets.""" dataset_id = make_dataid(name='solar_zenith_angle', resolution=2250) info = {} # Cache init file_handler.get_dataset(dataset_id, info) interp_tiepoints.assert_called() # Cache hit interp_tiepoints.reset_mock() file_handler.get_dataset(dataset_id, info) interp_tiepoints.assert_not_called() # Cache miss another_id = make_dataid(name='solar_zenith_angle', resolution=4500) interp_tiepoints.reset_mock() file_handler.get_dataset(another_id, info) interp_tiepoints.assert_called() @pytest.mark.parametrize( ('name', 'resolution', 'area_exp'), [ ('VIS', 2250, area_vis_exp), ('WV', 4500, area_ir_wv_exp), ('IR', 4500, area_ir_wv_exp), ('quality_pixel_bitmask', 2250, area_vis_exp), ('solar_zenith_angle', 2250, area_vis_exp), ('solar_zenith_angle', 4500, area_ir_wv_exp) ] ) def test_get_area_definition(self, file_handler, name, resolution, area_exp): """Test getting area definitions.""" dataset_id = make_dataid(name=name, resolution=resolution) area = file_handler.get_area_def(dataset_id) a, b = proj4_radius_parameters(area.proj_dict) a_exp, b_exp = proj4_radius_parameters(area_exp.proj_dict) assert a == a_exp assert b == b_exp assert area.width == area_exp.width assert area.height == area_exp.height for key in ['h', 'lon_0', 'proj', 'units']: assert area.proj_dict[key] == area_exp.proj_dict[key] np.testing.assert_allclose(area.area_extent, area_exp.area_extent) def test_calib_exceptions(self, file_handler): """Test calibration exceptions.""" with pytest.raises(KeyError): file_handler.get_dataset( make_dataid(name='solar_zenith_angle', calibration='counts'), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( name='VIS', resolution=2250, calibration='brightness_temperature'), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( name='IR', resolution=4500, calibration='reflectance'), {} ) if isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler): with pytest.raises(KeyError): file_handler.get_dataset( {'name': 'VIS', 'calibration': 'counts'}, {} ) # not available in easy FCDR 
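# --- Illustrative sketch (standalone, not part of the original test module) ---
# The test below tweaks the ``file_handler`` fixture through the custom
# ``file_handler_data`` marker; ``fixture_file_handler`` above reads it with
# ``request.node.get_closest_marker``.  A minimal, self-contained version of
# that marker-driven-fixture pattern could look like the following; the names
# ``handler``, ``handler_data`` and ``strict`` are made up for illustration
# and are not part of satpy.

import pytest


@pytest.fixture(name="handler")
def fixture_handler(request):
    """Build a fake handler whose behaviour depends on an optional marker."""
    strict = True  # default used when the test sets no marker
    marker = request.node.get_closest_marker("handler_data")
    if marker:
        strict = marker.kwargs["strict"]
    return {"strict": strict}


@pytest.mark.handler_data(strict=False)
def test_lenient(handler):
    assert handler["strict"] is False


def test_strict_by_default(handler):
    assert handler["strict"] is True
# --- end of sketch -------------------------------------------------------------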
@pytest.mark.file_handler_data(mask_bad_quality=False) def test_bad_quality_warning(self, file_handler): """Test warning about bad VIS quality.""" file_handler.nc.nc['quality_pixel_bitmask'] = 2 vis = make_dataid(name='VIS', resolution=2250, calibration='reflectance') with pytest.warns(UserWarning): file_handler.get_dataset(vis, {}) def test_file_pattern(self, reader): """Test file pattern matching.""" filenames = [ "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc", "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc", "FIDUCEO_FCDR_L15_MVIRI_MET7-00.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc", "abcde", ] files = reader.select_files_from_pathnames(filenames) # only 3 out of 4 above should match assert len(files) == 3 class TestDatasetWrapper: """Unit tests for DatasetWrapper class.""" def test_reassign_coords(self): """Test reassigning of coordinates. For some reason xarray does not always assign (y, x) coordinates to the high resolution datasets, although they have dimensions (y, x) and coordinates y and x exist. A dataset with these properties seems impossible to create (neither dropping, resetting or deleting coordinates seems to work). Instead use mock as a workaround. """ nc = mock.MagicMock( coords={ 'y': [.1, .2], 'x': [.3, .4] }, dims=('y', 'x') ) nc.__getitem__.return_value = xr.DataArray( [[1, 2], [3, 4]], dims=('y', 'x') ) foo_exp = xr.DataArray( [[1, 2], [3, 4]], dims=('y', 'x'), coords={ 'y': [.1, .2], 'x': [.3, .4] } ) ds = DatasetWrapper(nc) foo = ds['foo'] xr.testing.assert_equal(foo, foo_exp) satpy-0.34.0/satpy/tests/reader_tests/test_netcdf_utils.py000066400000000000000000000200371420401153000240100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.netcdf_utils module.""" import os import unittest import numpy as np try: from satpy.readers.netcdf_utils import NetCDF4FileHandler except ImportError: # fake the import so we can at least run the tests in this file NetCDF4FileHandler = object # type: ignore class FakeNetCDF4FileHandler(NetCDF4FileHandler): """Swap-in NetCDF4 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None, cache_var_size=0, cache_handle=False, extra_file_content=None): """Get fake file content from 'get_test_content'.""" # unused kwargs from the real file handler del auto_maskandscale del xarray_kwargs del cache_var_size del cache_handle if NetCDF4FileHandler is object: raise ImportError("Base 'NetCDF4FileHandler' could not be " "imported.") super(NetCDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) if extra_file_content: self.file_content.update(extra_file_content) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' - 'dataset/dimensions' - '/dimension/my_dim' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestNetCDF4FileHandler(unittest.TestCase): """Test NetCDF4 File Handler Utility class.""" def setUp(self): """Create a test NetCDF4 file.""" from netCDF4 import Dataset with Dataset('test.nc', 'w') as nc: # Create dimensions nc.createDimension('rows', 10) nc.createDimension('cols', 100) # Create Group g1 = nc.createGroup('test_group') # Add datasets ds1_f = g1.createVariable('ds1_f', np.float32, dimensions=('rows', 'cols')) ds1_f[:] = np.arange(10. * 100).reshape((10, 100)) ds1_i = g1.createVariable('ds1_i', np.int32, dimensions=('rows', 'cols')) ds1_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_f = nc.createVariable('ds2_f', np.float32, dimensions=('rows', 'cols')) ds2_f[:] = np.arange(10. 
* 100).reshape((10, 100)) ds2_i = nc.createVariable('ds2_i', np.int32, dimensions=('rows', 'cols')) ds2_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_s = nc.createVariable("ds2_s", np.int8, dimensions=("rows",)) ds2_s[:] = np.arange(10) ds2_sc = nc.createVariable("ds2_sc", np.int8, dimensions=()) ds2_sc[:] = 42 # Add attributes nc.test_attr_str = 'test_string' nc.test_attr_int = 0 nc.test_attr_float = 1.2 nc.test_attr_str_arr = np.array(b"test_string2") g1.test_attr_str = 'test_string' g1.test_attr_int = 0 g1.test_attr_float = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: d.test_attr_str = 'test_string' d.test_attr_int = 0 d.test_attr_float = 1.2 def tearDown(self): """Remove the previously created test file.""" os.remove('test.nc') def test_all_basic(self): """Test everything about the NetCDF4 class.""" import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler file_handler = NetCDF4FileHandler('test.nc', {}, {}) self.assertEqual(file_handler['/dimension/rows'], 10) self.assertEqual(file_handler['/dimension/cols'], 100) for ds in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith('f') else np.int32) self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) self.assertEqual(file_handler[ds + '/dimensions'], ("rows", "cols")) self.assertEqual(file_handler[ds + '/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler[ds + '/attr/test_attr_int'], 0) self.assertEqual(file_handler[ds + '/attr/test_attr_float'], 1.2) test_group = file_handler['test_group'] self.assertTupleEqual(test_group['ds1_i'].shape, (10, 100)) self.assertTupleEqual(test_group['ds1_i'].dims, ('rows', 'cols')) self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') self.assertEqual(file_handler['/attr/test_attr_int'], 0) self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) global_attrs = { 'test_attr_str': 'test_string', 'test_attr_str_arr': 'test_string2', 'test_attr_int': 0, 'test_attr_float': 1.2 } self.assertEqual(file_handler['/attrs'], global_attrs) self.assertIsInstance(file_handler.get('ds2_f')[:], xr.DataArray) self.assertIsNone(file_handler.get('fake_ds')) self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') self.assertTrue('ds2_f' in file_handler) self.assertFalse('fake_ds' in file_handler) self.assertIsNone(file_handler.file_handle) self.assertEqual(file_handler["ds2_sc"], 42) def test_caching(self): """Test that caching works as intended.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler h = NetCDF4FileHandler("test.nc", {}, {}, cache_var_size=1000, cache_handle=True) self.assertIsNotNone(h.file_handle) self.assertTrue(h.file_handle.isopen()) self.assertEqual(sorted(h.cached_file_content.keys()), ["ds2_s", "ds2_sc"]) # with caching, these tests access different lines than without np.testing.assert_array_equal(h["ds2_s"], np.arange(10)) np.testing.assert_array_equal(h["test_group/ds1_i"], np.arange(10 * 100).reshape((10, 100))) # check that root variables can still be read from cached file object, # even if not cached themselves np.testing.assert_array_equal( h["ds2_f"], np.arange(10. 
* 100).reshape((10, 100))) h.__del__() self.assertFalse(h.file_handle.isopen()) def test_filenotfound(self): """Test that error is raised when file not found.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler with self.assertRaises(IOError): NetCDF4FileHandler("/thisfiledoesnotexist.nc", {}, {}) satpy-0.34.0/satpy/tests/reader_tests/test_nucaps.py000066400000000000000000000603401420401153000226170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.nucaps module.""" import datetime import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (120,) DEFAULT_PRES_FILE_SHAPE = (120, 100,) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_PRES_FILE_DATA = np.arange(DEFAULT_PRES_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_PRES_FILE_DATA = np.repeat([DEFAULT_PRES_FILE_DATA], DEFAULT_PRES_FILE_SHAPE[0], axis=0) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[0]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[0]).astype(DEFAULT_FILE_DTYPE) ALL_PRESSURE_LEVELS = [ 0.0161, 0.0384, 0.0769, 0.137, 0.2244, 0.3454, 0.5064, 0.714, 0.9753, 1.2972, 1.6872, 2.1526, 2.7009, 3.3398, 4.077, 4.9204, 5.8776, 6.9567, 8.1655, 9.5119, 11.0038, 12.6492, 14.4559, 16.4318, 18.5847, 20.9224, 23.4526, 26.1829, 29.121, 32.2744, 35.6505, 39.2566, 43.1001, 47.1882, 51.5278, 56.126, 60.9895, 66.1253, 71.5398, 77.2396, 83.231, 89.5204, 96.1138, 103.017, 110.237, 117.777, 125.646, 133.846, 142.385, 151.266, 160.496, 170.078, 180.018, 190.32, 200.989, 212.028, 223.441, 235.234, 247.408, 259.969, 272.919, 286.262, 300, 314.137, 328.675, 343.618, 358.966, 374.724, 390.893, 407.474, 424.47, 441.882, 459.712, 477.961, 496.63, 515.72, 535.232, 555.167, 575.525, 596.306, 617.511, 639.14, 661.192, 683.667, 706.565, 729.886, 753.628, 777.79, 802.371, 827.371, 852.788, 878.62, 904.866, 931.524, 958.591, 986.067, 1013.95, 1042.23, 1070.92, 1100 ] ALL_PRESSURE_LEVELS = np.repeat([ALL_PRESSURE_LEVELS], DEFAULT_PRES_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { '/attr/time_coverage_start': "2020-10-20T12:00:00.5Z", '/attr/time_coverage_end': "2020-10-20T12:00:36Z", '/attr/start_orbit_number': 1, '/attr/end_orbit_number': 2, '/attr/platform_name': 'NPP', '/attr/instrument_name': 'CrIS, ATMS, VIIRS', } for k, units, standard_name in [ ('Solar_Zenith', 'degrees', 'solar_zenith_angle'), 
('Topography', 'meters', ''), ('Land_Fraction', '1', ''), ('Surface_Pressure', 'mb', ''), ('Skin_Temperature', 'Kelvin', 'surface_temperature'), ]: file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/units'] = units file_content[k + '/attr/valid_range'] = (0., 120.) file_content[k + '/attr/_FillValue'] = -9999. if standard_name: file_content[k + '/attr/standard_name'] = standard_name for k, units, standard_name in [ ('Temperature', 'Kelvin', 'air_temperature'), ('Effective_Pressure', 'mb', ''), ('H2O', '1', ''), ('H2O_MR', 'g/g', ''), ('O3', '1', ''), ('O3_MR', '1', ''), ('Liquid_H2O', '1', ''), ('Liquid_H2O_MR', 'g/g', 'cloud_liquid_water_mixing_ratio'), ('CO', '1', ''), ('CO_MR', '1', ''), ('CH4', '1', ''), ('CH4_MR', '1', ''), ('CO2', '1', ''), ('HNO3', '1', ''), ('HNO3_MR', '1', ''), ('N2O', '1', ''), ('N2O_MR', '1', ''), ('SO2', '1', ''), ('SO2_MR', '1', ''), ]: file_content[k] = DEFAULT_PRES_FILE_DATA file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE file_content[k + '/attr/units'] = units file_content[k + '/attr/valid_range'] = (0., 120.) file_content[k + '/attr/_FillValue'] = -9999. if standard_name: file_content[k + '/attr/standard_name'] = standard_name k = 'Pressure' file_content[k] = ALL_PRESSURE_LEVELS file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE file_content[k + '/attr/units'] = 'mb' file_content[k + '/attr/valid_range'] = (0., 2000.) file_content[k + '/attr/_FillValue'] = -9999. k = 'Quality_Flag' file_content[k] = DEFAULT_FILE_DATA.astype(np.int32) file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/valid_range'] = (0, 31) file_content[k + '/attr/_FillValue'] = -9999. k = 'Longitude' file_content[k] = DEFAULT_LON_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/units'] = 'degrees_east' file_content[k + '/attr/valid_range'] = (-180., 180.) file_content[k + '/attr/standard_name'] = 'longitude' file_content[k + '/attr/_FillValue'] = -9999. k = 'Latitude' file_content[k] = DEFAULT_LAT_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/units'] = 'degrees_north' file_content[k + '/attr/valid_range'] = (-90., 90.) file_content[k + '/attr/standard_name'] = 'latitude' file_content[k + '/attr/_FillValue'] = -9999. 
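# Descriptive note (added): the flat ``file_content`` dict built above follows
# the key convention documented in ``FakeNetCDF4FileHandler.get_test_content``
# ('<var>', '<var>/shape', '<var>/attr/<name>', ...).  The code that follows
# converts those raw arrays into xarray DataArrays with named dimensions via
# ``convert_file_content_to_data_array``, and switches to the lower-case
# dimension names ('number_of_FORs', 'number_of_p_levels') when the fake
# filename contains '_v1'.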
attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') cris_fors_dim_name = 'Number_of_CrIS_FORs' pressure_levels_dim_name = 'Number_of_P_Levels' if ('_v1' in filename): cris_fors_dim_name = 'number_of_FORs' pressure_levels_dim_name = 'number_of_p_levels' convert_file_content_to_data_array( file_content, attrs=attrs, dims=('z', cris_fors_dim_name, pressure_levels_dim_name)) return file_content class TestNUCAPSReader(unittest.TestCase): """Test NUCAPS Reader.""" yaml_file = "nucaps.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_init_with_kwargs(self): """Test basic init with extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, mask_surface=False) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables, fh_kwargs={'mask_surface': False}) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Solar_Zenith', 'Topography', 'Land_Fraction', 'Surface_Pressure', 'Skin_Temperature', 'Quality_Flag', ]) self.assertEqual(len(datasets), 6) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) # self.assertEqual(v.info['units'], 'degrees') self.assertEqual(v.ndim, 1) self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs'])) self.assertEqual(type(v.attrs['start_time']), datetime.datetime) self.assertEqual(type(v.attrs['end_time']), datetime.datetime) def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature', 'Effective_Pressure', 'H2O', 'H2O_MR', 'O3', 'O3_MR', 'Liquid_H2O', 'Liquid_H2O_MR', 'CO', 'CO_MR', 'CH4', 'CH4_MR', 'CO2', 'HNO3', 'HNO3_MR', 'N2O', 'N2O_MR', 'SO2', 'SO2_MR', ]) self.assertEqual(len(datasets), 19) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) self.assertEqual(v.ndim, 2) if np.issubdtype(v.dtype, np.floating): assert '_FillValue' not in v.attrs def 
test_load_multiple_files_pressure(self): """Test loading Temperature from multiple input files.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', 'NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, (DEFAULT_PRES_FILE_SHAPE[0], 6)) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 
'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 2) t_ds = datasets['Temperature'] self.assertEqual(t_ds.ndim, 2) self.assertTupleEqual(t_ds.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) pl_ds = datasets['Pressure_Levels'] self.assertTupleEqual(pl_ds.shape, (1,)) class TestNUCAPSScienceEDRReader(unittest.TestCase): """Test NUCAPS Science EDR Reader.""" yaml_file = "nucaps.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Topography', 'Land_Fraction', 'Surface_Pressure', 'Skin_Temperature', 'Quality_Flag', ]) self.assertEqual(len(datasets), 5) for v in datasets.values(): self.assertEqual(v.ndim, 1) self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs'])) self.assertEqual(type(v.attrs['start_time']), datetime.datetime) self.assertEqual(type(v.attrs['end_time']), datetime.datetime) def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature', 'H2O', 'H2O_MR', 'O3', 'O3_MR', 'CO', 'CO_MR', 'CH4', 'CH4_MR', 'CO2', 'HNO3', 'HNO3_MR', 'N2O', 'N2O_MR', 'SO2', 'SO2_MR', ]) self.assertEqual(len(datasets), 16) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) self.assertEqual(v.ndim, 2) def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" 
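# Usage note (added, illustrative): as in the operational-EDR tests above, the
# reader exposes one dataset name per pressure level via
# ``r.pressure_dataset_names['Temperature']`` (100 names for the fake pressure
# grid in ``ALL_PRESSURE_LEVELS``).  ``pressure_levels=True`` keeps every
# level, a ``(min, max)`` tuple keeps only levels inside that range (6 levels
# between 100 and 150 mb here), and a single-element tuple such as
# ``(103.017,)`` selects one specific level.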
from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, (DEFAULT_PRES_FILE_SHAPE[0], 6)) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 2) t_ds = datasets['Temperature'] 
self.assertEqual(t_ds.ndim, 2) self.assertTupleEqual(t_ds.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) pl_ds = datasets['Pressure_Levels'] self.assertTupleEqual(pl_ds.shape, (1,)) satpy-0.34.0/satpy/tests/reader_tests/test_nwcsaf_msg.py000066400000000000000000000443601420401153000234610ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittests for NWC SAF MSG (2013) reader.""" import os import tempfile import unittest from collections import OrderedDict import h5py import numpy as np CTYPE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) CTYPE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 20).astype(np.uint8) CTYPE_TEST_ARRAY[1000:1010, 1000:1010] = CTYPE_TEST_FRAME CTTH_HEIGHT_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) _CTTH_HEIGHT_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 80).astype(np.uint8) CTTH_HEIGHT_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_HEIGHT_TEST_FRAME CTTH_HEIGHT_TEST_FRAME_RES = _CTTH_HEIGHT_TEST_FRAME.astype(np.float32) * 200 - 2000 CTTH_HEIGHT_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_HEIGHT_TEST_FRAME_RES[1, 0:3] = np.nan CTTH_PRESSURE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) _CTTH_PRESSURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 54).astype(np.uint8) CTTH_PRESSURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_PRESSURE_TEST_FRAME CTTH_PRESSURE_TEST_FRAME_RES = _CTTH_PRESSURE_TEST_FRAME.astype(np.float32) * 25 - 250 CTTH_PRESSURE_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_PRESSURE_TEST_FRAME_RES[1, 0:9] = np.nan CTTH_TEMPERATURE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. 
* 140).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME[8, 5] = 255 CTTH_TEMPERATURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_TEMPERATURE_TEST_FRAME CTTH_TEMPERATURE_TEST_FRAME_RES = _CTTH_TEMPERATURE_TEST_FRAME.astype(np.float32) * 1.0 + 150 CTTH_TEMPERATURE_TEST_FRAME_RES[8, 5] = np.nan fake_ct = { "01-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [100, 100, 100], [0, 120, 0], [0, 0, 0], [250, 190, 250], [220, 160, 220], [255, 150, 0], [255, 100, 0], [255, 220, 0], [255, 180, 0], [255, 255, 140], [240, 240, 0], [250, 240, 200], [215, 215, 150], [255, 255, 255], [230, 230, 230], [0, 80, 215], [0, 180, 230], [0, 240, 240], [90, 200, 160], [200, 0, 200], [95, 60, 30], ], dtype=np.uint8, ), }, "02-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [[100, 100, 100], [255, 100, 0], [0, 80, 215], [95, 60, 30]], dtype=np.uint8 ), }, "CT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PALETTE": " 01-PALETTE", "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (CTYPE_TEST_ARRAY), }, "CT_PHASE": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT_PHASE", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PALETTE": " 02-PALETTE", "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (np.random.rand(1856, 3712) * 255).astype(np.uint8), }, "CT_QUALITY": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT_QUALITY", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (np.random.rand(1856, 3712) * 65535).astype(np.uint16), }, "attrs": { "CFAC": 13642337, "COFF": 1856, "GP_SC_ID": 323, "IMAGE_ACQUISITION_TIME": b"201611090800", "LFAC": 13642337, "LOFF": 1856, "NB_PARAMETERS": 3, "NC": 3712, "NL": 1856, "NOMINAL_PRODUCT_TIME": b"201611090814", "PACKAGE": b"SAFNWC/MSG", "PRODUCT_ALGORITHM_VERSION": b" 2.2", "PRODUCT_NAME": b"CT__", "PROJECTION_NAME": b"GEOS<+000.0>", "REGION_NAME": b"MSG-N", "SAF": b"NWC", "SGS_PRODUCT_COMPLETENESS": 99, "SGS_PRODUCT_QUALITY": 79, "SPECTRAL_CHANNEL_ID": 0, }, } fake_ct = OrderedDict(sorted(fake_ct.items(), key=lambda t: t[0])) fake_ctth = { "01-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [0, 0, 0], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [239, 239, 223], [239, 239, 223], [238, 214, 210], [238, 214, 210], [0, 255, 255], [0, 255, 255], [0, 216, 255], [0, 216, 255], [0, 178, 255], [0, 178, 255], [0, 140, 48], [0, 140, 48], [0, 255, 0], [0, 255, 0], [153, 255, 0], [153, 255, 0], [178, 255, 0], [178, 255, 0], [216, 255, 0], [216, 255, 0], [255, 255, 0], [255, 255, 0], [255, 216, 0], [255, 216, 0], [255, 164, 0], [255, 164, 0], [255, 102, 0], [255, 102, 0], [255, 76, 0], [255, 76, 0], [178, 51, 0], [178, 51, 0], [153, 20, 47], [153, 20, 47], [126, 0, 43], [126, 0, 43], [255, 0, 216], [255, 0, 216], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 
128], [255, 0, 128], [255, 0, 128], ], dtype=np.uint8, ), }, "02-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": (np.random.rand(128, 3) * 255).astype(np.uint8), }, "03-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": (np.random.rand(256, 3) * 255).astype(np.uint8), }, "04-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [78, 119, 145], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [12, 12, 12], [24, 24, 24], [36, 36, 36], [48, 48, 48], [60, 60, 60], [72, 72, 72], [84, 84, 84], [96, 96, 96], [108, 108, 108], [120, 120, 120], [132, 132, 132], [144, 144, 144], [156, 156, 156], [168, 168, 168], [180, 180, 180], [192, 192, 192], [204, 204, 204], [216, 216, 216], [228, 228, 228], [240, 240, 240], [240, 240, 240], ], dtype=np.uint8, ), }, "CTTH_EFFECT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_EFFECT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -50.0, "PALETTE": " 04-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 5.0, }, "value": (np.random.rand(1856, 3712) * 255).astype(np.uint8), }, "CTTH_HEIGHT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_HEIGHT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -2000.0, "PALETTE": " 02-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 200.0, }, "value": (CTTH_HEIGHT_TEST_ARRAY), }, "CTTH_PRESS": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_PRESS", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -250.0, "PALETTE": " 01-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 25.0, }, "value": (CTTH_PRESSURE_TEST_ARRAY), }, "CTTH_QUALITY": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_QUALITY", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PRODUCT": b"CTTH", "SCALING_FACTOR": 1.0, }, "value": (np.random.rand(1856, 3712) * 65535).astype(np.uint16), }, "CTTH_TEMPER": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_TEMPER", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 150.0, "PALETTE": " 03-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 1.0, }, "value": (CTTH_TEMPERATURE_TEST_ARRAY), }, "attrs": { "CFAC": 13642337, "COFF": 1856, "GP_SC_ID": 323, "IMAGE_ACQUISITION_TIME": b"201611090800", "LFAC": 13642337, "LOFF": 1856, "NB_PARAMETERS": 5, "NC": 3712, "NL": 1856, "NOMINAL_PRODUCT_TIME": b"201611090816", "PACKAGE": b"SAFNWC/MSG", "PRODUCT_ALGORITHM_VERSION": b" 2.2", "PRODUCT_NAME": b"CTTH", "PROJECTION_NAME": b"GEOS<+000.0>", "REGION_NAME": b"MSG-N", "SAF": b"NWC", "SGS_PRODUCT_COMPLETENESS": 87, "SGS_PRODUCT_QUALITY": 69, "SPECTRAL_CHANNEL_ID": 0, }, } fake_ctth = OrderedDict(sorted(fake_ctth.items(), key=lambda t: t[0])) PROJ_KM = { "gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", "gdal_xgeo_up_left": -5569500.0, "gdal_ygeo_up_left": 5437500.0, "gdal_xgeo_low_right": 5566500.0, "gdal_ygeo_low_right": 2653500.0, } PROJ = { "gdal_projection": "+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h=35785863.000", "gdal_xgeo_up_left": 
-5569500.0, "gdal_ygeo_up_left": 5437500.0, "gdal_xgeo_low_right": 5566500.0, "gdal_ygeo_low_right": 2653500.0, } AREA_DEF_DICT = { "proj_dict": {'proj': 'geos', 'lon_0': 0, 'h': 35785831, 'x_0': 0, 'y_0': 0, 'a': 6378169, 'b': 6356583.8, 'units': 'm', 'no_defs': None, 'type': 'crs'}, "area_id": 'MSG-N', "x_size": 3712, "y_size": 1856, "area_extent": (-5570248.2825, 1501.0099, 5567247.8793, 5570247.8784) } class TestH5NWCSAF(unittest.TestCase): """Test the nwcsaf msg reader.""" def setUp(self): """Set up the tests.""" self.filename_ct = os.path.join( tempfile.gettempdir(), "SAFNWC_MSG3_CT___201611090800_MSG-N_______.PLAX.CTTH.0.h5", ) self.filename_ctth = os.path.join( tempfile.gettempdir(), "SAFNWC_MSG3_CTTH_201611090800_MSG-N_______.PLAX.CTTH.0.h5", ) def fill_h5(root, stuff): for key, val in stuff.items(): if key in ["value", "attrs"]: continue if "value" in val: root[key] = val["value"] else: grp = root.create_group(key) fill_h5(grp, stuff[key]) if "attrs" in val: for attrs, val in val["attrs"].items(): if isinstance(val, str) and val.startswith( "" ): root[key].attrs[attrs] = root[val[24:]].ref else: root[key].attrs[attrs] = val h5f = h5py.File(self.filename_ct, mode="w") fill_h5(h5f, fake_ct) for attr, val in fake_ct["attrs"].items(): h5f.attrs[attr] = val h5f.close() h5f = h5py.File(self.filename_ctth, mode="w") fill_h5(h5f, fake_ctth) for attr, val in fake_ctth["attrs"].items(): h5f.attrs[attr] = val h5f.close() def test_get_area_def(self): """Get the area definition.""" from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy.tests.utils import make_dataid filename_info = {} filetype_info = {} dsid = make_dataid(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) area_def = test.get_area_def(dsid) aext_res = AREA_DEF_DICT['area_extent'] for i in range(4): self.assertAlmostEqual(area_def.area_extent[i], aext_res[i], 4) proj_dict = AREA_DEF_DICT['proj_dict'] self.assertEqual(proj_dict['proj'], area_def.proj_dict['proj']) # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) self.assertEqual(AREA_DEF_DICT['x_size'], area_def.width) self.assertEqual(AREA_DEF_DICT['y_size'], area_def.height) self.assertEqual(AREA_DEF_DICT['area_id'], area_def.area_id) def test_get_dataset(self): """Retrieve datasets from a NWCSAF msgv2013 hdf5 file.""" from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy.tests.utils import make_dataid filename_info = {} filetype_info = {} dsid = make_dataid(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CT"}) self.assertEqual(ds.shape, (1856, 3712)) self.assertEqual(ds.dtype, np.uint8) np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTYPE_TEST_FRAME) filename_info = {} filetype_info = {} dsid = make_dataid(name="ctth_alti") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_HEIGHT"}) self.assertEqual(ds.shape, (1856, 3712)) self.assertEqual(ds.dtype, np.float32) np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_HEIGHT_TEST_FRAME_RES) filename_info = {} filetype_info = {} dsid = make_dataid(name="ctth_pres") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_PRESS"}) self.assertEqual(ds.shape, (1856, 3712)) self.assertEqual(ds.dtype, np.float32) 
np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_PRESSURE_TEST_FRAME_RES) filename_info = {} filetype_info = {} dsid = make_dataid(name="ctth_tempe") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_TEMPER"}) self.assertEqual(ds.shape, (1856, 3712)) self.assertEqual(ds.dtype, np.float32) np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_TEMPERATURE_TEST_FRAME_RES) def tearDown(self): """Destroy.""" try: os.remove(self.filename_ct) os.remove(self.filename_ctth) except OSError: pass satpy-0.34.0/satpy/tests/reader_tests/test_nwcsaf_nc.py000066400000000000000000000320441420401153000232670ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittests for NWC SAF reader.""" import unittest from unittest import mock import numpy as np import pytest import xarray as xr PROJ_KM = {'gdal_projection': '+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000', 'gdal_xgeo_up_left': -5569500.0, 'gdal_ygeo_up_left': 5437500.0, 'gdal_xgeo_low_right': 5566500.0, 'gdal_ygeo_low_right': 2653500.0} PROJ = {'gdal_projection': '+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h=35785863.000', 'gdal_xgeo_up_left': -5569500.0, 'gdal_ygeo_up_left': 5437500.0, 'gdal_xgeo_low_right': 5566500.0, 'gdal_ygeo_low_right': 2653500.0} class TestNcNWCSAF(unittest.TestCase): """Test the NcNWCSAF reader.""" @mock.patch('satpy.readers.nwcsaf_nc.unzip_file') @mock.patch('satpy.readers.nwcsaf_nc.xr.open_dataset') def setUp(self, xr_open_dataset, unzip): """Set up the test case.""" from satpy.readers.nwcsaf_nc import NcNWCSAF xr_open_dataset.return_value = xr.Dataset({"nx": xr.DataArray(), "ny": xr.DataArray()}, attrs={"source": "bla", "satellite_identifier": "blu"}) self.fake_dataset = xr_open_dataset.return_value unzip.return_value = '' self.filehandler_class = NcNWCSAF self.fh = self.filehandler_class('filename', {}, {}) def test_sensor_name(self): """Test that the correct sensor name is being set.""" self.fh.set_platform_and_sensor(platform_name='Metop-B') self.assertEqual(self.fh.sensor, set(['avhrr-3'])) self.assertEqual(self.fh.sensor_names, set(['avhrr-3'])) self.fh.set_platform_and_sensor(platform_name='NOAA-20') self.assertEqual(self.fh.sensor, set(['viirs'])) self.assertEqual(self.fh.sensor_names, set(['viirs'])) self.fh.set_platform_and_sensor(platform_name='Himawari-8') self.assertEqual(self.fh.sensor, set(['ahi'])) self.assertEqual(self.fh.sensor_names, set(['ahi'])) self.fh.set_platform_and_sensor(sat_id='GOES16') self.assertEqual(self.fh.sensor, set(['abi'])) self.assertEqual(self.fh.sensor_names, set(['abi'])) self.fh.set_platform_and_sensor(platform_name='GOES-17') self.assertEqual(self.fh.sensor, set(['abi'])) self.assertEqual(self.fh.sensor_names, set(['abi'])) self.fh.set_platform_and_sensor(sat_id='MSG4') 
self.assertEqual(self.fh.sensor, set(['seviri'])) self.fh.set_platform_and_sensor(platform_name='Meteosat-11') self.assertEqual(self.fh.sensor, set(['seviri'])) self.assertEqual(self.fh.sensor_names, set(['seviri'])) def test_get_area_def(self): """Test that get_area_def() returns proper area.""" dsid = {'name': 'foo'} self.fh.nc[dsid['name']] = xr.DataArray(np.zeros((5, 10))) # a, b and h in kilometers self.fh.nc.attrs = PROJ_KM _check_area_def(self.fh.get_area_def(dsid)) # a, b and h in meters self.fh.nc.attrs = PROJ _check_area_def(self.fh.get_area_def(dsid)) def test_scale_dataset_attr_removal(self): """Test the scaling of the dataset and removal of obsolete attributes.""" import numpy as np import xarray as xr attrs = {'scale_factor': np.array(10), 'add_offset': np.array(20)} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [30, 40, 50]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) def test_scale_dataset_floating(self): """Test the scaling of the dataset with floating point values.""" import numpy as np import xarray as xr attrs = {'scale_factor': np.array(1.5), 'add_offset': np.array(2.5), '_FillValue': 1} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [np.nan, 5.5, 7]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) attrs = {'scale_factor': np.array(1.5), 'add_offset': np.array(2.5), 'valid_min': 1.1} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [np.nan, 5.5, 7]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) attrs = {'scale_factor': np.array(1.5), 'add_offset': np.array(2.5), 'valid_max': 2.1} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [4, 5.5, np.nan]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) attrs = {'scale_factor': np.array(1.5), 'add_offset': np.array(2.5), 'valid_range': (1.1, 2.1)} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [np.nan, 5.5, np.nan]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) # CTTH NWCSAF/Geo v2016/v2018: attrs = {'scale_factor': np.array(1.), 'add_offset': np.array(-2000.), 'valid_range': (0., 27000.)} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [-1999., -1998., -1997.]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) self.assertEqual(var.attrs['valid_range'][0], -2000.) self.assertEqual(var.attrs['valid_range'][1], 25000.) 
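# --- Illustrative sketch (standalone, not part of the original test module) ---
# The expectations in the ``test_scale_dataset_*`` methods above follow the
# usual CF-style decoding that ``NcNWCSAF.scale_dataset`` is tested against:
# mask raw values that equal ``_FillValue`` or fall outside ``valid_min``/
# ``valid_max``/``valid_range``, apply ``raw * scale_factor + add_offset``,
# drop the scaling attributes and rescale ``valid_range`` itself.  The helper
# below is a simplified stand-in written only to mirror those expectations;
# it is not the reader's actual implementation.

import numpy as np
import xarray as xr


def decode_cf_style(var: xr.DataArray) -> xr.DataArray:
    """Apply fill/valid-range masking and linear scaling to a DataArray."""
    attrs = dict(var.attrs)
    scale = attrs.pop("scale_factor", 1)
    offset = attrs.pop("add_offset", 0)
    fill = attrs.get("_FillValue")
    vmin = attrs.get("valid_min")
    vmax = attrs.get("valid_max")
    if "valid_range" in attrs:
        vmin, vmax = attrs["valid_range"]
    if fill is not None:
        var = var.where(var != fill)
    if vmin is not None:
        var = var.where(var >= vmin)
    if vmax is not None:
        var = var.where(var <= vmax)
    var = var * scale + offset
    if "valid_range" in attrs:
        attrs["valid_range"] = np.array([vmin, vmax]) * scale + offset
    var.attrs = attrs
    return var


# Example matching ``test_scale_dataset_floating`` above:
# decode_cf_style(xr.DataArray([1, 2, 3],
#                              attrs={"scale_factor": np.array(1.5),
#                                     "add_offset": np.array(2.5),
#                                     "_FillValue": 1}))
# -> values [nan, 5.5, 7.0] with the scaling attributes removed.
# --- end of sketch -------------------------------------------------------------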
def test_get_dataset_scales_and_offsets(self): """Test that get_dataset() returns scaled and offseted data.""" dsid = {'name': 'cpp_cot'} scale = 4 offset = 8 the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float), "add_offset": np.array(offset, dtype=float)}) self.fh.nc[dsid['name']] = the_array info = dict(name="cpp_cot", file_type="nc_nwcsaf_cpp") res = self.fh.get_dataset(dsid, info) np.testing.assert_allclose(res, the_array * scale + offset) def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self): """Test that get_dataset() returns scaled palette_meanings while another dataset as scaling source.""" dsid = {'name': 'cpp_cot'} scale = 4 offset = 8 array = xr.DataArray(np.ones((5, 3)), attrs={"palette_meanings": "1 2 3 4", "fill_value_color": (0, 0, 0)}) self.fh.nc[dsid['name']] = array so_array = xr.DataArray(np.ones((10, 10)), attrs={"scale_factor": np.array(scale, dtype=float), "add_offset": np.array(offset, dtype=float)}, dims=["lines", "colors"]) info = dict(name="cpp_cot", file_type="nc_nwcsaf_cpp", scale_offset_dataset="scaleoffset") self.fh.nc["scaleoffset"] = so_array res = self.fh.get_dataset(dsid, info) np.testing.assert_allclose(res.attrs["palette_meanings"], np.arange(5) * scale + offset) def test_get_dataset_raises_when_dataset_missing(self): """Test that get_dataset() raises an error when the requested dataset is missing.""" dsid = {'name': 'cpp_cot'} info = dict(name="cpp_cot", file_type="nc_nwcsaf_cpp") with pytest.raises(KeyError): self.fh.get_dataset(dsid, info) def test_get_dataset_uses_file_key_if_present(self): """Test that get_dataset() uses a file_key if present.""" dsid_cpp = {'name': 'cpp_cot'} dsid_cmic = {'name': 'cmic_cot'} scale = 4 offset = 8 the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float), "add_offset": np.array(offset, dtype=float)}) file_key = "cmic_cot" self.fh.nc[file_key] = the_array info_cpp = dict(name="cpp_cot", file_key=file_key, file_type="nc_nwcsaf_cpp") res_cpp = self.fh.get_dataset(dsid_cpp, info_cpp) info_cmic = dict(name="cmic_cot", file_type="nc_nwcsaf_cpp") res_cmic = self.fh.get_dataset(dsid_cmic, info_cmic) np.testing.assert_allclose(res_cpp, res_cmic) class TestNcNWCSAFFileKeyPrefix(unittest.TestCase): """Test the NcNWCSAF reader when using a file key prefix.""" @mock.patch('satpy.readers.nwcsaf_nc.unzip_file') @mock.patch('satpy.readers.nwcsaf_nc.xr.open_dataset') def setUp(self, xr_open_dataset, unzip): """Set up the test case.""" from satpy.readers.nwcsaf_nc import NcNWCSAF xr_open_dataset.return_value = xr.Dataset({"nx": xr.DataArray(), "ny": xr.DataArray()}, attrs={"source": "bla", "satellite_identifier": "blu"}) self.fake_dataset = xr_open_dataset.return_value unzip.return_value = '' self.filehandler_class = NcNWCSAF self.file_key_prefix = "cmic_" self.fh = self.filehandler_class('filename', {}, {"file_key_prefix": self.file_key_prefix}) def test_get_dataset_uses_file_key_prefix(self): """Test that get_dataset() uses a file_key_prefix.""" dsid_cpp = {'name': 'cpp_cot'} dsid_cmic = {'name': 'cmic_cot'} scale = 4 offset = 8 the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float), "add_offset": np.array(offset, dtype=float)}) file_key = "cot" self.fh.nc[self.file_key_prefix + file_key] = the_array info_cpp = dict(name="cpp_cot", file_key=file_key, file_type="nc_nwcsaf_cpp") res_cpp = self.fh.get_dataset(dsid_cpp, info_cpp) info_cmic = dict(name="cmic_cot", file_type="nc_nwcsaf_cpp") 
res_cmic = self.fh.get_dataset(dsid_cmic, info_cmic) np.testing.assert_allclose(res_cpp, res_cmic) def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self): """Test that get_dataset() returns scaled palette_meanings using another dataset as scaling source.""" dsid = {'name': 'cpp_cot_pal'} scale = 4 offset = 8 array = xr.DataArray(np.ones((5, 3)), attrs={"palette_meanings": "1 2 3 4", "fill_value_color": (0, 0, 0)}) self.fh.nc[dsid['name']] = array so_array = xr.DataArray(np.ones((10, 10)), attrs={"scale_factor": np.array(scale, dtype=float), "add_offset": np.array(offset, dtype=float)}, dims=["lines", "colors"]) info = dict(name="cpp_cot_pal", file_type="nc_nwcsaf_cpp", scale_offset_dataset="scaleoffset") self.fh.nc[self.file_key_prefix + "scaleoffset"] = so_array res = self.fh.get_dataset(dsid, info) np.testing.assert_allclose(res.attrs["palette_meanings"], np.arange(5) * scale + offset) def _check_area_def(area_definition): correct_h = float(PROJ['gdal_projection'].split('+h=')[-1]) correct_a = float(PROJ['gdal_projection'].split('+a=')[-1].split()[0]) assert area_definition.proj_dict['h'] == correct_h assert area_definition.proj_dict['a'] == correct_a assert area_definition.proj_dict['units'] == 'm' correct_extent = (PROJ["gdal_xgeo_up_left"], PROJ["gdal_ygeo_low_right"], PROJ["gdal_xgeo_low_right"], PROJ["gdal_ygeo_up_left"]) assert area_definition.area_extent == correct_extent satpy-0.34.0/satpy/tests/reader_tests/test_olci_nc.py000066400000000000000000000232671420401153000227430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.olci_nc module.""" import unittest import unittest.mock as mock class TestOLCIReader(unittest.TestCase): """Test various olci_nc filehandlers.""" @mock.patch('xarray.open_dataset') def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" import xarray as xr from satpy.readers.olci_nc import NCOLCI1B, NCOLCI2, NCOLCIBase, NCOLCICal, NCOLCIChannelBase, NCOLCIGeo from satpy.tests.utils import make_dataid cal_data = xr.Dataset( { 'solar_flux': (('bands'), [0, 1, 2]), 'detector_index': (('bands'), [0, 1, 2]), }, {'bands': [0, 1, 2], }, ) ds_id = make_dataid(name='Oa01', calibration='reflectance') ds_id2 = make_dataid(name='wsqf', calibration='reflectance') filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} test = NCOLCIBase('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCICal('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCIGeo('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCIChannelBase('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() cal = mock.Mock() cal.nc = cal_data test = NCOLCI1B('somedir/somefile.nc', filename_info, 'c', cal) test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, {'nc_key': 'the_key'}) test.get_dataset(ds_id2, {'nc_key': 'the_key'}) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch('xarray.open_dataset') def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} open_file = mock.MagicMock() file_handler = NCOLCIBase(open_file, filename_info, 'c') # deepcode ignore W0104: This is a property that is actually a function call. 
file_handler.nc # pylint: disable=W0104 mocked_open_dataset.assert_called() open_file.open.assert_called() assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], np.array([1 << x for x in range(30)]).reshape(5, 6))}, coords={'rows': np.arange(5), 'columns': np.arange(6)}) ds_id = make_dataid(name='mask') filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') res = test.get_dataset(ds_id, {'nc_key': 'mask'}) self.assertEqual(res.dtype, np.dtype('bool')) @mock.patch('xarray.open_dataset') def test_olci_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr from satpy.readers.olci_nc import NCOLCIAngles from satpy.tests.utils import make_dataid attr_dict = { 'ac_subsampling_factor': 1, 'al_subsampling_factor': 2, } mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'SZA': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'OAA': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'OZA': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6))}, coords={'rows': np.arange(5), 'columns': np.arange(6)}, attrs=attr_dict) filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} ds_id = make_dataid(name='solar_azimuth_angle') ds_id2 = make_dataid(name='satellite_zenith_angle') test = NCOLCIAngles('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch('xarray.open_dataset') def test_olci_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr from satpy.readers.olci_nc import NCOLCIMeteo from satpy.tests.utils import make_dataid attr_dict = { 'ac_subsampling_factor': 1, 'al_subsampling_factor': 2, } data = {'humidity': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'total_ozone': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'sea_level_pressure': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, coords={'rows': np.arange(5), 'columns': np.arange(6)}, attrs=attr_dict) filename_info = {'mission_id': 'S3A', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0} ds_id = make_dataid(name='humidity') ds_id2 = make_dataid(name='total_ozone') test = NCOLCIMeteo('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() class TestBitFlags(unittest.TestCase): """Test the bitflag reading.""" def test_bitflags(self): """Test the BitFlags class.""" from functools import reduce import numpy as np from 
satpy.readers.olci_nc import BitFlags flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN', 'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS', 'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL', 'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2', 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', 'WHITE_SCATT', 'LOWRW', 'HIGHRW'] bits = np.array([1 << x for x in range(len(flag_list))]) bflags = BitFlags(bits) items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] mask = reduce(np.logical_or, [bflags[item] for item in items]) expected = np.array([True, False, True, True, True, True, False, False, True, True, False, False, False, False, False, False, False, True, False, True, False, False, False, True, True, False, False, True, False]) self.assertTrue(all(mask == expected)) satpy-0.34.0/satpy/tests/reader_tests/test_omps_edr.py000066400000000000000000000325121420401153000231360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.omps_edr module.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = {} attrs = [] if 'SO2NRT' in filename: k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM' file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/ScaleFactor'] = 1.1 file_content[k + '/attr/Offset'] = 0.1 file_content[k + '/attr/MissingValue'] = -1 file_content[k + '/attr/Title'] = 'Vertical Column Amount SO2 (TRM)' file_content[k + '/attr/Units'] = 'D.U.' 
file_content[k + '/attr/ValidRange'] = (-10, 2000) k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude' file_content[k] = DEFAULT_LON_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/ScaleFactor'] = 1.1 file_content[k + '/attr/Offset'] = 0.1 file_content[k + '/attr/Units'] = 'deg' file_content[k + '/attr/MissingValue'] = -1 file_content[k + '/attr/Title'] = 'Geodetic Longitude' file_content[k + '/attr/ValidRange'] = (-180, 180) k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude' file_content[k] = DEFAULT_LAT_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/ScaleFactor'] = 1.1 file_content[k + '/attr/Offset'] = 0.1 file_content[k + '/attr/Units'] = 'deg' file_content[k + '/attr/MissingValue'] = -1 file_content[k + '/attr/Title'] = 'Geodetic Latitude' file_content[k + '/attr/ValidRange'] = (-90, 90) elif 'NMSO2' in filename: file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE file_content['GEOLOCATION_DATA/Longitude/attr/valid_max'] = 180 file_content['GEOLOCATION_DATA/Longitude/attr/valid_min'] = -180 file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -1.26765e+30 file_content['GEOLOCATION_DATA/Longitude/attr/long_name'] = 'Longitude' file_content['GEOLOCATION_DATA/Longitude/attr/standard_name'] = 'longitude' file_content['GEOLOCATION_DATA/Longitude/attr/units'] = 'degrees_east' file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE file_content['GEOLOCATION_DATA/Latitude/attr/valid_max'] = 90 file_content['GEOLOCATION_DATA/Latitude/attr/valid_min'] = -90 file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -1.26765e+30 file_content['GEOLOCATION_DATA/Latitude/attr/long_name'] = 'Latitude' file_content['GEOLOCATION_DATA/Latitude/attr/standard_name'] = 'latitude' file_content['GEOLOCATION_DATA/Latitude/attr/units'] = 'degress_north' k = 'SCIENCE_DATA/ColumnAmountSO2_TRM' file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/_FillValue'] = -1.26765e+30 file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRM)' file_content[k + '/attr/units'] = 'DU' file_content[k + '/attr/valid_max'] = 2000 file_content[k + '/attr/valid_min'] = -10 k = 'SCIENCE_DATA/ColumnAmountSO2_STL' file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/_FillValue'] = -1.26765e+30 file_content[k + '/attr/long_name'] = 'Column Amount SO2 (STL)' file_content[k + '/attr/units'] = 'DU' k = 'SCIENCE_DATA/ColumnAmountSO2_TRL' file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/_FillValue'] = -1.26765e+30 file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRL)' file_content[k + '/attr/units'] = 'DU' file_content[k + '/attr/valid_max'] = 2000 file_content[k + '/attr/valid_min'] = -10 file_content[k + '/attr/DIMENSION_LIST'] = [10, 10] attrs = ['_FillValue', 'long_name', 'units', 'valid_max', 'valid_min', 'DIMENSION_LIST'] k = 'SCIENCE_DATA/ColumnAmountSO2_TRU' file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRU)' file_content[k + '/attr/units'] = 'DU' file_content[k + '/attr/valid_max'] = 2000 file_content[k + '/attr/valid_min'] = -10 # Dataset with out unit k = 'SCIENCE_DATA/ColumnAmountSO2_PBL' 
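            # Unlike the other SO2 variables above, this PBL entry is deliberately built
            # without a 'units' attribute; test_basic_load_so2() below relies on that to
            # check that the reader refuses to load 'tcso2_pbl_sampo'.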
file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/_FillValue'] = -1.26765e+30 file_content[k + '/attr/long_name'] = 'Column Amount SO2 (PBL)' file_content[k + '/attr/valid_max'] = 2000 file_content[k + '/attr/valid_min'] = -10 else: for k in ['Reflectivity331', 'UVAerosolIndex']: k = 'SCIENCE_DATA/' + k file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/Units'] = 'Unitless' if k == 'UVAerosolIndex': file_content[k + '/attr/ValidRange'] = (-30, 30) file_content[k + '/attr/Title'] = 'UV Aerosol Index' else: file_content[k + '/attr/ValidRange'] = (-0.15, 1.15) file_content[k + '/attr/Title'] = 'Effective Surface Reflectivity at 331 nm' file_content[k + '/attr/_FillValue'] = -1. file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE file_content['GEOLOCATION_DATA/Longitude/attr/ValidRange'] = (-180, 180) file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -999. file_content['GEOLOCATION_DATA/Longitude/attr/Title'] = 'Geodetic Longitude' file_content['GEOLOCATION_DATA/Longitude/attr/Units'] = 'deg' file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE file_content['GEOLOCATION_DATA/Latitude/attr/ValidRange'] = (-90, 90) file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -999. file_content['GEOLOCATION_DATA/Latitude/attr/Title'] = 'Geodetic Latitude' file_content['GEOLOCATION_DATA/Latitude/attr/Units'] = 'deg' convert_file_content_to_data_array(file_content, attrs) return file_content class TestOMPSEDRReader(unittest.TestCase): """Test OMPS EDR Reader.""" yaml_file = "omps_edr.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.omps_edr import EDREOSFileHandler, EDRFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(EDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True self.p2 = mock.patch.object(EDREOSFileHandler, '__bases__', (EDRFileHandler,)) self.fake_handler2 = self.p2.start() self.p2.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p2.stop() self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_basic_load_so2(self): """Test basic load of so2 datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) ds = 
r.load(['so2_trm']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['resolution'], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) ds = r.load(['tcso2_trm_sampo']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['resolution'], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) ds = r.load(['tcso2_stl_sampo']) self.assertEqual(len(ds), 0) # Dataset without _FillValue ds = r.load(['tcso2_tru_sampo']) self.assertEqual(len(ds), 1) # Dataset without unit ds = r.load(['tcso2_pbl_sampo']) self.assertEqual(len(ds), 0) def test_basic_load_to3(self): """Test basic load of to3 datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) ds = r.load(['reflectivity_331', 'uvaerosol_index']) self.assertEqual(len(ds), 2) for d in ds.values(): self.assertEqual(d.attrs['resolution'], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) @mock.patch('satpy.readers.hdf5_utils.HDF5FileHandler._get_reference') @mock.patch('h5py.File') def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_reference): """Test load of so2 datasets with DIMENSION_LIST.""" from satpy.readers import load_reader mock_h5py_file.return_value = mock.MagicMock() mock_hdf5_utils_get_reference.return_value = [[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]]] r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', ]) r.create_filehandlers(loadables) ds = r.load(['tcso2_trl_sampo']) self.assertEqual(len(ds), 1) satpy-0.34.0/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py000066400000000000000000000074571420401153000243570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
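# The tests below exercise the SAFENC file handler for Sentinel-1 Level-2 OCN
# products against a mocked xarray dataset. As a rough orientation, typical use
# of the same reader through the Scene API could look like this sketch; the
# reader name 'safe_sar_l2_ocn' is assumed to follow the module name and the
# file name is a made-up placeholder:
#
#     from satpy import Scene
#     scn = Scene(reader='safe_sar_l2_ocn',
#                 filenames=['s1a-iw-ocn-vv-...-001.nc'])  # hypothetical file
#     scn.load(['owiWindSpeed'])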
"""Module for testing the satpy.readers.safe_sar_l2_ocn module.""" import unittest import unittest.mock as mock import numpy as np import xarray as xr from satpy.tests.utils import make_dataid class TestSAFENC(unittest.TestCase): """Test various SAFE SAR L2 OCN file handlers.""" @mock.patch('satpy.readers.safe_sar_l2_ocn.xr') @mock.patch.multiple('satpy.readers.safe_sar_l2_ocn.SAFENC', __abstractmethods__=set()) def setUp(self, xr_): """Set up the tests.""" from satpy.readers.safe_sar_l2_ocn import SAFENC self.channels = ['owiWindSpeed', 'owiLon', 'owiLat', 'owiHs', 'owiNrcs', 'foo', 'owiPolarisationName', 'owiCalConstObsi'] # Mock file access to return a fake dataset. self.dummy3d = np.zeros((2, 2, 1)) self.dummy2d = np.zeros((2, 2)) self.dummy1d = np.zeros((2)) self.band = 1 self.nc = xr.Dataset( {'owiWindSpeed': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize'), attrs={'_FillValue': np.nan}), 'owiLon': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), 'owiLat': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), 'owiHs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPartition')), 'owiNrcs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPolarization')), 'foo': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize')), 'owiPolarisationName': xr.DataArray(self.dummy1d, dims=('owiPolarisation')), 'owiCalConstObsi': xr.DataArray(self.dummy1d, dims=('owiIncSize')) }, attrs={'_FillValue': np.nan, 'missionName': 'S1A'}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. self.reader = SAFENC(filename='dummy', filename_info={'start_time': 0, 'end_time': 0, 'fstart_time': 0, 'fend_time': 0, 'polarization': 'vv'}, filetype_info={}) def test_init(self): """Test reader initialization.""" self.assertEqual(self.reader.start_time, 0) self.assertEqual(self.reader.end_time, 0) self.assertEqual(self.reader.fstart_time, 0) self.assertEqual(self.reader.fend_time, 0) def test_get_dataset(self): """Test getting a dataset.""" for ch in self.channels: dt = self.reader.get_dataset( key=make_dataid(name=ch), info={}) # ... this only compares the valid (unmasked) elements self.assertTrue(np.all(self.nc[ch] == dt.to_masked_array()), msg='get_dataset() returns invalid data for ' 'dataset {}'.format(ch)) satpy-0.34.0/satpy/tests/reader_tests/test_sar_c_safe.py000066400000000000000000001107141420401153000234140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.sar-c_safe module.""" import unittest import unittest.mock as mock from enum import Enum from io import BytesIO import dask.array as da import numpy as np import xarray as xr from satpy.dataset import DataQuery from satpy.readers.sar_c_safe import SAFEXMLAnnotation, SAFEXMLCalibration, SAFEXMLNoise class TestSAFEGRD(unittest.TestCase): """Test the SAFE GRD file handler.""" @mock.patch('rasterio.open') def setUp(self, mocked_rio_open): """Set up the test case.""" from satpy.readers.sar_c_safe import SAFEGRD filename_info = {'mission_id': 'S1A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, 'polarization': 'vv'} filetype_info = 'bla' self.noisefh = mock.MagicMock() self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=['y', 'x']) self.calfh = mock.MagicMock() self.calfh.get_calibration_constant.return_value = 1 self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=['y', 'x']) self.annotationfh = mock.MagicMock() self.test_fh = SAFEGRD('S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/' 's1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff', filename_info, filetype_info, self.calfh, self.noisefh, self.annotationfh) self.mocked_rio_open = mocked_rio_open def test_instantiate(self): """Test initialization of file handlers.""" assert(self.test_fh._polarization == 'vv') assert(self.test_fh.calibration == self.calfh) assert(self.test_fh.noise == self.noisefh) self.mocked_rio_open.assert_called() @mock.patch('rioxarray.open_rasterio') def test_read_calibrated_natural(self, mocked_rioxarray_open): """Test the calibration routines.""" calibration = mock.MagicMock() calibration.name = "sigma_nought" mocked_rioxarray_open.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), dims=['y', 'x']) xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity='natural'), info=dict()) np.testing.assert_allclose(xarr, [[np.nan, 2], [5, 10]]) @mock.patch('rioxarray.open_rasterio') def test_read_calibrated_dB(self, mocked_rioxarray_open): """Test the calibration routines.""" calibration = mock.MagicMock() calibration.name = "sigma_nought" mocked_rioxarray_open.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), dims=['y', 'x']) xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity='dB'), info=dict()) np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]]) def test_read_lon_lats(self): """Test reading lons and lats.""" class FakeGCP: def __init__(self, *args): self.row, self.col, self.x, self.y, self.z = args gcps = [FakeGCP(0, 0, 0, 0, 0), FakeGCP(0, 3, 1, 0, 0), FakeGCP(3, 0, 0, 1, 0), FakeGCP(3, 3, 1, 1, 0), FakeGCP(0, 7, 2, 0, 0), FakeGCP(3, 7, 2, 1, 0), FakeGCP(7, 7, 2, 2, 0), FakeGCP(7, 3, 1, 2, 0), FakeGCP(7, 0, 0, 2, 0), FakeGCP(0, 15, 3, 0, 0), FakeGCP(3, 15, 3, 1, 0), FakeGCP(7, 15, 3, 2, 0), FakeGCP(15, 15, 3, 3, 0), FakeGCP(15, 7, 2, 3, 0), FakeGCP(15, 3, 1, 3, 0), FakeGCP(15, 0, 0, 3, 0), ] crs = dict(init='epsg:4326') self.mocked_rio_open.return_value.gcps = [gcps, crs] self.mocked_rio_open.return_value.shape = [16, 16] query = DataQuery(name="longitude", polarization="vv") xarr = self.test_fh.get_dataset(query, info=dict()) expected = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, 
-6.75000000e-01, -8.95833333e-01, -9.77777778e-01, -8.75000000e-01, -5.41666667e-01, 6.80555556e-02, 1.00000000e+00], [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, 1.34166667e+00, 1.25598214e+00, 1.12723214e+00, 9.70282738e-01, 8.00000000e-01, 6.31250000e-01, 4.78898810e-01, 3.57812500e-01, 2.82857143e-01, 2.68898810e-01, 3.30803571e-01, 4.83437500e-01, 7.41666667e-01], [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, 1.67757937e+00, 1.62773172e+00, 1.57619402e+00, 1.52202558e+00, 1.46428571e+00, 1.40203373e+00, 1.33432894e+00, 1.26023065e+00, 1.17879819e+00, 1.08909084e+00, 9.90167942e-01, 8.81088790e-01, 7.60912698e-01], [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, 2.00000000e+00, 2.00833333e+00, 2.01388889e+00, 2.01250000e+00, 2.00000000e+00, 1.97222222e+00, 1.92500000e+00, 1.85416667e+00, 1.75555556e+00, 1.62500000e+00, 1.45833333e+00, 1.25138889e+00, 1.00000000e+00], [1.80833333e+00, 2.01669643e+00, 2.18011267e+00, 2.30119048e+00, 2.38253827e+00, 2.42676446e+00, 2.43647747e+00, 2.41428571e+00, 2.36279762e+00, 2.28462160e+00, 2.18236607e+00, 2.05863946e+00, 1.91605017e+00, 1.75720663e+00, 1.58471726e+00, 1.40119048e+00], [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, 2.57341270e+00, 2.73509779e+00, 2.80126842e+00, 2.78872945e+00, 2.71428571e+00, 2.59474206e+00, 2.44690334e+00, 2.28757440e+00, 2.13356009e+00, 2.00166525e+00, 1.90869473e+00, 1.87145337e+00, 1.90674603e+00], [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, 2.80892857e+00, 3.05076318e+00, 3.12384850e+00, 3.06402742e+00, 2.90714286e+00, 2.68903770e+00, 2.44555485e+00, 2.21253720e+00, 2.02582766e+00, 1.92126913e+00, 1.93470451e+00, 2.10197669e+00, 2.45892857e+00], [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, 3.00000000e+00, 3.31428571e+00, 3.38095238e+00, 3.25714286e+00, 3.00000000e+00, 2.66666667e+00, 2.31428571e+00, 2.00000000e+00, 1.78095238e+00, 1.71428571e+00, 1.85714286e+00, 2.26666667e+00, 3.00000000e+00], [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, 3.13888889e+00, 3.51041667e+00, 3.55902778e+00, 3.36284722e+00, 3.00000000e+00, 2.54861111e+00, 2.08680556e+00, 1.69270833e+00, 1.44444444e+00, 1.42013889e+00, 1.69791667e+00, 2.35590278e+00, 3.47222222e+00], [-1.27500000e+00, 8.64613095e-01, 2.33016227e+00, 3.21785714e+00, 3.62390731e+00, 3.64452239e+00, 3.37591199e+00, 2.91428571e+00, 2.35585317e+00, 1.79682398e+00, 1.33340774e+00, 1.06181406e+00, 1.07825255e+00, 1.47893282e+00, 2.36006448e+00, 3.81785714e+00], [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, 3.22916667e+00, 3.63950893e+00, 3.62388393e+00, 3.29110863e+00, 2.75000000e+00, 2.10937500e+00, 1.47805060e+00, 9.64843750e-01, 6.78571429e-01, 7.28050595e-01, 1.22209821e+00, 2.26953125e+00, 3.97916667e+00], [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, 3.16507937e+00, 3.54197279e+00, 3.48356009e+00, 3.10320862e+00, 2.51428571e+00, 1.83015873e+00, 1.16419501e+00, 6.29761905e-01, 3.40226757e-01, 4.08956916e-01, 9.49319728e-01, 2.07468254e+00, 3.89841270e+00], [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, 3.01785714e+00, 3.31605017e+00, 3.20999858e+00, 2.80698342e+00, 2.21428571e+00, 1.53918651e+00, 8.88966837e-01, 3.70907738e-01, 9.22902494e-02, 1.60395408e-01, 6.82504252e-01, 1.76589782e+00, 3.51785714e+00], [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, 2.77976190e+00, 2.94649235e+00, 2.78964711e+00, 2.39720451e+00, 1.85714286e+00, 1.25744048e+00, 6.86075680e-01, 2.31026786e-01, -1.97278912e-02, 2.17899660e-02, 4.43558673e-01, 1.33355655e+00, 2.77976190e+00], [8.84722222e-01, 1.72927083e+00, 2.23108879e+00, 2.44305556e+00, 
2.41805060e+00, 2.20895337e+00, 1.86864335e+00, 1.45000000e+00, 1.00590278e+00, 5.89231151e-01, 2.52864583e-01, 4.96825397e-02, 3.25644841e-02, 2.54389881e-01, 7.68038194e-01, 1.62638889e+00], [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, 2.00000000e+00, 1.71547619e+00, 1.45436508e+00, 1.21607143e+00, 1.00000000e+00, 8.05555556e-01, 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, 0.00000000e+00]]) np.testing.assert_allclose(xarr.values, expected) annotation_xml = b""" S1B GRD HH EW EW 2020-03-15T05:04:28.137817 2020-03-15T05:05:32.416171 20698 160707 001 2020-03-15T05:04:28.137817 2020-03-15T05:05:32.416171 2020-03-15T04:33:22.256260 2020-03-15T05:04:28.320641 Slice 1 1 2020-03-15T05:04:29.485847 2020-03-15T05:05:36.317420 2 2020-03-15T05:05:30.253413 2020-03-15T05:06:34.046608 3 2020-03-15T05:06:31.020979 2020-03-15T05:07:31.775796 4.955163637998161e-03 Detected 16 bit Unsigned Integer 4.000000e+01 4.000000e+01 5.998353361537205e-03 3.425601970000000e+02 10 10 -1.366569000000000e+00 3.468272707039038e+01 4.873919e+02 0.000000e+00 2.451083e+02 0.000000e+00 2018-02-12T03:24:58.493342 4.964462411376810e-03 0 0 7.021017981690355e+01 5.609684402205929e+01 8.234046399593353e-04 1.918318045731997e+01 1.720012646010728e+01 2018-02-12T03:24:58.493342 4.964462411376810e-03 0 9 7.021017981690355e+01 5.609684402205929e+01 8.234046399593353e-04 1.918318045731997e+01 1.720012646010728e+01 2018-02-12T03:24:58.493342 4.964462411376810e-03 9 0 7.021017981690355e+01 5.609684402205929e+01 8.234046399593353e-04 1.918318045731997e+01 1.720012646010728e+01 2018-02-12T03:24:58.493342 4.964462411376810e-03 9 9 7.021017981690355e+01 5.609684402205929e+01 8.234046399593353e-04 1.918318045731997e+01 1.720012646010728e+01 """ noise_xml = b""" 2020-03-15T05:04:28.137817 0 0 2 4 6 8 9 0.00000e+00 2.00000e+00 4.00000e+00 6.00000e+00 8.00000e+00 9.00000e+00 2020-03-15T05:04:28.137817 5 0 2 4 7 8 9 0.00000e+00 2.00000e+00 4.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 2020-03-15T05:04:28.137817 9 0 2 5 7 8 9 0.00000e+00 2.00000e+00 5.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 IW1 0 1 1 3 0 1.000000e+00 IW1 2 0 9 1 2 4 6 8 2.000000e+00 2.000000e+00 2.000000e+00 2.000000e+00 IW2 2 2 4 4 2 4 3.000000e+00 3.000000e+00 IW3 2 5 4 8 2 4 4.000000e+00 4.000000e+00 IW2 5 2 7 5 5 6 5.000000e+00 5.000000e+00 IW3 5 6 7 9 5 6 6.000000e+00 6.000000e+00 IW2 8 2 9 6 8 7.000000e+00 IW3 8 7 9 9 8 8.000000e+00 """ noise_xml_with_holes = b""" 2020-03-15T05:04:28.137817 0 0 2 4 6 8 9 0.00000e+00 2.00000e+00 4.00000e+00 6.00000e+00 8.00000e+00 9.00000e+00 2020-03-15T05:04:28.137817 5 0 2 4 7 8 9 0.00000e+00 2.00000e+00 4.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 2020-03-15T05:04:28.137817 9 0 2 5 7 8 9 0.00000e+00 2.00000e+00 5.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 IW1 0 3 2 5 0 1.000000e+00 IW1 1 0 5 1 2 4 5 2.000000e+00 2.000000e+00 2.000000e+00 IW2 2 8 4 9 2 4 3.000000e+00 3.000000e+00 IW3 3 2 5 3 3 5 4.000000e+00 4.000000e+00 IW2 3 4 4 5 3 4 5.000000e+00 5.000000e+00 IW3 4 6 4 7 4 6.000000e+00 IW2 5 4 7 6 5 7 7.000000e+00 7.000000e+00 IW3 5 7 7 9 6 8.000000e+00 IW2 6 0 7 3 6 7 9.000000e+00 9.000000e+00 IW3 8 0 9 0 8 10.000000e+00 IW2 8 2 9 3 8 9 11.000000e+00 11.000000e+00 IW3 8 4 8 5 8 12.000000e+00 """ calibration_xml = b""" S1A GRD VV IW IW 2018-02-12T03:24:58.493726 2018-02-12T03:25:01.493726 20568 144162 001 1.000000e+00 2018-02-12T03:24:58.493726 0 0 2 4 6 8 9 1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03 1.0870e+03 1.0870e+03 
1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 2018-02-12T03:24:59.493726 3 0 2 4 6 8 9 1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 2018-02-12T03:25:00.493726 6 0 2 4 6 8 9 1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 2018-02-12T03:25:01.493726 9 0 2 4 6 8 9 1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 """ class TestSAFEXMLNoise(unittest.TestCase): """Test the SAFE XML Noise file handler.""" def setUp(self): """Set up the test case.""" filename_info = dict(start_time=None, end_time=None, polarization="vv") self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) self.noise_fh = SAFEXMLNoise(BytesIO(noise_xml), filename_info, mock.MagicMock(), self.annotation_fh) self.expected_azimuth_noise = np.array([[np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan], [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan], [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan], [2, 2, 5, 5, 5, 5, 6, 6, 6, 6], [2, 2, 5, 5, 5, 5, 6, 6, 6, 6], [2, 2, 5, 5, 5, 5, 6, 6, 6, 6], [2, 2, 7, 7, 7, 7, 7, 8, 8, 8], [2, 2, 7, 7, 7, 7, 7, 8, 8, 8], ]) self.expected_range_noise = np.array([[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], ]) self.noise_fh_with_holes = SAFEXMLNoise(BytesIO(noise_xml_with_holes), filename_info, mock.MagicMock(), self.annotation_fh) self.expected_azimuth_noise_with_holes = np.array( [[np.nan, np.nan, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan], [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan], [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, 3, 3], [2, 2, 4, 4, 5, 5, np.nan, np.nan, 3, 3], [2, 2, 4, 4, 5, 5, 6, 6, 3, 3], [2, 2, 4, 4, 7, 7, 7, 8, 8, 8], [9, 9, 9, 9, 7, 7, 7, 8, 8, 8], [9, 9, 9, 9, 7, 7, 7, 8, 8, 8], [10, np.nan, 11, 11, 12, 12, np.nan, np.nan, np.nan, np.nan], [10, np.nan, 11, 11, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan] ]) def test_azimuth_noise_array(self): """Test reading the azimuth-noise array.""" res = self.noise_fh.azimuth_noise_reader.read_azimuth_noise_array() np.testing.assert_array_equal(res, self.expected_azimuth_noise) def test_azimuth_noise_array_with_holes(self): """Test reading the azimuth-noise array.""" res = self.noise_fh_with_holes.azimuth_noise_reader.read_azimuth_noise_array() np.testing.assert_array_equal(res, self.expected_azimuth_noise_with_holes) def test_range_noise_array(self): """Test reading the 
range-noise array.""" res = self.noise_fh.read_range_noise_array(chunks=5) np.testing.assert_allclose(res, self.expected_range_noise) def test_get_noise_dataset(self): """Test using get_dataset for the noise.""" query = DataQuery(name="noise", polarization="vv") res = self.noise_fh.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_azimuth_noise * self.expected_range_noise) def test_get_noise_dataset_has_right_chunk_size(self): """Test using get_dataset for the noise has right chunk size in result.""" query = DataQuery(name="noise", polarization="vv") res = self.noise_fh.get_dataset(query, {}, chunks=3) assert res.data.chunksize == (3, 3) class Calibration(Enum): """Calibration levels.""" gamma = 1 sigma_nought = 2 beta_nought = 3 dn = 4 class TestSAFEXMLCalibration(unittest.TestCase): """Test the SAFE XML Calibration file handler.""" def setUp(self): """Set up the test case.""" filename_info = dict(start_time=None, end_time=None, polarization="vv") self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) self.calibration_fh = SAFEXMLCalibration(BytesIO(calibration_xml), filename_info, mock.MagicMock(), self.annotation_fh) self.expected_gamma = np.array([[1840.695, 1779.672, 1718.649, 1452.926, 1187.203, 1186.226, 1185.249, 1184.276, 1183.303, 1181.365]]) * np.ones((10, 1)) def test_dn_calibration_array(self): """Test reading the dn calibration array.""" expected_dn = np.ones((10, 10)) * 1087 res = self.calibration_fh.get_calibration(Calibration.dn, chunks=5) np.testing.assert_allclose(res, expected_dn) def test_beta_calibration_array(self): """Test reading the beta calibration array.""" expected_beta = np.ones((10, 10)) * 1087 res = self.calibration_fh.get_calibration(Calibration.beta_nought, chunks=5) np.testing.assert_allclose(res, expected_beta) def test_sigma_calibration_array(self): """Test reading the sigma calibration array.""" expected_sigma = np.array([[1894.274, 1841.4335, 1788.593, 1554.4165, 1320.24, 1299.104, 1277.968, 1277.968, 1277.968, 1277.968]]) * np.ones((10, 1)) res = self.calibration_fh.get_calibration(Calibration.sigma_nought, chunks=5) np.testing.assert_allclose(res, expected_sigma) def test_gamma_calibration_array(self): """Test reading the gamma calibration array.""" res = self.calibration_fh.get_calibration(Calibration.gamma, chunks=5) np.testing.assert_allclose(res, self.expected_gamma) def test_get_calibration_dataset(self): """Test using get_dataset for the calibration.""" query = DataQuery(name="gamma", polarization="vv") res = self.calibration_fh.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_gamma) def test_get_calibration_dataset_has_right_chunk_size(self): """Test using get_dataset for the calibration yields array with right chunksize.""" query = DataQuery(name="gamma", polarization="vv") res = self.calibration_fh.get_dataset(query, {}, chunks=3) assert res.data.chunksize == (3, 3) np.testing.assert_allclose(res, self.expected_gamma) def test_get_calibration_constant(self): """Test getting the calibration constant.""" query = DataQuery(name="calibration_constant", polarization="vv") res = self.calibration_fh.get_dataset(query, {}) assert res == 1 class TestSAFEXMLAnnotation(unittest.TestCase): """Test the SAFE XML Annotation file handler.""" def setUp(self): """Set up the test case.""" filename_info = dict(start_time=None, end_time=None, polarization="vv") self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) def 
test_incidence_angle(self): """Test reading the incidence angle.""" query = DataQuery(name="incidence_angle", polarization="vv") res = self.annotation_fh.get_dataset(query, {}) np.testing.assert_allclose(res, 19.18318046) satpy-0.34.0/satpy/tests/reader_tests/test_satpy_cf_nc.py000066400000000000000000000413441420401153000236210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the CF reader.""" import os import unittest from contextlib import suppress from datetime import datetime import numpy as np import xarray as xr from satpy import Scene from satpy.dataset.dataid import WavelengthRange from satpy.readers.satpy_cf_nc import SatpyCFFileHandler class TestCFReader(unittest.TestCase): """Test case for CF reader.""" def setUp(self): """Create a test scene.""" tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) data_visir = [[1, 2], [3, 4]] y_visir = [1, 2] x_visir = [1, 2] z_visir = [1, 2, 3, 4, 5, 6, 7] qual_data = [[1, 2, 3, 4, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7]] time_vis006 = [1, 2] lat = 33.0 * np.array([[1, 2], [3, 4]]) lon = -13.0 * np.array([[1, 2], [3, 4]]) common_attrs = {'start_time': tstart, 'end_time': tend, 'platform_name': 'tirosn', 'orbit_number': 99999} vis006 = xr.DataArray(data_visir, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, attrs={'name': 'image0', 'id_tag': 'ch_r06', 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance', 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'), 'orbital_parameters': { 'projection_longitude': 1, 'projection_latitude': 1, 'projection_altitude': 1, 'satellite_nominal_longitude': 1, 'satellite_nominal_latitude': 1, 'satellite_actual_longitude': 1, 'satellite_actual_latitude': 1, 'satellite_actual_altitude': 1, 'nadir_longitude': 1, 'nadir_latitude': 1, 'only_in_1': False} }) ir_108 = xr.DataArray(data_visir, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, attrs={'name': 'image1', 'id_tag': 'ch_tb11', 'coordinates': 'lat lon'}) qual_f = xr.DataArray(qual_data, dims=('y', 'z'), coords={'y': y_visir, 'z': z_visir, 'acq_time': ('y', time_vis006)}, attrs={'name': 'qual_flags', 'id_tag': 'qual_flags'}) lat = xr.DataArray(lat, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir}, attrs={'name': 'lat', 'standard_name': 'latitude', 'modifiers': np.array([])}) lon = xr.DataArray(lon, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir}, attrs={'name': 'lon', 'standard_name': 'longitude', 'modifiers': np.array([])}) self.scene = Scene() self.scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3'] scene_dict = {'image0': vis006, 'image1': ir_108, 'lat': lat, 'lon': lon, 'qual_flags': qual_f} for key in scene_dict: self.scene[key] = scene_dict[key] self.scene[key].attrs.update(common_attrs) def test_write_and_read(self): """Save a 
file with cf_writer and read the data again.""" filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format( datetime.utcnow().strftime('%Y%j%H%M%S')) try: self.scene.save_datasets(writer='cf', filename=filename, header_attrs={'instrument': 'avhrr'}, engine='h5netcdf', flatten_attrs=True, pretty=True) scn_ = Scene(reader='satpy_cf_nc', filenames=[filename]) scn_.load(['image0', 'image1', 'lat']) np.testing.assert_array_equal(scn_['image0'].data, self.scene['image0'].data) np.testing.assert_array_equal(scn_['lat'].data, self.scene['lat'].data) # lat loaded as dataset np.testing.assert_array_equal(scn_['image0'].coords['lon'], self.scene['lon'].data) # lon loded as coord assert isinstance(scn_['image0'].attrs['wavelength'], WavelengthRange) finally: with suppress(PermissionError): os.remove(filename) def test_fix_modifier_attr(self): """Check that fix modifier can handle empty list as modifier attribute.""" self.reader = SatpyCFFileHandler('filename', {}, {'filetype': 'info'}) ds_info = {'modifiers': []} self.reader.fix_modifier_attr(ds_info) self.assertEqual(ds_info['modifiers'], ()) def _dataset_for_prefix_testing(self): data_visir = [[1, 2], [3, 4]] y_visir = [1, 2] x_visir = [1, 2] lat = 33.0 * np.array([[1, 2], [3, 4]]) lon = -13.0 * np.array([[1, 2], [3, 4]]) vis006 = xr.DataArray(data_visir, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir}, attrs={'name': '1', 'id_tag': 'ch_r06', 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance', 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm') }) lat = xr.DataArray(lat, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir}, attrs={'name': 'lat', 'standard_name': 'latitude', 'modifiers': np.array([])}) lon = xr.DataArray(lon, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir}, attrs={'name': 'lon', 'standard_name': 'longitude', 'modifiers': np.array([])}) scene = Scene() scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3'] scene['1'] = vis006 scene['lat'] = lat scene['lon'] = lon return scene def test_read_prefixed_channels(self): """Check channels starting with digit is prefixed and read back correctly.""" scene = self._dataset_for_prefix_testing() # Testing with default prefixing filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format( datetime.utcnow().strftime('%Y%j%H%M%S')) try: scene.save_datasets(writer='cf', filename=filename, header_attrs={'instrument': 'avhrr'}, engine='netcdf4', flatten_attrs=True, pretty=True) scn_ = Scene(reader='satpy_cf_nc', filenames=[filename]) scn_.load(['1']) np.testing.assert_array_equal(scn_['1'].data, scene['1'].data) np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loaded as coord scn_ = Scene(reader='satpy_cf_nc', filenames=[filename], reader_kwargs={}) scn_.load(['1']) np.testing.assert_array_equal(scn_['1'].data, scene['1'].data) np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loaded as coord # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(filename) as ds_disk: np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, scene['1'].data) finally: with suppress(PermissionError): os.remove(filename) def test_read_prefixed_channels_include_orig_name(self): """Check channels starting with digit and includeed orig name is prefixed and read back correctly.""" scene = self._dataset_for_prefix_testing() # Testing with default prefixing filename = 
'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format( datetime.utcnow().strftime('%Y%j%H%M%S')) try: scene.save_datasets(writer='cf', filename=filename, header_attrs={'instrument': 'avhrr'}, engine='netcdf4', flatten_attrs=True, pretty=True, include_orig_name=True) scn_ = Scene(reader='satpy_cf_nc', filenames=[filename]) scn_.load(['1']) np.testing.assert_array_equal(scn_['1'].data, scene['1'].data) np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loaded as coord self.assertEqual(scn_['1'].attrs['original_name'], '1') # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(filename) as ds_disk: np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, scene['1'].data) finally: with suppress(PermissionError): os.remove(filename) def test_read_prefixed_channels_by_user(self): """Check channels starting with digit is prefixed by user and read back correctly.""" scene = self._dataset_for_prefix_testing() filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format( datetime.utcnow().strftime('%Y%j%H%M%S')) try: scene.save_datasets(writer='cf', filename=filename, header_attrs={'instrument': 'avhrr'}, engine='netcdf4', flatten_attrs=True, pretty=True, numeric_name_prefix='USER') scn_ = Scene(reader='satpy_cf_nc', filenames=[filename], reader_kwargs={'numeric_name_prefix': 'USER'}) scn_.load(['1']) np.testing.assert_array_equal(scn_['1'].data, scene['1'].data) np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loded as coord # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(filename) as ds_disk: np.testing.assert_array_equal(ds_disk['USER1'].data, scene['1'].data) finally: with suppress(PermissionError): os.remove(filename) def test_read_prefixed_channels_by_user2(self): """Check channels starting with digit is prefixed by user when saving and read back correctly without prefix.""" scene = self._dataset_for_prefix_testing() filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format( datetime.utcnow().strftime('%Y%j%H%M%S')) try: scene.save_datasets(writer='cf', filename=filename, header_attrs={'instrument': 'avhrr'}, engine='netcdf4', flatten_attrs=True, pretty=True, include_orig_name=False, numeric_name_prefix='USER') scn_ = Scene(reader='satpy_cf_nc', filenames=[filename]) scn_.load(['USER1']) np.testing.assert_array_equal(scn_['USER1'].data, scene['1'].data) np.testing.assert_array_equal(scn_['USER1'].coords['lon'], scene['lon'].data) # lon loded as coord finally: with suppress(PermissionError): os.remove(filename) def test_read_prefixed_channels_by_user_include_prefix(self): """Check channels starting with digit is prefixed by user and include original name when saving.""" scene = self._dataset_for_prefix_testing() filename = 'testingcfwriter2{:s}-viirs-mband-20201007075915-20201007080744.nc'.format( datetime.utcnow().strftime('%Y%j%H%M%S')) try: scene.save_datasets(writer='cf', filename=filename, header_attrs={'instrument': 'avhrr'}, engine='netcdf4', flatten_attrs=True, pretty=True, include_orig_name=True, numeric_name_prefix='USER') scn_ = Scene(reader='satpy_cf_nc', filenames=[filename]) scn_.load(['1']) np.testing.assert_array_equal(scn_['1'].data, scene['1'].data) np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loded as coord finally: with suppress(PermissionError): os.remove(filename) def 
test_read_prefixed_channels_by_user_no_prefix(self): """Check channels starting with digit is not prefixed by user.""" scene = self._dataset_for_prefix_testing() filename = 'testingcfwriter3{:s}-viirs-mband-20201007075915-20201007080744.nc'.format( datetime.utcnow().strftime('%Y%j%H%M%S')) try: scene.save_datasets(writer='cf', filename=filename, header_attrs={'instrument': 'avhrr'}, engine='netcdf4', flatten_attrs=True, pretty=True, numeric_name_prefix='') scn_ = Scene(reader='satpy_cf_nc', filenames=[filename]) scn_.load(['1']) np.testing.assert_array_equal(scn_['1'].data, scene['1'].data) np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loded as coord finally: with suppress(PermissionError): os.remove(filename) def test_orbital_parameters(self): """Test that the orbital parameters in attributes are handled correctly.""" filename = 'testingcfwriter4{:s}-viirs-mband-20201007075915-20201007080744.nc'.format( datetime.utcnow().strftime('%Y%j%H%M%S')) try: self.scene.save_datasets(writer='cf', filename=filename, header_attrs={'instrument': 'avhrr'}) scn_ = Scene(reader='satpy_cf_nc', filenames=[filename]) scn_.load(['image0']) orig_attrs = self.scene['image0'].attrs['orbital_parameters'] new_attrs = scn_['image0'].attrs['orbital_parameters'] assert isinstance(new_attrs, dict) for key in orig_attrs: assert orig_attrs[key] == new_attrs[key] finally: with suppress(PermissionError): os.remove(filename) satpy-0.34.0/satpy/tests/reader_tests/test_scmi.py000066400000000000000000000237041420401153000222640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The scmi_abi_l1b reader tests package.""" import unittest from unittest import mock import numpy as np import xarray as xr class FakeDataset(object): """Fake dataset.""" def __init__(self, info, attrs, dims=None): """Init the dataset.""" for var_name, var_data in list(info.items()): if isinstance(var_data, np.ndarray): info[var_name] = xr.DataArray(var_data) self.info = info self.attrs = attrs self.dims = dims or {} def __getitem__(self, key): """Get item.""" return self.info.get(key, self.dims.get(key)) def __contains__(self, key): """Check contains.""" return key in self.info or key in self.dims def rename(self, *args, **kwargs): """Rename the dataset.""" return self def close(self): """Close the dataset.""" return class TestSCMIFileHandler(unittest.TestCase): """Test the SCMIFileHandler reader.""" @mock.patch('satpy.readers.scmi.xr') def setUp(self, xr_): """Set up for test.""" from satpy.readers.scmi import SCMIFileHandler rad_data = (np.arange(10.).reshape((2, 5)) + 1.) rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) self.expected_rad = rad_data.astype(np.float64) * 0.5 + -1. self.expected_rad[-1, -2] = np.nan time = xr.DataArray(0.) 
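        # The fake 'Sectorized_CMI' variable defined next stores packed integer counts
        # together with scale_factor/add_offset/_FillValue attributes; self.expected_rad
        # above is the unpacked counterpart (counts * 0.5 - 1.0, with the fill value of 20
        # replaced by NaN) that get_dataset() is expected to return in the tests below.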
rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 20, 'standard_name': 'toa_bidirectional_reflectance', }, coords={ 'time': time, } ) xr_.open_dataset.return_value = FakeDataset( { 'Sectorized_CMI': rad, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), }, { 'start_date_time': "2017210120000", 'satellite_id': 'GOES-16', 'satellite_longitude': -90., 'satellite_latitude': 0., 'satellite_altitude': 35785831., }, {'y': 2, 'x': 5}, ) self.reader = SCMIFileHandler('filename', {'platform_shortname': 'G16'}, {'filetype': 'info'}) def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime from satpy.tests.utils import make_dataid self.assertEqual(self.reader.start_time, datetime(2017, 7, 29, 12, 0, 0, 0)) self.assertEqual(self.reader.end_time, datetime(2017, 7, 29, 12, 0, 0, 0)) self.assertEqual(self.reader.get_shape(make_dataid(name='C05'), {}), (2, 5)) def test_data_load(self): """Test data loading.""" from satpy.tests.utils import make_dataid res = self.reader.get_dataset( make_dataid(name='C05', calibration='reflectance'), {}) np.testing.assert_allclose(res.data, self.expected_rad, equal_nan=True) self.assertNotIn('scale_factor', res.attrs) self.assertNotIn('_FillValue', res.attrs) self.assertEqual(res.attrs['standard_name'], 'toa_bidirectional_reflectance') class TestSCMIFileHandlerArea(unittest.TestCase): """Test the SCMIFileHandler's area creation.""" @mock.patch('satpy.readers.scmi.xr') def create_reader(self, proj_name, proj_attrs, xr_): """Create a fake reader.""" from satpy.readers.scmi import SCMIFileHandler proj = xr.DataArray([], attrs=proj_attrs) x__ = xr.DataArray( [0, 1], attrs={'scale_factor': 2., 'add_offset': -1., 'units': 'meters'}, ) y__ = xr.DataArray( [0, 1], attrs={'scale_factor': -2., 'add_offset': 1., 'units': 'meters'}, ) xr_.open_dataset.return_value = FakeDataset({ 'goes_imager_projection': proj, 'x': x__, 'y': y__, 'Sectorized_CMI': np.ones((2, 2))}, { 'satellite_id': 'GOES-16', 'grid_mapping': proj_name, }, { 'y': y__.size, 'x': x__.size, } ) return SCMIFileHandler('filename', {'platform_shortname': 'G16'}, {'filetype': 'info'}) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_geos(self, adef): """Test the area generation for geos projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'perspective_point_height': 1., 'longitude_of_projection_origin': -90., 'sweep_angle_axis': u'x', 'grid_mapping_name': 'geostationary', } ) reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { 'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_lcc(self, adef): """Test the area generation for lcc projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'longitude_of_central_meridian': -90., 'standard_parallel': 25., 'latitude_of_projection_origin': 25., 'grid_mapping_name': 'lambert_conformal_conic', } ) reader.get_area_def(None) self.assertEqual(adef.call_count, 1) 
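        # AreaDefinition is mocked, so the positional arguments captured from its single
        # call expose the PROJ dictionary, shape and extent that the reader derived from
        # the CF grid_mapping attributes; the assertions below check each of them.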
call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 25.0, 'lat_1': 25.0, 'proj': 'lcc', 'units': 'm'}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_stere(self, adef): """Test the area generation for stere projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'straight_vertical_longitude_from_pole': -90., 'standard_parallel': 60., 'latitude_of_projection_origin': 90., 'grid_mapping_name': 'polar_stereographic', } ) reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 90.0, 'lat_ts': 60.0, 'proj': 'stere', 'units': 'm'}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_merc(self, adef): """Test the area generation for merc projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'longitude_of_projection_origin': -90., 'standard_parallel': 0., 'grid_mapping_name': 'mercator', } ) reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, 'lat_ts': 0.0, 'proj': 'merc', 'units': 'm'}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_bad(self, adef): """Test the area generation for bad projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'longitude_of_projection_origin': -90., 'standard_parallel': 0., 'grid_mapping_name': 'fake', } ) self.assertRaises(ValueError, reader.get_area_def, None) satpy-0.34.0/satpy/tests/reader_tests/test_seadas_l2.py000066400000000000000000000131571420401153000231670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
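# The fixtures below write small SEADAS L2 HDF4 files to disk and the tests load
# them through the real 'seadas_l2' reader. A minimal standalone sketch of the
# same workflow, assuming a SEADAS chlorophyll product is available under the
# hypothetical file name used by the MODIS fixture:
#
#     from satpy import Scene
#     scn = Scene(reader='seadas_l2', filenames=['a1.21322.1758.seadas.hdf'])
#     scn.load(['chlor_a'])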
"""Tests for the 'seadas_l2' reader.""" import numpy as np import pytest from pyhdf.SD import SD, SDC from pyresample.geometry import SwathDefinition from pytest_lazyfixture import lazy_fixture from satpy import Scene, available_readers @pytest.fixture(scope="module") def seadas_l2_modis_chlor_a(tmp_path_factory): """Create MODIS SEADAS file.""" filename = "a1.21322.1758.seadas.hdf" full_path = str(tmp_path_factory.mktemp("seadas_l2") / filename) return _create_seadas_chlor_a_file(full_path, "Aqua", "MODISA") @pytest.fixture(scope="module") def seadas_l2_viirs_npp_chlor_a(tmp_path_factory): """Create VIIRS NPP SEADAS file.""" filename = "SEADAS_npp_d20211118_t1728125_e1739327.hdf" full_path = str(tmp_path_factory.mktemp("seadas") / filename) return _create_seadas_chlor_a_file(full_path, "NPP", "VIIRSN") @pytest.fixture(scope="module") def seadas_l2_viirs_j01_chlor_a(tmp_path_factory): """Create VIIRS JPSS-01 SEADAS file.""" filename = "SEADAS_j01_d20211118_t1728125_e1739327.hdf" full_path = str(tmp_path_factory.mktemp("seadas") / filename) return _create_seadas_chlor_a_file(full_path, "JPSS-1", "VIIRSJ1") def _create_seadas_chlor_a_file(full_path, mission, sensor): h = SD(full_path, SDC.WRITE | SDC.CREATE) setattr(h, "Sensor Name", sensor) h.Mission = mission setattr(h, "Start Time", "2021322175853191") setattr(h, "End Time", "2021322180551214") lon_info = { "type": SDC.FLOAT32, "data": np.zeros((5, 5), dtype=np.float32), "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"], "attrs": { "long_name": "Longitude\x00", "standard_name": "longitude\x00", "units": "degrees_east\x00", "valid_range": (-180.0, 180.0), } } lat_info = { "type": SDC.FLOAT32, "data": np.zeros((5, 5), np.float32), "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"], "attrs": { "long_name": "Latitude\x00", "standard_name": "latitude\x00", "units": "degrees_north\x00", "valid_range": (-90.0, 90.0), } } _add_variable_to_file(h, "longitude", lon_info) _add_variable_to_file(h, "latitude", lat_info) chlor_a_info = { "type": SDC.FLOAT32, "data": np.zeros((5, 5), np.float32), "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"], "attrs": { "long_name": "Chlorophyll Concentration, OCI Algorithm\x00", "units": "mg m^-3\x00", "standard_name": "mass_concentration_of_chlorophyll_in_sea_water\x00", "valid_range": (0.001, 100.0), } } _add_variable_to_file(h, "chlor_a", chlor_a_info) return [full_path] def _add_variable_to_file(h, var_name, var_info): v = h.create(var_name, var_info['type'], var_info['data'].shape) v[:] = var_info['data'] for dim_count, dimension_name in enumerate(var_info['dim_labels']): v.dim(dim_count).setname(dimension_name) if var_info.get('fill_value'): v.setfillvalue(var_info['fill_value']) for attr_key, attr_val in var_info['attrs'].items(): setattr(v, attr_key, attr_val) class TestSEADAS: """Test the SEADAS L2 file reader.""" def test_available_reader(self): """Test that SEADAS L2 reader is available.""" assert 'seadas_l2' in available_readers() @pytest.mark.parametrize( "input_files", [ lazy_fixture("seadas_l2_modis_chlor_a"), lazy_fixture("seadas_l2_viirs_npp_chlor_a"), lazy_fixture("seadas_l2_viirs_j01_chlor_a"), ]) def test_scene_available_datasets(self, input_files): """Test that datasets are available.""" scene = Scene(reader='seadas_l2', filenames=input_files) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 assert 'chlor_a' in available_datasets @pytest.mark.parametrize( ("input_files", "exp_plat", 
"exp_sensor", "exp_rps"), [ (lazy_fixture("seadas_l2_modis_chlor_a"), "Aqua", {"modis"}, 10), (lazy_fixture("seadas_l2_viirs_npp_chlor_a"), "Suomi-NPP", {"viirs"}, 16), (lazy_fixture("seadas_l2_viirs_j01_chlor_a"), "NOAA-20", {"viirs"}, 16), ]) def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps): """Test that we can load 'chlor_a'.""" scene = Scene(reader='seadas_l2', filenames=input_files) scene.load(['chlor_a']) data_arr = scene['chlor_a'] assert data_arr.attrs['platform_name'] == exp_plat assert data_arr.attrs['sensor'] == exp_sensor assert data_arr.attrs['units'] == 'mg m^-3' assert data_arr.dtype.type == np.float32 assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["rows_per_scan"] == exp_rps satpy-0.34.0/satpy/tests/reader_tests/test_seviri_base.py000066400000000000000000000300641420401153000236210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test the MSG common (native and hrit format) functionionalities.""" import unittest from datetime import datetime import dask.array as da import numpy as np import pytest import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.seviri_base import ( NoValidOrbitParams, OrbitPolynomial, OrbitPolynomialFinder, chebyshev, dec10216, get_cds_time, get_padding_area, get_satpos, pad_data_horizontally, pad_data_vertically, ) def chebyshev4(c, x, domain): """Evaluate 4th order Chebyshev polynomial.""" start_x, end_x = domain t = (x - 0.5 * (end_x + start_x)) / (0.5 * (end_x - start_x)) return c[0] + c[1]*t + c[2]*(2*t**2 - 1) + c[3]*(4*t**3 - 3*t) - 0.5*c[0] class SeviriBaseTest(unittest.TestCase): """Test SEVIRI base.""" def test_dec10216(self): """Test the dec10216 function.""" res = dec10216(np.array([255, 255, 255, 255, 255], dtype=np.uint8)) exp = (np.ones((4, )) * 1023).astype(np.uint16) np.testing.assert_equal(res, exp) res = dec10216(np.array([1, 1, 1, 1, 1], dtype=np.uint8)) exp = np.array([4, 16, 64, 257], dtype=np.uint16) np.testing.assert_equal(res, exp) def test_chebyshev(self): """Test the chebyshev function.""" coefs = [1, 2, 3, 4] time = 123 domain = [120, 130] res = chebyshev(coefs=[1, 2, 3, 4], time=time, domain=domain) exp = chebyshev4(coefs, time, domain) np.testing.assert_allclose(res, exp) def test_get_cds_time(self): """Test the get_cds_time function.""" # Scalar self.assertEqual(get_cds_time(days=21246, msecs=12*3600*1000), np.datetime64('2016-03-03 12:00')) # Array days = np.array([21246, 21247, 21248]) msecs = np.array([12*3600*1000, 13*3600*1000 + 1, 14*3600*1000 + 2]) expected = np.array([np.datetime64('2016-03-03 12:00:00.000'), np.datetime64('2016-03-04 13:00:00.001'), np.datetime64('2016-03-05 14:00:00.002')]) np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) days = 21246 msecs = 12*3600*1000 expected = np.datetime64('2016-03-03 12:00:00.000') 
np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) east_bound = 5 west_bound = 10 final_size = (1, 20) with self.assertRaises(IndexError): pad_data_horizontally(data, final_size, east_bound, west_bound) def test_pad_data_vertically_bad_shape(self): """Test the error handling for the vertical hrv padding.""" data = xr.DataArray(data=np.zeros((10, 1)), dims=('y', 'x')) south_bound = 5 north_bound = 10 final_size = (20, 1) with self.assertRaises(IndexError): pad_data_vertically(data, final_size, south_bound, north_bound) @staticmethod def test_pad_data_horizontally(): """Test the horizontal hrv padding.""" data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) east_bound = 4 west_bound = 13 final_size = (1, 20) res = pad_data_horizontally(data, final_size, east_bound, west_bound) expected = np.array([[np.nan, np.nan, np.nan, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]]) np.testing.assert_equal(res, expected) @staticmethod def test_pad_data_vertically(): """Test the vertical hrv padding.""" data = xr.DataArray(data=np.zeros((10, 1)), dims=('y', 'x')) south_bound = 4 north_bound = 13 final_size = (20, 1) res = pad_data_vertically(data, final_size, south_bound, north_bound) expected = np.zeros(final_size) expected[:] = np.nan expected[south_bound-1:north_bound] = 0. np.testing.assert_equal(res, expected) @staticmethod def test_get_padding_area_float(): """Test padding area generator for floats.""" shape = (10, 10) dtype = np.float64 res = get_padding_area(shape, dtype) expected = da.full(shape, np.nan, dtype=dtype, chunks=CHUNK_SIZE) np.testing.assert_array_equal(res, expected) @staticmethod def test_get_padding_area_int(): """Test padding area generator for integers.""" shape = (10, 10) dtype = np.int64 res = get_padding_area(shape, dtype) expected = da.full(shape, 0, dtype=dtype, chunks=CHUNK_SIZE) np.testing.assert_array_equal(res, expected) ORBIT_POLYNOMIALS = { 'StartTime': np.array([ [ datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(1958, 1, 1, 0)] ]), 'EndTime': np.array([ [ datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(2006, 1, 2, 0), datetime(1958, 1, 1, 0) ] ]), 'X': [np.zeros(8), [8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, 7.70548174e-04, -1.44262718e-04], np.zeros(8)], 'Y': [np.zeros(8), [-5.21170255e+03, 5.12998948e+00, -1.33370453e+00, -3.09634144e-01, 6.18232793e-02, 7.50505681e-03, -1.35131011e-03, -1.12054405e-04], np.zeros(8)], 'Z': [np.zeros(8), [-6.51293855e+02, 1.45830459e+02, 5.61379400e+01, -3.90970565e+00, -7.38137565e-01, 3.06131644e-02, 3.82892428e-03, -1.12739309e-04], np.zeros(8)], } ORBIT_POLYNOMIALS_SYNTH = { # 12-31: Contiguous # 01-01: Small gap (12:00 - 13:00) # 01-02: Large gap (04:00 - 18:00) # 01-03: Overlap (10:00 - 13:00) 'StartTime': np.array([ [ datetime(2005, 12, 31, 10), datetime(2005, 12, 31, 12), datetime(2006, 1, 1, 10), datetime(2006, 1, 1, 13), datetime(2006, 1, 2, 0), datetime(2006, 1, 2, 18), datetime(2006, 1, 3, 6), datetime(2006, 1, 3, 10), ] ]), 'EndTime': np.array([ [ datetime(2005, 12, 31, 12), datetime(2005, 12, 31, 18), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(2006, 1, 2, 4), datetime(2006, 1, 2, 22), datetime(2006, 1, 3, 13), datetime(2006, 1, 3, 18), ] ]), 'X': [1.0, 
2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], 'Y': [1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 8.1], 'Z': [1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 8.2], } ORBIT_POLYNOMIALS_INVALID = { 'StartTime': np.array([ [ datetime(1958, 1, 1), datetime(1958, 1, 1) ] ]), 'EndTime': np.array([ [ datetime(1958, 1, 1), datetime(1958, 1, 1) ] ]), 'X': [1, 2], 'Y': [3, 4], 'Z': [5, 6], } class TestSatellitePosition: """Test locating the satellite.""" @pytest.fixture def orbit_polynomial(self): """Get an orbit polynomial for testing.""" return OrbitPolynomial( start_time=datetime(2006, 1, 1, 12), end_time=datetime(2006, 1, 1, 18), coefs=( np.array([8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, 7.70548174e-04, -1.44262718e-04]), np.array([-5.21170255e+03, 5.12998948e+00, -1.33370453e+00, -3.09634144e-01, 6.18232793e-02, 7.50505681e-03, -1.35131011e-03, -1.12054405e-04]), np.array([-6.51293855e+02, 1.45830459e+02, 5.61379400e+01, -3.90970565e+00, -7.38137565e-01, 3.06131644e-02, 3.82892428e-03, -1.12739309e-04]) ) ) @pytest.fixture def time(self): """Get scan timestamp for testing.""" return datetime(2006, 1, 1, 12, 15, 9, 304888) def test_eval_polynomial(self, orbit_polynomial, time): """Test getting the position in cartesian coordinates.""" x, y, z = orbit_polynomial.evaluate(time) np.testing.assert_allclose( [x, y, z], [42078421.37095518, -2611352.744615312, -419828.9699940758] ) def test_get_satpos(self, orbit_polynomial, time): """Test getting the position in geodetic coordinates.""" lon, lat, alt = get_satpos( orbit_polynomial=orbit_polynomial, time=time, semi_major_axis=6378169.00, semi_minor_axis=6356583.80 ) np.testing.assert_allclose( [lon, lat, alt], [-3.55117540817073, -0.5711243456528018, 35783296.150123544] ) class TestOrbitPolynomialFinder: """Unit tests for orbit polynomial finder.""" @pytest.mark.parametrize( ('orbit_polynomials', 'time', 'orbit_polynomial_exp'), [ # Contiguous validity intervals (that's the norm) ( ORBIT_POLYNOMIALS_SYNTH, datetime(2005, 12, 31, 12, 15), OrbitPolynomial( coefs=(2.0, 2.1, 2.2), start_time=np.datetime64('2005-12-31 12:00'), end_time=np.datetime64('2005-12-31 18:00') ) ), # No interval enclosing the given timestamp, but closest interval # not too far away ( ORBIT_POLYNOMIALS_SYNTH, datetime(2006, 1, 1, 12, 15), OrbitPolynomial( coefs=(3.0, 3.1, 3.2), start_time=np.datetime64('2006-01-01 10:00'), end_time=np.datetime64('2006-01-01 12:00') ) ), # Overlapping intervals ( ORBIT_POLYNOMIALS_SYNTH, datetime(2006, 1, 3, 12, 15), OrbitPolynomial( coefs=(8.0, 8.1, 8.2), start_time=np.datetime64('2006-01-03 10:00'), end_time=np.datetime64('2006-01-03 18:00') ) ), ] ) def test_get_orbit_polynomial(self, orbit_polynomials, time, orbit_polynomial_exp): """Test getting the satellite locator.""" finder = OrbitPolynomialFinder(orbit_polynomials) orbit_polynomial = finder.get_orbit_polynomial(time=time) assert orbit_polynomial == orbit_polynomial_exp @pytest.mark.parametrize( ('orbit_polynomials', 'time'), [ # No interval enclosing the given timestamp and closest interval # too far away (ORBIT_POLYNOMIALS_SYNTH, datetime(2006, 1, 2, 12, 15)), # No valid polynomials at all (ORBIT_POLYNOMIALS_INVALID, datetime(2006, 1, 1, 12, 15)) ] ) def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): """Test exceptions thrown while getting the satellite locator.""" finder = OrbitPolynomialFinder(orbit_polynomials) with pytest.raises(NoValidOrbitParams): finder.get_orbit_polynomial(time=time) 
satpy-0.34.0/satpy/tests/reader_tests/test_seviri_l1b_calibration.py000066400000000000000000000306361420401153000257410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittesting the native msg reader.""" import unittest from datetime import datetime import numpy as np import pytest import xarray as xr from satpy.readers.seviri_base import SEVIRICalibrationAlgorithm, SEVIRICalibrationHandler COUNTS_INPUT = xr.DataArray( np.array([[377., 377., 377., 376., 375.], [376., 375., 376., 374., 374.], [374., 373., 373., 374., 374.], [347., 345., 345., 348., 347.], [306., 306., 307., 307., 308.]], dtype=np.float32) ) RADIANCES_OUTPUT = xr.DataArray( np.array([[66.84162903, 66.84162903, 66.84162903, 66.63659668, 66.4315567], [66.63659668, 66.4315567, 66.63659668, 66.22652435, 66.22652435], [66.22652435, 66.02148438, 66.02148438, 66.22652435, 66.22652435], [60.69055939, 60.28048706, 60.28048706, 60.89559937, 60.69055939], [52.28409576, 52.28409576, 52.48912811, 52.48912811, 52.69416809]], dtype=np.float32) ) GAIN = 0.20503567620766011 OFFSET = -10.456819486590666 CAL_TYPE1 = 1 CAL_TYPE2 = 2 CAL_TYPEBAD = -1 CHANNEL_NAME = 'IR_108' PLATFORM_ID = 323 # Met-10 TBS_OUTPUT1 = xr.DataArray( np.array([[269.29684448, 269.29684448, 269.29684448, 269.13296509, 268.96871948], [269.13296509, 268.96871948, 269.13296509, 268.80422974, 268.80422974], [268.80422974, 268.63937378, 268.63937378, 268.80422974, 268.80422974], [264.23751831, 263.88912964, 263.88912964, 264.41116333, 264.23751831], [256.77682495, 256.77682495, 256.96743774, 256.96743774, 257.15756226]], dtype=np.float32) ) TBS_OUTPUT2 = xr.DataArray( np.array([[268.94519043, 268.94519043, 268.94519043, 268.77984619, 268.61422729], [268.77984619, 268.61422729, 268.77984619, 268.44830322, 268.44830322], [268.44830322, 268.28204346, 268.28204346, 268.44830322, 268.44830322], [263.84396362, 263.49285889, 263.49285889, 264.01898193, 263.84396362], [256.32858276, 256.32858276, 256.52044678, 256.52044678, 256.71188354]], dtype=np.float32) ) VIS008_SOLAR_IRRADIANCE = 73.1807 VIS008_RADIANCE = xr.DataArray( np.array([[0.62234485, 0.59405649, 0.59405649, 0.59405649, 0.59405649], [0.59405649, 0.62234485, 0.62234485, 0.59405649, 0.62234485], [0.76378691, 0.79207528, 0.79207528, 0.76378691, 0.79207528], [3.30974245, 3.33803129, 3.33803129, 3.25316572, 3.47947311], [7.52471399, 7.83588648, 8.2602129, 8.57138538, 8.99571133]], dtype=np.float32) ) VIS008_REFLECTANCE = xr.DataArray( np.array([[2.8066392, 2.6790648, 2.6790648, 2.6790648, 2.6790648], [2.6790648, 2.8066392, 2.8066392, 2.6790648, 2.8066392], [3.444512, 3.572086, 3.572086, 3.444512, 3.572086], [14.926213, 15.053792, 15.053792, 14.671064, 15.691662], [33.934814, 35.33813, 37.251755, 38.655075, 40.56869]], dtype=np.float32) ) class TestSEVIRICalibrationAlgorithm(unittest.TestCase): """Unit Tests for SEVIRI calibration algorithm.""" def 
setUp(self): """Set up the SEVIRI Calibration algorithm for testing.""" self.algo = SEVIRICalibrationAlgorithm( platform_id=PLATFORM_ID, scan_time=datetime(2020, 8, 15, 13, 0, 40) ) def test_convert_to_radiance(self): """Test the conversion from counts to radiances.""" result = self.algo.convert_to_radiance(COUNTS_INPUT, GAIN, OFFSET) xr.testing.assert_allclose(result, RADIANCES_OUTPUT) self.assertEqual(result.dtype, np.float32) def test_ir_calibrate(self): """Test conversion from radiance to brightness temperature.""" result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE1) xr.testing.assert_allclose(result, TBS_OUTPUT1, rtol=1E-5) self.assertEqual(result.dtype, np.float32) result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE2) xr.testing.assert_allclose(result, TBS_OUTPUT2, rtol=1E-5) with self.assertRaises(NotImplementedError): self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPEBAD) def test_vis_calibrate(self): """Test conversion from radiance to reflectance.""" result = self.algo.vis_calibrate(VIS008_RADIANCE, VIS008_SOLAR_IRRADIANCE) xr.testing.assert_allclose(result, VIS008_REFLECTANCE) self.assertTrue(result.sun_earth_distance_correction_applied) self.assertEqual(result.dtype, np.float32) class TestSeviriCalibrationHandler: """Unit tests for SEVIRI calibration handler.""" def test_init(self): """Test initialization of the calibration handler.""" with pytest.raises(ValueError): SEVIRICalibrationHandler( platform_id=None, channel_name=None, coefs=None, calib_mode='invalid', scan_time=None ) def _get_calibration_handler(self, calib_mode='NOMINAL', ext_coefs=None): """Provide a calibration handler.""" return SEVIRICalibrationHandler( platform_id=324, channel_name='IR_108', coefs={ 'coefs': { 'NOMINAL': { 'gain': 10, 'offset': -1 }, 'GSICS': { 'gain': 20, 'offset': -2 }, 'EXTERNAL': ext_coefs or {} }, 'radiance_type': 1 }, calib_mode=calib_mode, scan_time=None ) def test_calibrate_exceptions(self): """Test exceptions raised by the calibration handler.""" calib = self._get_calibration_handler() with pytest.raises(ValueError): calib.calibrate(None, 'invalid') @pytest.mark.parametrize( ('calib_mode', 'ext_coefs', 'expected'), [ ('NOMINAL', {}, (10, -1)), ('GSICS', {}, (20, -40)), ('GSICS', {'gain': 30, 'offset': -3}, (30, -3)), ('NOMINAL', {'gain': 30, 'offset': -3}, (30, -3)) ] ) def test_get_gain_offset(self, calib_mode, ext_coefs, expected): """Test selection of gain and offset.""" calib = self._get_calibration_handler(calib_mode=calib_mode, ext_coefs=ext_coefs) coefs = calib.get_gain_offset() assert coefs == expected class TestFileHandlerCalibrationBase: """Base class for file handler calibration tests.""" platform_id = 324 gains_nominal = np.arange(1, 13) offsets_nominal = np.arange(-1, -13, -1) # No GSICS coefficients for VIS channels -> set to zero gains_gsics = [0, 0, 0, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 0] offsets_gsics = [0, 0, 0, -0.4, -0.5, -0.6, -0.7, -0.8, -0.9, -1.0, -1.1, 0] radiance_types = 2 * np.ones(12) scan_time = datetime(2020, 1, 1) external_coefs = { 'VIS006': {'gain': 10, 'offset': -10}, 'IR_108': {'gain': 20, 'offset': -20}, 'HRV': {'gain': 5, 'offset': -5} } spectral_channel_ids = {'VIS006': 1, 'IR_108': 9, 'HRV': 12} expected = { 'VIS006': { 'counts': { 'NOMINAL': xr.DataArray( [[0, 10], [100, 255]], dims=('y', 'x') ) }, 'radiance': { 'NOMINAL': xr.DataArray( [[np.nan, 9], [99, 254]], dims=('y', 'x') ), 'GSICS': xr.DataArray( [[np.nan, 9], [99, 254]], dims=('y', 'x') ), 'EXTERNAL': xr.DataArray( [[np.nan, 
90], [990, 2540]], dims=('y', 'x') ) }, 'reflectance': { 'NOMINAL': xr.DataArray( [[np.nan, 40.47923], [445.27155, 1142.414]], dims=('y', 'x') ), 'EXTERNAL': xr.DataArray( [[np.nan, 404.7923], [4452.7153, 11424.14]], dims=('y', 'x') ) } }, 'IR_108': { 'counts': { 'NOMINAL': xr.DataArray( [[0, 10], [100, 255]], dims=('y', 'x') ) }, 'radiance': { 'NOMINAL': xr.DataArray( [[np.nan, 81], [891, 2286]], dims=('y', 'x') ), 'GSICS': xr.DataArray( [[np.nan, 8.19], [89.19, 228.69]], dims=('y', 'x') ), 'EXTERNAL': xr.DataArray( [[np.nan, 180], [1980, 5080]], dims=('y', 'x') ) }, 'brightness_temperature': { 'NOMINAL': xr.DataArray( [[np.nan, 279.82318], [543.2585, 812.77167]], dims=('y', 'x') ), 'GSICS': xr.DataArray( [[np.nan, 189.20985], [285.53293, 356.06668]], dims=('y', 'x') ), 'EXTERNAL': xr.DataArray( [[np.nan, 335.14236], [758.6249, 1262.7567]], dims=('y', 'x') ), } }, 'HRV': { 'counts': { 'NOMINAL': xr.DataArray( [[0, 10], [100, 255]], dims=('y', 'x') ) }, 'radiance': { 'NOMINAL': xr.DataArray( [[np.nan, 108], [1188, 3048]], dims=('y', 'x') ), 'GSICS': xr.DataArray( [[np.nan, 108], [1188, 3048]], dims=('y', 'x') ), 'EXTERNAL': xr.DataArray( [[np.nan, 45], [495, 1270]], dims=('y', 'x') ) }, 'reflectance': { 'NOMINAL': xr.DataArray( [[np.nan, 401.28372], [4414.121, 11325.118]], dims=('y', 'x') ), 'EXTERNAL': xr.DataArray( [[np.nan, 167.20154], [1839.217, 4718.799]], dims=('y', 'x') ) } } } @pytest.fixture(name='counts') def counts(self): """Provide fake image counts.""" return xr.DataArray( [[0, 10], [100, 255]], dims=('y', 'x') ) def _get_expected( self, channel, calibration, calib_mode, use_ext_coefs ): if use_ext_coefs: return self.expected[channel][calibration]['EXTERNAL'] return self.expected[channel][calibration][calib_mode] satpy-0.34.0/satpy/tests/reader_tests/test_seviri_l1b_hrit.py000066400000000000000000000440701420401153000244150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""The HRIT msg reader tests package.""" import unittest from datetime import datetime from unittest import mock import numpy as np import pytest import xarray as xr from numpy import testing as npt import satpy.tests.reader_tests.test_seviri_l1b_hrit_setup as setup from satpy.readers.seviri_l1b_hrit import HRITMSGEpilogueFileHandler, HRITMSGFileHandler, HRITMSGPrologueFileHandler from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS_INVALID from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid class TestHRITMSGBase(unittest.TestCase): """Baseclass for SEVIRI HRIT reader tests.""" def assert_attrs_equal(self, attrs, attrs_exp): """Assert equality of dataset attributes.""" assert_attrs_equal(attrs, attrs_exp, tolerance=1e-4) class TestHRITMSGFileHandlerHRV(TestHRITMSGBase): """Test the HRITFileHandler.""" def setUp(self): """Set up the hrit file handler for testing HRV.""" self.start_time = datetime(2016, 3, 3, 0, 0) self.nlines = 464 self.reader = setup.get_fake_file_handler( start_time=self.start_time, nlines=self.nlines, ncols=5568, ) self.reader.mda.update({ 'segment_sequence_number': 18, 'planned_start_segment_number': 1 }) self.reader.fill_hrv = True @mock.patch('satpy.readers.hrit_base.np.memmap') def test_read_hrv_band(self, memmap): """Test reading the hrv band.""" nbits = self.reader.mda['number_of_bits_per_pixel'] memmap.return_value = np.random.randint(0, 256, size=int((464 * 5568 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band('HRV', None) self.assertEqual(res.shape, (464, 5568)) @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the hrv dataset.""" key = make_dataid(name='HRV', calibration='reflectance') info = setup.get_fake_dataset_info() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) res = self.reader.get_dataset(key, info) self.assertEqual(res.shape, (464, 11136)) # Test method calls parent_get_dataset.assert_called_with(key, info) calibrate.assert_called_with(parent_get_dataset(), key['calibration']) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( res['acq_time'], setup.get_acq_time_exp(self.start_time, self.nlines) ) @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): """Test getting a non-filled hrv dataset.""" key = make_dataid(name='HRV', calibration='reflectance') key.name = 'HRV' info = setup.get_fake_dataset_info() self.reader.fill_hrv = False parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) res = self.reader.get_dataset(key, info) self.assertEqual(res.shape, (464, 5568)) # Test method calls parent_get_dataset.assert_called_with(key, info) calibrate.assert_called_with(parent_get_dataset(), key['calibration']) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( res['acq_time'], setup.get_acq_time_exp(self.start_time, self.nlines) ) def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters area = 
self.reader.get_area_def(make_dataid(name='HRV', resolution=1000)) self.assertEqual(area.area_extent, (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378169.0) self.assertAlmostEqual(b, 6356583.8) self.assertEqual(proj_dict['h'], 35785831.0) self.assertEqual(proj_dict['lon_0'], 0.0) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') self.reader.fill_hrv = False area = self.reader.get_area_def(make_dataid(name='HRV', resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, (-22017598561055.01, -2926674655354.9604, 23564847539690.22, 77771774058.38356)) npt.assert_allclose(area.defs[1].area_extent, (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604)) self.assertEqual(area.defs[0].area_id, 'msg_seviri_fes_1km') self.assertEqual(area.defs[1].area_id, 'msg_seviri_fes_1km') class TestHRITMSGFileHandler(TestHRITMSGBase): """Test the HRITFileHandler.""" def setUp(self): """Set up the hrit file handler for testing.""" self.start_time = datetime(2016, 3, 3, 0, 0) self.nlines = 464 self.ncols = 3712 self.projection_longitude = 9.5 self.reader = setup.get_fake_file_handler( start_time=self.start_time, nlines=self.nlines, ncols=self.ncols, projection_longitude=self.projection_longitude ) def _get_fake_data(self): return xr.DataArray( data=np.zeros((self.nlines, self.ncols)), dims=('y', 'x') ) def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name='VIS006', resolution=3000)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378169.0) self.assertAlmostEqual(b, 6356583.8) self.assertEqual(proj_dict['h'], 35785831.0) self.assertEqual(proj_dict['lon_0'], self.projection_longitude) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') self.assertEqual(area.area_extent, (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356)) # Data shifted by 1.5km to N-W self.reader.mda['offset_corrected'] = False area = self.reader.get_area_def(make_dataid(name='VIS006', resolution=3000)) self.assertEqual(area.area_extent, (-77771772558.38356, -3720765402503.719, 30310525627938.438, 77771772558.38356)) self.assertEqual(area.area_id, 'msg_seviri_rss_3km') @mock.patch('satpy.readers.hrit_base.np.memmap') def test_read_band(self, memmap): """Test reading a band.""" nbits = self.reader.mda['number_of_bits_per_pixel'] memmap.return_value = np.random.randint(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band('VIS006', None) self.assertEqual(res.shape, (464, 3712)) @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = data key = make_dataid(name='VIS006', calibration='reflectance') info = setup.get_fake_dataset_info() res = self.reader.get_dataset(key, info) # Test method calls expected = data.copy() expected['acq_time'] = ( 'y', setup.get_acq_time_exp(self.start_time, self.nlines) ) xr.testing.assert_equal(res, expected) self.assert_attrs_equal( res.attrs, setup.get_attrs_exp(self.projection_longitude) ) 
@mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') def test_get_dataset_with_raw_metadata(self, calibrate, parent_get_dataset): """Test getting the dataset.""" calibrate.return_value = self._get_fake_data() key = make_dataid(name='VIS006', calibration='reflectance') info = setup.get_fake_dataset_info() self.reader.include_raw_metadata = True res = self.reader.get_dataset(key, info) assert 'raw_metadata' in res.attrs def test_get_raw_mda(self): """Test provision of raw metadata.""" self.reader.mda = {'segment': 1, 'loff': 123} self.reader.prologue_.reduce = lambda max_size: {'prologue': 1} self.reader.epilogue_.reduce = lambda max_size: {'epilogue': 1} expected = {'prologue': 1, 'epilogue': 1, 'segment': 1} self.assertDictEqual(self.reader._get_raw_mda(), expected) # Make sure _get_raw_mda() doesn't modify the original dictionary self.assertIn('loff', self.reader.mda) def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" reader = setup.get_fake_file_handler( start_time=self.start_time, nlines=self.nlines, ncols=self.ncols, projection_longitude=self.projection_longitude, orbit_polynomials=ORBIT_POLYNOMIALS_INVALID ) self.assertNotIn( 'satellite_actual_longitude', reader.mda['orbital_parameters'] ) class TestHRITMSGPrologueFileHandler(unittest.TestCase): """Test the HRIT prologue file handler.""" def setUp(self, *mocks): """Set up the test case.""" fh = setup.get_fake_file_handler( start_time=datetime(2016, 3, 3, 0, 0), nlines=464, ncols=3712, ) self.reader = fh.prologue_ @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the prologue file handler accepts extra keyword arguments.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched HRITMSGPrologueFileHandler(filename='dummy_prologue_filename', filename_info={'service': ''}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, calib_mode='nominal') @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') def test_reduce(self, reduce_mda): """Test metadata reduction.""" reduce_mda.return_value = 'reduced' # Set buffer self.assertEqual(self.reader.reduce(123), 'reduced') # Read buffer self.assertEqual(self.reader.reduce(123), 'reduced') reduce_mda.assert_called_once() class TestHRITMSGEpilogueFileHandler(unittest.TestCase): """Test the HRIT epilogue file handler.""" @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) def setUp(self, init, *mocks): """Set up the test case.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched self.reader = HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename', filename_info={'service': ''}, filetype_info=None, calib_mode='nominal') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the epilogue file handler accepts extra keyword arguments.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename', 
filename_info={'service': ''}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, calib_mode='nominal') @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') def test_reduce(self, reduce_mda): """Test metadata reduction.""" reduce_mda.return_value = 'reduced' # Set buffer self.assertEqual(self.reader.reduce(123), 'reduced') reduce_mda.assert_called() # Read buffer reduce_mda.reset_mock() self.reader._reduced = 'red' self.assertEqual(self.reader.reduce(123), 'red') reduce_mda.assert_not_called() class TestHRITMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" @pytest.fixture(name='file_handler') def file_handler(self): """Create a mocked file handler.""" prolog = { 'RadiometricProcessing': { 'Level15ImageCalibration': { 'CalSlope': self.gains_nominal, 'CalOffset': self.offsets_nominal, }, 'MPEFCalFeedback': { 'GSICSCalCoeff': self.gains_gsics, 'GSICSOffsetCount': self.offsets_gsics, } }, 'ImageDescription': { 'Level15ImageProduction': { 'PlannedChanProcessing': self.radiance_types } } } epilog = { 'ImageProductionStats': { 'ActualScanningSummary': { 'ForwardScanStart': self.scan_time } } } mda = { 'image_segment_line_quality': { 'line_validity': np.zeros(2), 'line_radiometric_quality': np.zeros(2), 'line_geometric_quality': np.zeros(2) }, } with mock.patch( 'satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.__init__', return_value=None ): fh = HRITMSGFileHandler() fh.platform_id = self.platform_id fh.mda = mda fh.prologue = prolog fh.epilogue = epilog return fh @pytest.mark.parametrize( ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), [ # VIS channel, internal coefficients ('VIS006', 'counts', 'NOMINAL', False), ('VIS006', 'radiance', 'NOMINAL', False), ('VIS006', 'radiance', 'GSICS', False), ('VIS006', 'reflectance', 'NOMINAL', False), # VIS channel, external coefficients (mode should have no effect) ('VIS006', 'radiance', 'GSICS', True), ('VIS006', 'reflectance', 'NOMINAL', True), # IR channel, internal coefficients ('IR_108', 'counts', 'NOMINAL', False), ('IR_108', 'radiance', 'NOMINAL', False), ('IR_108', 'radiance', 'GSICS', False), ('IR_108', 'brightness_temperature', 'NOMINAL', False), ('IR_108', 'brightness_temperature', 'GSICS', False), # IR channel, external coefficients (mode should have no effect) ('IR_108', 'radiance', 'NOMINAL', True), ('IR_108', 'brightness_temperature', 'GSICS', True), # HRV channel, internal coefficiens ('HRV', 'counts', 'NOMINAL', False), ('HRV', 'radiance', 'NOMINAL', False), ('HRV', 'radiance', 'GSICS', False), ('HRV', 'reflectance', 'NOMINAL', False), # HRV channel, external coefficients (mode should have no effect) ('HRV', 'radiance', 'GSICS', True), ('HRV', 'reflectance', 'NOMINAL', True), ] ) def test_calibrate( self, file_handler, counts, channel, calibration, calib_mode, use_ext_coefs ): """Test the calibration.""" external_coefs = self.external_coefs if use_ext_coefs else {} expected = self._get_expected( channel=channel, calibration=calibration, calib_mode=calib_mode, use_ext_coefs=use_ext_coefs ) fh = file_handler fh.mda['spectral_channel_id'] = self.spectral_channel_ids[channel] fh.channel_name = channel fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs res = fh.calibrate(counts, calibration) xr.testing.assert_allclose(res, expected) satpy-0.34.0/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py000066400000000000000000000170251420401153000256350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This 
file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Setup for SEVIRI HRIT reader tests.""" from datetime import datetime from unittest import mock import numpy as np from satpy.readers.seviri_l1b_hrit import HRITMSGFileHandler, HRITMSGPrologueFileHandler from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS def new_get_hd(instance, hdr_info): """Generate some metadata.""" instance.mda = {'spectral_channel_id': 1} instance.mda.setdefault('number_of_bits_per_pixel', 10) instance.mda['projection_parameters'] = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'SSP_longitude': 0.0} instance.mda['orbital_parameters'] = {} instance.mda['total_header_length'] = 12 def get_new_read_prologue(prologue): """Create mocked read_prologue() method.""" def new_read_prologue(self): self.prologue = prologue return new_read_prologue def get_fake_file_handler(start_time, nlines, ncols, projection_longitude=0, orbit_polynomials=ORBIT_POLYNOMIALS): """Create a mocked SEVIRI HRIT file handler.""" prologue = get_fake_prologue(projection_longitude, orbit_polynomials) mda = get_fake_mda(nlines=nlines, ncols=ncols, start_time=start_time) filename_info = get_fake_filename_info(start_time) epilogue = get_fake_epilogue() m = mock.mock_open() with mock.patch('satpy.readers.seviri_l1b_hrit.np.fromfile') as fromfile, \ mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen, \ mock.patch('satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES'), \ mock.patch.object(HRITMSGFileHandler, '_get_hd', new=new_get_hd), \ mock.patch.object(HRITMSGPrologueFileHandler, 'read_prologue', new=get_new_read_prologue(prologue)): fromfile.return_value = np.array( [(1, 2)], dtype=[('total_header_length', int), ('hdr_id', int)] ) newopen.return_value.__enter__.return_value.tell.return_value = 1 prologue = HRITMSGPrologueFileHandler( filename='dummy_prologue_filename', filename_info=filename_info, filetype_info={} ) epilogue = mock.MagicMock(epilogue=epilogue) reader = HRITMSGFileHandler( 'filename', filename_info, {'filetype': 'info'}, prologue, epilogue ) reader.mda.update(mda) return reader def get_fake_prologue(projection_longitude, orbit_polynomials): """Create a fake HRIT prologue.""" return { "SatelliteStatus": { "SatelliteDefinition": { "SatelliteId": 324, "NominalLongitude": -3.5 }, 'Orbit': { 'OrbitPolynomial': orbit_polynomials, } }, 'GeometricProcessing': { 'EarthModel': { 'TypeOfEarthModel': 2, 'EquatorialRadius': 6378.169, 'NorthPolarRadius': 6356.5838, 'SouthPolarRadius': 6356.5838 } }, 'ImageDescription': { 'ProjectionDescription': { 'LongitudeOfSSP': projection_longitude }, 'Level15ImageProduction': { 'ImageProcDirection': 1 } }, 'ImageAcquisition': { 'PlannedAcquisitionTime': { 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888) } } } def get_fake_epilogue(): """Create a fake HRIT epilogue.""" return { 'ImageProductionStats': { 'ActualL15CoverageHRV': { 'LowerSouthLineActual': 1, 'LowerNorthLineActual': 8256, 'LowerEastColumnActual': 2877, 
'LowerWestColumnActual': 8444, 'UpperSouthLineActual': 8257, 'UpperNorthLineActual': 11136, 'UpperEastColumnActual': 1805, 'UpperWestColumnActual': 7372 } } } def get_fake_mda(nlines, ncols, start_time): """Create fake metadata.""" nbits = 10 tline = get_acq_time_cds(start_time, nlines) return { 'number_of_bits_per_pixel': nbits, 'number_of_lines': nlines, 'number_of_columns': ncols, 'data_field_length': nlines * ncols * nbits, 'cfac': 5, 'lfac': 5, 'coff': 10, 'loff': 10, 'image_segment_line_quality': { 'line_mean_acquisition': tline } } def get_fake_filename_info(start_time): """Create fake filename information.""" return { 'platform_shortname': 'MSG3', 'start_time': start_time, 'service': 'MSG' } def get_fake_dataset_info(): """Create fake dataset info.""" return { 'units': 'units', 'wavelength': 'wavelength', 'standard_name': 'standard_name' } def get_acq_time_cds(start_time, nlines): """Get fake scanline acquisition times.""" days_since_1958 = (start_time - datetime(1958, 1, 1)).days tline = np.zeros( nlines, dtype=[('days', '>u2'), ('milliseconds', '>u4')] ) tline['days'][1:-1] = days_since_1958 * np.ones(nlines - 2) tline['milliseconds'][1:-1] = np.arange(nlines - 2) return tline def get_acq_time_exp(start_time, nlines): """Get expected scanline acquisition times.""" tline_exp = np.zeros(464, dtype='datetime64[ms]') tline_exp[0] = np.datetime64('NaT') tline_exp[-1] = np.datetime64('NaT') tline_exp[1:-1] = np.datetime64(start_time) tline_exp[1:-1] += np.arange(nlines - 2).astype('timedelta64[ms]') return tline_exp def get_attrs_exp(projection_longitude=0.0): """Get expected dataset attributes.""" return { 'units': 'units', 'wavelength': 'wavelength', 'standard_name': 'standard_name', 'platform_name': 'Meteosat-11', 'sensor': 'seviri', 'satellite_longitude': projection_longitude, 'satellite_latitude': 0.0, 'satellite_altitude': 35785831.0, 'orbital_parameters': {'projection_longitude': projection_longitude, 'projection_latitude': 0., 'projection_altitude': 35785831.0, 'satellite_nominal_longitude': -3.5, 'satellite_nominal_latitude': 0.0, 'satellite_actual_longitude': -3.55117540817073, 'satellite_actual_latitude': -0.5711243456528018, 'satellite_actual_altitude': 35783296.150123544}, 'georef_offset_corrected': True } satpy-0.34.0/satpy/tests/reader_tests/test_seviri_l1b_icare.py000066400000000000000000000223061420401153000245300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the SEVIRI L1b HDF4 from ICARE reader.""" import os import unittest from unittest import mock import numpy as np from satpy.readers import load_reader from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeHDF4FileHandler2(FakeHDF4FileHandler): """Swap in HDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content['/attr/Nadir_Pixel_Size'] = 3000. file_content['/attr/Beginning_Acquisition_Date'] = "2004-12-29T12:15:00Z" file_content['/attr/End_Acquisition_Date'] = "2004-12-29T12:27:44Z" file_content['/attr/Geolocation'] = ('1.3642337E7', '1856.0', '1.3642337E7', '1856.0') file_content['/attr/Altitude'] = '42164.0' file_content['/attr/Geographic_Projection'] = 'geos' file_content['/attr/Projection_Longitude'] = '0.0' file_content['/attr/Sub_Satellite_Longitude'] = '3.4' file_content['/attr/Sensors'] = 'MSG1/SEVIRI' file_content['/attr/Zone'] = 'G' file_content['/attr/_FillValue'] = 1 file_content['/attr/scale_factor'] = 1. file_content['/attr/add_offset'] = 0. # test one IR and one VIS channel file_content['Normalized_Radiance'] = DEFAULT_FILE_DATA file_content['Normalized_Radiance/attr/_FillValue'] = 1 file_content['Normalized_Radiance/attr/scale_factor'] = 1. file_content['Normalized_Radiance/attr/add_offset'] = 0. file_content['Normalized_Radiance/shape'] = DEFAULT_FILE_SHAPE file_content['Brightness_Temperature'] = DEFAULT_FILE_DATA file_content['Brightness_Temperature/attr/_FillValue'] = 1 file_content['Brightness_Temperature/attr/scale_factor'] = 1. file_content['Brightness_Temperature/attr/add_offset'] = 0. 
file_content['Brightness_Temperature/shape'] = DEFAULT_FILE_SHAPE # convert to xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} for a in ['_FillValue', 'scale_factor', 'add_offset']: if key + '/attr/' + a in file_content: attrs[a] = file_content[key + '/attr/' + a] file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs) if 'y' not in file_content['Normalized_Radiance'].dims: file_content['Normalized_Radiance'] = file_content['Normalized_Radiance'].rename({'fakeDim0': 'x', 'fakeDim1': 'y'}) return file_content class TestSEVIRIICAREReader(unittest.TestCase): """Test SEVIRI L1b HDF4 from ICARE Reader.""" yaml_file = 'seviri_l1b_icare.yaml' def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.seviri_l1b_icare import SEVIRI_ICARE self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(SEVIRI_ICARE, '__bases__', (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF4 file handler.""" self.p.stop() def compare_areas(self, v): """Compare produced AreaDefinition with expected.""" test_area = {'area_id': 'geosmsg', 'width': 10, 'height': 300, 'area_extent': (-5567248.2834071, -5570248.6866857, -5537244.2506213, -4670127.7031114)} self.assertEqual(v.attrs['area'].area_id, test_area['area_id']) self.assertEqual(v.attrs['area'].width, test_area['width']) self.assertEqual(v.attrs['area'].height, test_area['height']) np.testing.assert_almost_equal(v.attrs['area'].area_extent, test_area['area_extent']) def test_init(self): """Test basic init with no extra parameters.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf', 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' ]) self.assertEqual(len(loadables), 2) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset_vis(self): """Test loading all datasets from a full swath file.""" from datetime import datetime r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' ]) r.create_filehandlers(loadables) datasets = r.load(['VIS008']) self.assertEqual(len(datasets), 1) for v in datasets.values(): dt = datetime(2004, 12, 29, 12, 27, 44) self.assertEqual(v.attrs['end_time'], dt) self.assertEqual(v.attrs['calibration'], 'reflectance') def test_load_dataset_ir(self): """Test loading all datasets from a full swath file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' ]) r.create_filehandlers(loadables) datasets = r.load(['IR_108']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'brightness_temperature') def test_area_def_lores(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' ]) r.create_filehandlers(loadables) ds = r.load(['VIS008']) self.compare_areas(ds['VIS008']) self.assertEqual(ds['VIS008'].attrs['area'].proj_id, 'msg_lowres') def test_area_def_hires(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables =
r.select_files_from_pathnames([ 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_HRV_V1-04.hdf', ]) r.create_filehandlers(loadables) ds = r.load(['HRV']) self.compare_areas(ds['HRV']) self.assertEqual(ds['HRV'].attrs['area'].proj_id, 'msg_hires') def test_sensor_names(self): """Check satellite name conversion is correct, including error case.""" file_data = FakeHDF4FileHandler2.get_test_content(mock.MagicMock(), mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) sensor_list = {'Meteosat-08': 'MSG1/SEVIRI', 'Meteosat-09': 'MSG2/SEVIRI', 'Meteosat-10': 'MSG3/SEVIRI', 'Meteosat-11': 'MSG4/SEVIRI'} with mock.patch('satpy.tests.reader_tests.test_seviri_l1b_icare.' 'FakeHDF4FileHandler2.get_test_content') as patched_func: def _run_target(): patched_func.return_value = file_data return self.p.target(mock.MagicMock(), mock.MagicMock(), mock.MagicMock()).sensor_name for sat in sensor_list: file_data['/attr/Sensors'] = sensor_list[sat] plat, sens = _run_target() self.assertEqual(plat, sat) with self.assertRaises(NameError): file_data['/attr/Sensors'] = 'BADSAT/NOSENSE' plat, sens = _run_target() def test_bad_bandname(self): """Check reader raises an error if a band bandname is passed.""" with self.assertRaises(NameError): self.p.target(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())._get_dsname({'name': 'badband'}) satpy-0.34.0/satpy/tests/reader_tests/test_seviri_l1b_native.py000066400000000000000000001545251420401153000247440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unittesting the Native SEVIRI reader.""" from __future__ import annotations import os import unittest from datetime import datetime from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.readers.eum_base import time_cds_short from satpy.readers.seviri_l1b_native import ImageBoundaries, NativeMSGFileHandler, Padder, get_available_channels from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS, ORBIT_POLYNOMIALS_INVALID from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid CHANNEL_INDEX_LIST = ['VIS006', 'VIS008', 'IR_016', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'IR_097', 'IR_108', 'IR_120', 'IR_134', 'HRV'] AVAILABLE_CHANNELS = {} for item in CHANNEL_INDEX_LIST: AVAILABLE_CHANNELS[item] = True SEC15HDR = '15_SECONDARY_PRODUCT_HEADER' IDS = 'SelectedBandIDs' TEST1_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} TEST1_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX--XX--XX--' TEST2_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} TEST2_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX-XXXX----X' TEST3_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} TEST3_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XXXXXXXXXXXX' TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK = { 'earth_model': 1, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': True, 'is_rapid_scan': 0, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 3712, 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN = { 'earth_model': 1, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': False, 'is_rapid_scan': 1, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_rss_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 1392, 'Area extent': (5568748.275756836, 5568748.275756836, -5568748.275756836, 1392187.068939209) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL = { 'earth_model': 1, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': False, 'is_rapid_scan': 1, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_rss_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 3712, 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI = { 'earth_model': 1, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': False, 'is_rapid_scan': 0, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 2516, 'Number of rows': 1829, 'Area extent': (5337717.232, 5154692.6389, -2211297.1332, 
-333044.7514) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL = { 'earth_model': 1, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': False, 'is_rapid_scan': 0, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 3712, 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK = { 'earth_model': 1, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': True, 'is_rapid_scan': 0, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 5568, 'Number of rows': 11136, 'Area extent 0': (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525), 'Area extent 1': (3602483.924627304, 5569748.188853264, -1966264.1298770905, 2625352.665781975) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL = { 'earth_model': 1, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': True, 'is_rapid_scan': 0, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 11136, 'Number of rows': 11136, 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN = { 'earth_model': 1, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': False, 'is_rapid_scan': 1, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_rss_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 5568, 'Number of rows': 8192, 'Area extent': (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL = { 'earth_model': 1, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': False, 'is_rapid_scan': 1, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_rss_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 11136, 'Number of rows': 11136, 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI = { 'earth_model': 1, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': False, 'is_rapid_scan': 0, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 7548, 'Number of rows': 5487, 'Area extent': (5336716.885566711, 5155692.568421364, -2212297.179698944, -332044.6038246155) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL = { 'earth_model': 1, 
'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': False, 'is_rapid_scan': 0, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 11136, 'Number of rows': 11136, 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK = { 'earth_model': 2, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': True, 'is_rapid_scan': 0, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 3712, 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK = { 'earth_model': 2, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': True, 'is_rapid_scan': 0, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 5568, 'Number of rows': 11136, 'Area extent 0': (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221), 'Area extent 1': (3600983.723104, 5571248.390376568, -1967764.3314003944, 2626852.867305279) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL = { 'earth_model': 2, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': True, 'is_rapid_scan': 0, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 11136, 'Number of rows': 11136, 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN = { 'earth_model': 2, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': False, 'is_rapid_scan': 1, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_rss_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 1392, 'Area extent': (5567248.074173927, 5570248.477339745, -5570248.477339745, 1393687.2705221176) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL = { 'earth_model': 2, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': False, 'is_rapid_scan': 1, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_rss_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 3712, 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN = { 'earth_model': 2, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': False, 'is_rapid_scan': 1, 
'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_rss_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 5568, 'Number of rows': 8192, 'Area extent': (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL = { 'earth_model': 2, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': False, 'is_rapid_scan': 1, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_rss_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 11136, 'Number of rows': 11136, 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI = { 'earth_model': 2, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': False, 'is_rapid_scan': 0, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 2516, 'Number of rows': 1829, 'Area extent': (5336217.0304, 5156192.8405, -2212797.3348, -331544.5498) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL = { 'earth_model': 2, 'dataset_id': make_dataid(name='VIS006', resolution=3000), 'is_full_disk': False, 'is_rapid_scan': 0, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_3km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 3712, 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI = { 'earth_model': 2, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': False, 'is_rapid_scan': 0, 'fill_disk': False, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 7548, 'Number of rows': 5487, 'Area extent': (5335216.684043407, 5157192.769944668, -2213797.381222248, -330544.4023013115) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL = { 'earth_model': 2, 'dataset_id': make_dataid(name='HRV', resolution=1000), 'is_full_disk': False, 'is_rapid_scan': 0, 'fill_disk': True, 'expected_area_def': { 'Area ID': 'msg_seviri_fes_1km', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 11136, 'Number of rows': 11136, 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_IS_ROI_FULLDISK = { 'is_full_disk': True, 'is_rapid_scan': 0, 'is_roi': False } TEST_IS_ROI_RAPIDSCAN = { 'is_full_disk': False, 'is_rapid_scan': 1, 'is_roi': False } TEST_IS_ROI_ROI = { 'is_full_disk': False, 'is_rapid_scan': 0, 'is_roi': True } TEST_CALIBRATION_MODE = { 'earth_model': 1, 'dataset_id': make_dataid(name='IR_108', calibration='radiance'), 'is_full_disk': True, 
'is_rapid_scan': 0, 'calibration': 'radiance', 'CalSlope': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], 'CalOffset': [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], 'GSICSCalCoeff': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], 'GSICSOffsetCount': [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] } TEST_PADDER_RSS_ROI = { 'img_bounds': {'south': [2], 'north': [4], 'east': [2], 'west': [3]}, 'is_full_disk': False, 'dataset_id': make_dataid(name='VIS006'), 'dataset': xr.DataArray(np.ones((3, 2)), dims=['y', 'x']).astype(np.float32), 'final_shape': (5, 5), 'expected_padded_data': xr.DataArray(np.array([[np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, np.nan, np.nan, np.nan]]), dims=['y', 'x']).astype(np.float32) } TEST_PADDER_FES_HRV = { 'img_bounds': {'south': [1, 4], 'north': [3, 5], 'east': [2, 3], 'west': [3, 4]}, 'is_full_disk': True, 'dataset_id': make_dataid(name='HRV'), 'dataset': xr.DataArray(np.ones((5, 2)), dims=['y', 'x']).astype(np.float32), 'final_shape': (5, 5), 'expected_padded_data': xr.DataArray(np.array([[np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan]]), dims=['y', 'x']).astype(np.float32) } class TestNativeMSGFileHandler(unittest.TestCase): """Test the NativeMSGFileHandler.""" def test_get_available_channels(self): """Test the derivation of the available channel list.""" available_chs = get_available_channels(TEST1_HEADER_CHNLIST) trues = ['WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120'] for bandname in AVAILABLE_CHANNELS: if bandname in trues: self.assertTrue(available_chs[bandname]) else: self.assertFalse(available_chs[bandname]) available_chs = get_available_channels(TEST2_HEADER_CHNLIST) trues = ['VIS006', 'VIS008', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'HRV'] for bandname in AVAILABLE_CHANNELS: if bandname in trues: self.assertTrue(available_chs[bandname]) else: self.assertFalse(available_chs[bandname]) available_chs = get_available_channels(TEST3_HEADER_CHNLIST) for bandname in AVAILABLE_CHANNELS: self.assertTrue(available_chs[bandname]) class TestNativeMSGArea(unittest.TestCase): """Test NativeMSGFileHandler.get_area_extent. The expected results have been verified by manually inspecting the output of geoferenced imagery. """ @staticmethod def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan): """Create mocked NativeMSGFileHandler. Contains sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. 
""" if dataset_id['name'] == 'HRV': reference_grid = 'ReferenceGridHRV' column_dir_grid_step = 1.0001343488693237 line_dir_grid_step = 1.0001343488693237 else: reference_grid = 'ReferenceGridVIS_IR' column_dir_grid_step = 3.0004031658172607 line_dir_grid_step = 3.0004031658172607 if is_full_disk: north = 3712 east = 1 west = 3712 south = 1 n_visir_cols = 3712 n_visir_lines = 3712 n_hrv_cols = 11136 n_hrv_lines = 11136 ssp_lon = 0 elif is_rapid_scan: north = 3712 east = 1 west = 3712 south = 2321 n_visir_cols = 3712 n_visir_lines = 1392 n_hrv_cols = 11136 n_hrv_lines = 4176 ssp_lon = 9.5 else: north = 3574 east = 78 west = 2591 south = 1746 n_visir_cols = 2516 n_visir_lines = north - south + 1 n_hrv_cols = n_visir_cols * 3 n_hrv_lines = n_visir_lines * 3 ssp_lon = 0 header = { '15_DATA_HEADER': { 'ImageDescription': { reference_grid: { 'ColumnDirGridStep': column_dir_grid_step, 'LineDirGridStep': line_dir_grid_step, 'GridOrigin': 2, # south-east corner }, 'ProjectionDescription': { 'LongitudeOfSSP': ssp_lon } }, 'GeometricProcessing': { 'EarthModel': { 'TypeOfEarthModel': earth_model, 'EquatorialRadius': 6378169.0, 'NorthPolarRadius': 6356583.800000001, 'SouthPolarRadius': 6356583.800000001, } }, 'SatelliteStatus': { 'SatelliteDefinition': { 'SatelliteId': 324 } } }, '15_SECONDARY_PRODUCT_HEADER': { 'NorthLineSelectedRectangle': {'Value': north}, 'EastColumnSelectedRectangle': {'Value': east}, 'WestColumnSelectedRectangle': {'Value': west}, 'SouthLineSelectedRectangle': {'Value': south}, 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, 'NumberColumnsVISIR': {'Value': n_visir_cols}, 'NumberLinesVISIR': {'Value': n_visir_lines}, 'NumberColumnsHRV': {'Value': n_hrv_cols}, 'NumberLinesHRV': {'Value': n_hrv_lines}, } } return header @staticmethod def create_test_trailer(is_rapid_scan): """Create Test Trailer. Mocked Trailer with sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. 
""" trailer = { '15TRAILER': { 'ImageProductionStats': { 'ActualL15CoverageHRV': { 'UpperNorthLineActual': 11136, 'UpperWestColumnActual': 7533, 'UpperSouthLineActual': 8193, 'UpperEastColumnActual': 1966, 'LowerNorthLineActual': 8192, 'LowerWestColumnActual': 5568, 'LowerSouthLineActual': 1, 'LowerEastColumnActual': 1 }, 'ActualScanningSummary': { 'ReducedScan': is_rapid_scan } } } } return trailer def prepare_area_defs(self, test_dict): """Prepare calculated and expected area definitions for equal checking.""" earth_model = test_dict['earth_model'] dataset_id = test_dict['dataset_id'] is_full_disk = test_dict['is_full_disk'] is_rapid_scan = test_dict['is_rapid_scan'] fill_disk = test_dict['fill_disk'] header = self.create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = self.create_test_trailer(is_rapid_scan) expected_area_def = test_dict['expected_area_def'] with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ mock.patch( 'satpy.readers.seviri_l1b_native.NativeMSGFileHandler._has_archive_header' ) as _has_archive_header: _has_archive_header.return_value = True fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) fh = NativeMSGFileHandler(None, {}, None) fh.fill_disk = fill_disk fh.header = header fh.trailer = trailer fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda) calc_area_def = fh.get_area_def(dataset_id) return (calc_area_def, expected_area_def) # Earth model 1 tests def test_earthmodel1_visir_fulldisk(self): """Test the VISIR FES with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_hrv_fulldisk(self): """Test the HRV FES with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK ) np.testing.assert_allclose(np.array(calculated.defs[0].area_extent), np.array(expected['Area extent 0'])) np.testing.assert_allclose(np.array(calculated.defs[1].area_extent), np.array(expected['Area extent 1'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.defs[0].area_id, expected['Area ID']) self.assertEqual(calculated.defs[1].area_id, expected['Area ID']) def test_earthmodel1_hrv_fulldisk_fill(self): """Test the HRV FES padded to fulldisk with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_visir_rapidscan(self): """Test the VISIR RSS with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( 
TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_visir_rapidscan_fill(self): """Test the VISIR RSS padded to fulldisk with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_hrv_rapidscan(self): """Test the HRV RSS with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_hrv_rapidscan_fill(self): """Test the HRV RSS padded to fulldisk with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_visir_roi(self): """Test the VISIR ROI with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_visir_roi_fill(self): """Test the VISIR ROI padded to fulldisk with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_hrv_roi(self): """Test the HRV ROI with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel1_hrv_roi_fill(self): """Test the HRV ROI padded to fulldisk with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) 
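# A note on the expected extents used above and below: with the ReferenceGridVIS_IR
# step of 3.0004031658172607 km and a 3712-pixel full-disk grid, the half-width is
# 1856 * 3000.4031658172607 m = 5568748.275756836 m, i.e. the +/-5568748.2758 values
# in TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK.  The EARTHMODEL2 expectations are
# the same grids offset by half a 3-km pixel (1500.2 m) in both grid directions,
# which is the only difference between the earth model 1 and earth model 2 constants.
# This is only a reading aid for the test constants; the reader derives the extents
# from the file header itself.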
self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) # Earth model 2 tests def test_earthmodel2_visir_fulldisk(self): """Test the VISIR FES with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_hrv_fulldisk(self): """Test the HRV FES with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK ) np.testing.assert_allclose(np.array(calculated.defs[0].area_extent), np.array(expected['Area extent 0'])) np.testing.assert_allclose(np.array(calculated.defs[1].area_extent), np.array(expected['Area extent 1'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.defs[0].area_id, expected['Area ID']) self.assertEqual(calculated.defs[1].area_id, expected['Area ID']) def test_earthmodel2_hrv_fulldisk_fill(self): """Test the HRV FES padded to fulldisk with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_visir_rapidscan(self): """Test the VISIR RSS with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_visir_rapidscan_fill(self): """Test the VISIR RSS padded to fulldisk with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_hrv_rapidscan(self): """Test the HRV RSS with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_hrv_rapidscan_fill(self): """Test the HRV RSS padded to fulldisk with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of 
columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_visir_roi(self): """Test the VISIR ROI with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_visir_roi_fill(self): """Test the VISIR ROI padded to fulldisk with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_hrv_roi(self): """Test the HRV ROI with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) def test_earthmodel2_hrv_roi_fill(self): """Test the HRV ROI padded to fulldisk with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL ) np.testing.assert_allclose(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.area_id, expected['Area ID']) # Test check for Region Of Interest (ROI) data def prepare_is_roi(self, test_dict): """Prepare calculated and expected check for region of interest data for equal checking.""" earth_model = 2 dataset_id = make_dataid(name='VIS006') is_full_disk = test_dict['is_full_disk'] is_rapid_scan = test_dict['is_rapid_scan'] header = self.create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = self.create_test_trailer(is_rapid_scan) expected_is_roi = test_dict['is_roi'] with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ mock.patch( 'satpy.readers.seviri_l1b_native.NativeMSGFileHandler._has_archive_header' ) as _has_archive_header: _has_archive_header.return_value = True fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) fh = NativeMSGFileHandler(None, {}, None) fh.header = header fh.trailer = trailer calc_is_roi = fh.is_roi() return (calc_is_roi, expected_is_roi) def test_is_roi_fulldisk(self): """Test check for region of interest with FES data.""" calculated, expected = self.prepare_is_roi(TEST_IS_ROI_FULLDISK) self.assertEqual(calculated, expected) def test_is_roi_rapidscan(self): """Test check for region of interest with RSS data.""" calculated, expected = 
self.prepare_is_roi(TEST_IS_ROI_RAPIDSCAN) self.assertEqual(calculated, expected) def test_is_roi_roi(self): """Test check for region of interest with ROI data.""" calculated, expected = self.prepare_is_roi(TEST_IS_ROI_ROI) self.assertEqual(calculated, expected) TEST_HEADER_CALIB = { 'RadiometricProcessing': { 'Level15ImageCalibration': { 'CalSlope': TestFileHandlerCalibrationBase.gains_nominal, 'CalOffset': TestFileHandlerCalibrationBase.offsets_nominal, }, 'MPEFCalFeedback': { 'GSICSCalCoeff': TestFileHandlerCalibrationBase.gains_gsics, 'GSICSOffsetCount': TestFileHandlerCalibrationBase.offsets_gsics } }, 'ImageDescription': { 'Level15ImageProduction': { 'PlannedChanProcessing': TestFileHandlerCalibrationBase.radiance_types } }, } class TestNativeMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" @pytest.fixture(name='file_handler') def file_handler(self): """Create a mocked file handler.""" header = { '15_DATA_HEADER': { 'ImageAcquisition': { 'PlannedAcquisitionTime': { 'TrueRepeatCycleStart': self.scan_time } } } } header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', return_value=None): fh = NativeMSGFileHandler() fh.header = header fh.platform_id = self.platform_id return fh @pytest.mark.parametrize( ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), [ # VIS channel, internal coefficients ('VIS006', 'counts', 'NOMINAL', False), ('VIS006', 'radiance', 'NOMINAL', False), ('VIS006', 'radiance', 'GSICS', False), ('VIS006', 'reflectance', 'NOMINAL', False), # VIS channel, external coefficients (mode should have no effect) ('VIS006', 'radiance', 'GSICS', True), ('VIS006', 'reflectance', 'NOMINAL', True), # IR channel, internal coefficients ('IR_108', 'counts', 'NOMINAL', False), ('IR_108', 'radiance', 'NOMINAL', False), ('IR_108', 'radiance', 'GSICS', False), ('IR_108', 'brightness_temperature', 'NOMINAL', False), ('IR_108', 'brightness_temperature', 'GSICS', False), # IR channel, external coefficients (mode should have no effect) ('IR_108', 'radiance', 'NOMINAL', True), ('IR_108', 'brightness_temperature', 'GSICS', True), # HRV channel, internal coefficiens ('HRV', 'counts', 'NOMINAL', False), ('HRV', 'radiance', 'NOMINAL', False), ('HRV', 'radiance', 'GSICS', False), ('HRV', 'reflectance', 'NOMINAL', False), # HRV channel, external coefficients (mode should have no effect) ('HRV', 'radiance', 'GSICS', True), ('HRV', 'reflectance', 'NOMINAL', True), ] ) def test_calibrate( self, file_handler, counts, channel, calibration, calib_mode, use_ext_coefs ): """Test the calibration.""" external_coefs = self.external_coefs if use_ext_coefs else {} expected = self._get_expected( channel=channel, calibration=calibration, calib_mode=calib_mode, use_ext_coefs=use_ext_coefs ) fh = file_handler fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs dataset_id = make_dataid(name=channel, calibration=calibration) res = fh.calibrate(counts, dataset_id) xr.testing.assert_allclose(res, expected) class TestNativeMSGDataset: """Tests for getting the dataset.""" @pytest.fixture def file_handler(self): """Create a file handler for testing.""" header = { '15_DATA_HEADER': { 'SatelliteStatus': { 'SatelliteDefinition': { 'NominalLongitude': 0.0 }, 'Orbit': { 'OrbitPolynomial': ORBIT_POLYNOMIALS } }, 'ImageAcquisition': { 'PlannedAcquisitionTime': { 'TrueRepeatCycleStart': datetime( 2006, 1, 1, 12, 15, 9, 304888 ) } } }, } header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) mda = { 
'channel_list': ['VIS006', 'IR_108'], 'number_of_lines': 4, 'number_of_columns': 4, 'is_full_disk': True, 'platform_name': 'MSG-3', 'offset_corrected': True, 'projection_parameters': { 'ssp_longitude': 0.0, 'h': 35785831.0, 'a': 6378169.0, 'b': 6356583.8 } } num_visir_cols = 5 # will be divided by 1.25 -> 4 columns visir_rec = [ ('line_data', np.uint8, (num_visir_cols,)), ('acq_time', time_cds_short) ] vis006_line1 = ( [1, 2, 3, 4, 5], # line_data (1, 1000) # acq_time (days, milliseconds) ) vis006_line2 = ([6, 7, 8, 9, 10], (1, 2000)) vis006_line3 = ([11, 12, 13, 14, 15], (1, 3000)) vis006_line4 = ([16, 17, 18, 19, 20], (1, 4000)) ir108_line1 = ([20, 19, 18, 17, 16], (1, 1000)) ir108_line2 = ([15, 14, 13, 12, 11], (1, 2000)) ir108_line3 = ([10, 9, 8, 7, 6], (1, 3000)) ir108_line4 = ([5, 4, 3, 2, 1], (1, 4000)) data = np.array( [[(vis006_line1,), (ir108_line1,)], [(vis006_line2,), (ir108_line2,)], [(vis006_line3,), (ir108_line3,)], [(vis006_line4,), (ir108_line4,)]], dtype=[('visir', visir_rec)] ) with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', return_value=None): fh = NativeMSGFileHandler() fh.header = header fh.mda = mda fh.dask_array = da.from_array(data) fh.platform_id = 324 fh.fill_disk = False fh.calib_mode = 'NOMINAL' fh.ext_calib_coefs = {} fh.include_raw_metadata = False fh.mda_max_array_size = 100 return fh def test_get_dataset(self, file_handler): """Test getting the dataset.""" dataset_id = make_dataid( name='VIS006', resolution=3000, calibration='counts' ) dataset_info = { 'units': '1', 'wavelength': (1, 2, 3), 'standard_name': 'counts' } dataset = file_handler.get_dataset(dataset_id, dataset_info) expected = xr.DataArray( np.array([[4., 32., 193., 5.], [24., 112., 514., 266.], [44., 192., 835., 527.], [64., 273., 132., 788.]], dtype=np.float32), dims=('y', 'x'), attrs={ 'orbital_parameters': { 'satellite_actual_longitude': -3.55117540817073, 'satellite_actual_latitude': -0.5711243456528018, 'satellite_actual_altitude': 35783296.150123544, 'satellite_nominal_longitude': 0.0, 'satellite_nominal_latitude': 0.0, 'projection_longitude': 0.0, 'projection_latitude': 0.0, 'projection_altitude': 35785831.0 }, 'georef_offset_corrected': True, 'platform_name': 'MSG-3', 'sensor': 'seviri', 'units': '1', 'wavelength': (1, 2, 3), 'standard_name': 'counts' } ) expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'), np.datetime64('1958-01-02 00:00:02'), np.datetime64('1958-01-02 00:00:03'), np.datetime64('1958-01-02 00:00:04')]) xr.testing.assert_equal(dataset, expected) assert 'raw_metadata' not in dataset.attrs assert_attrs_equal(dataset.attrs, expected.attrs, tolerance=1e-4) def test_get_dataset_with_raw_metadata(self, file_handler): """Test provision of raw metadata.""" file_handler.include_raw_metadata = True dataset_id = make_dataid( name='VIS006', resolution=3000, calibration='counts' ) dataset_info = { 'units': '1', 'wavelength': (1, 2, 3), 'standard_name': 'counts' } res = file_handler.get_dataset(dataset_id, dataset_info) assert 'raw_metadata' in res.attrs def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" file_handler.header['15_DATA_HEADER']['SatelliteStatus'][ 'Orbit']['OrbitPolynomial'] = ORBIT_POLYNOMIALS_INVALID dataset_id = make_dataid( name='VIS006', resolution=3000, calibration='counts' ) dataset_info = { 'units': '1', 'wavelength': (1, 2, 3), 'standard_name': 'counts' } res = file_handler.get_dataset(dataset_id, dataset_info) assert 
'satellite_actual_longitude' not in res.attrs[ 'orbital_parameters'] class TestNativeMSGPadder(unittest.TestCase): """Test Padder of the native l1b seviri reader.""" @staticmethod def prepare_padder(test_dict): """Initialize Padder and pad test data.""" dataset_id = test_dict['dataset_id'] img_bounds = test_dict['img_bounds'] is_full_disk = test_dict['is_full_disk'] dataset = test_dict['dataset'] final_shape = test_dict['final_shape'] expected_padded_data = test_dict['expected_padded_data'] padder = Padder(dataset_id, img_bounds, is_full_disk) padder._final_shape = final_shape calc_padded_data = padder.pad_data(dataset) return (calc_padded_data, expected_padded_data) def test_padder_rss_roi(self): """Test padder for RSS and ROI data (applies to both VISIR and HRV).""" calculated, expected = self.prepare_padder(TEST_PADDER_RSS_ROI) np.testing.assert_array_equal(calculated, expected) def test_padder_fes_hrv(self): """Test padder for FES HRV data.""" calculated, expected = self.prepare_padder(TEST_PADDER_FES_HRV) np.testing.assert_array_equal(calculated, expected) class TestNativeMSGFilenames: """Test identification of Native format filenames.""" @pytest.fixture def reader(self): """Return reader for SEVIRI Native format.""" from satpy._config import config_search_paths from satpy.readers import load_reader reader_configs = config_search_paths( os.path.join("readers", "seviri_l1b_native.yaml")) reader = load_reader(reader_configs) return reader def test_file_pattern(self, reader): """Test file pattern matching.""" filenames = [ # Valid "MSG2-SEVI-MSG15-0100-NA-20080219094242.289000000Z", "MSG2-SEVI-MSG15-0201-NA-20080219094242.289000000Z", "MSG2-SEVI-MSG15-0301-NA-20080219094242.289000000Z-123456.nat", "MSG2-SEVI-MSG15-0401-NA-20080219094242.289000000Z-20201231181545-123456.nat", # Invalid "MSG2-SEVI-MSG15-010-NA-20080219094242.289000000Z", ] files = reader.select_files_from_pathnames(filenames) assert len(files) == 4 @pytest.mark.parametrize( 'file_content,exp_header_size', [ (b'FormatName : NATIVE', 450400), # with ascii header (b'foobar', 445286), # without ascii header ] ) def test_header_type(file_content, exp_header_size): """Test identification of the file header type.""" header = TestNativeMSGArea.create_test_header( dataset_id=make_dataid(name='VIS006', resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0 ) if file_content == b'foobar': header.pop('15_SECONDARY_PRODUCT_HEADER') with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ mock.patch("builtins.open", mock.mock_open(read_data=file_content)): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) fh = NativeMSGFileHandler('myfile', {}, None) assert fh.header_type.itemsize == exp_header_size assert '15_SECONDARY_PRODUCT_HEADER' in fh.header satpy-0.34.0/satpy/tests/reader_tests/test_seviri_l1b_nc.py000066400000000000000000000260721420401153000240510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2020 Satpy developers # # This file is part of satpy. 
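# Scan-line acquisition times in these SEVIRI tests are "CDS short" values: a
# (days, milliseconds) pair counted from the epoch 1958-01-01.  A minimal,
# self-contained illustration of that convention (the helper name below is made
# up for the example; the native-format test above only relies on the
# time_cds_short dtype imported from satpy.readers.eum_base):
import numpy as np


def _cds_to_datetime64(days, msecs):
    """Convert a CDS (days, milliseconds since 1958-01-01) pair to datetime64."""
    epoch = np.datetime64('1958-01-01 00:00')
    return epoch + np.timedelta64(int(days), 'D') + np.timedelta64(int(msecs), 'ms')


# (1, 1000) is the acquisition time of the first test scan line and maps to
# 1958-01-02 00:00:01, matching the first expected 'acq_time' coordinate.
assert _cds_to_datetime64(1, 1000) == np.datetime64('1958-01-02 00:00:01')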
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The HRIT msg reader tests package.""" from datetime import datetime from unittest import mock import numpy as np import pytest import xarray as xr from satpy.readers.seviri_l1b_nc import NCSEVIRIFileHandler from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid def to_cds_time(time): """Convert datetime to (days, msecs) since 1958-01-01.""" if isinstance(time, datetime): time = np.datetime64(time) t0 = np.datetime64('1958-01-01 00:00') delta = time - t0 days = (delta / np.timedelta64(1, 'D')).astype(int) msecs = delta / np.timedelta64(1, 'ms') - days * 24 * 3600 * 1E3 return days, msecs class TestNCSEVIRIFileHandler(TestFileHandlerCalibrationBase): """Unit tests for SEVIRI netCDF reader.""" def _get_fake_dataset(self, counts): """Create a fake dataset.""" acq_time_day = np.repeat([1, 1], 11).reshape(2, 11) acq_time_msec = np.repeat([1000, 2000], 11).reshape(2, 11) orbit_poly_start_day, orbit_poly_start_msec = to_cds_time( np.array([datetime(2019, 12, 31, 18), datetime(2019, 12, 31, 22)], dtype='datetime64') ) orbit_poly_end_day, orbit_poly_end_msec = to_cds_time( np.array([datetime(2019, 12, 31, 22), datetime(2020, 1, 1, 2)], dtype='datetime64') ) counts = counts.rename({ 'y': 'num_rows_vis_ir', 'x': 'num_columns_vis_ir' }) scan_time_days, scan_time_msecs = to_cds_time(self.scan_time) ds = xr.Dataset( { 'VIS006': counts.copy(), 'IR_108': counts.copy(), 'HRV': (('num_rows_hrv', 'num_columns_hrv'), [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), 'planned_chan_processing': self.radiance_types, 'channel_data_visir_data_l10_line_mean_acquisition_time_day': ( ('num_rows_vis_ir', 'channels_vis_ir_dim'), acq_time_day ), 'channel_data_visir_data_l10_line_mean_acquisition_msec': ( ('num_rows_vis_ir', 'channels_vis_ir_dim'), acq_time_msec ), 'orbit_polynomial_x': ( ('orbit_polynomial_dim_row', 'orbit_polynomial_dim_col'), ORBIT_POLYNOMIALS['X'][0:2] ), 'orbit_polynomial_y': ( ('orbit_polynomial_dim_row', 'orbit_polynomial_dim_col'), ORBIT_POLYNOMIALS['Y'][0:2] ), 'orbit_polynomial_z': ( ('orbit_polynomial_dim_row', 'orbit_polynomial_dim_col'), ORBIT_POLYNOMIALS['Z'][0:2] ), 'orbit_polynomial_start_time_day': ( 'orbit_polynomial_dim_row', orbit_poly_start_day ), 'orbit_polynomial_start_time_msec': ( 'orbit_polynomial_dim_row', orbit_poly_start_msec ), 'orbit_polynomial_end_time_day': ( 'orbit_polynomial_dim_row', orbit_poly_end_day ), 'orbit_polynomial_end_time_msec': ( 'orbit_polynomial_dim_row', orbit_poly_end_msec ), }, attrs={ 'equatorial_radius': 6378.169, 'north_polar_radius': 6356.5838, 'south_polar_radius': 6356.5838, 'longitude_of_SSP': 0.0, 'nominal_longitude': -3.5, 'satellite_id': self.platform_id, 'true_repeat_cycle_start_day': scan_time_days, 'true_repeat_cycle_start_mi_sec': scan_time_msecs, 'planned_repeat_cycle_end_day': scan_time_days, 
'planned_repeat_cycle_end_mi_sec': scan_time_msecs, 'north_most_line': 3712, 'east_most_pixel': 1, 'west_most_pixel': 3712, 'south_most_line': 1, 'vis_ir_column_dir_grid_step': 3.0004032, 'vis_ir_line_dir_grid_step': 3.0004032, 'type_of_earth_model': '0x02', } ) ds['VIS006'].attrs.update({ 'scale_factor': self.gains_nominal[0], 'add_offset': self.offsets_nominal[0] }) ds['IR_108'].attrs.update({ 'scale_factor': self.gains_nominal[8], 'add_offset': self.offsets_nominal[8], }) # Add some attributes so that the reader can strip them strip_attrs = { 'comment': None, 'long_name': None, 'valid_min': None, 'valid_max': None } for name in ['VIS006', 'IR_108']: ds[name].attrs.update(strip_attrs) return ds @pytest.fixture(name='file_handler') def file_handler(self, counts): """Create a mocked file handler.""" with mock.patch( 'satpy.readers.seviri_l1b_nc.xr.open_dataset', return_value=self._get_fake_dataset(counts) ): return NCSEVIRIFileHandler( 'filename', {'platform_shortname': 'MSG3', 'start_time': self.scan_time, 'service': 'MSG'}, {'filetype': 'info'} ) @pytest.mark.parametrize( ('channel', 'calibration', 'use_ext_coefs'), [ # VIS channel, internal coefficients ('VIS006', 'counts', False), ('VIS006', 'radiance', False), ('VIS006', 'reflectance', False), # VIS channel, external coefficients ('VIS006', 'radiance', True), ('VIS006', 'reflectance', True), # IR channel, internal coefficients ('IR_108', 'counts', False), ('IR_108', 'radiance', False), ('IR_108', 'brightness_temperature', False), # IR channel, external coefficients ('IR_108', 'radiance', True), ('IR_108', 'brightness_temperature', True), # FUTURE: Enable once HRV reading has been fixed. # # HRV channel, internal coefficiens # ('HRV', 'counts', False), # ('HRV', 'radiance', False), # ('HRV', 'reflectance', False), # # HRV channel, external coefficients (mode should have no effect) # ('HRV', 'radiance', True), # ('HRV', 'reflectance', True), ] ) def test_calibrate( self, file_handler, channel, calibration, use_ext_coefs ): """Test the calibration.""" file_handler.nc = file_handler.nc.rename({ 'num_rows_vis_ir': 'y', 'num_columns_vis_ir': 'x' }) external_coefs = self.external_coefs if use_ext_coefs else {} expected = self._get_expected( channel=channel, calibration=calibration, calib_mode='NOMINAL', use_ext_coefs=use_ext_coefs ) fh = file_handler fh.ext_calib_coefs = external_coefs dataset_id = make_dataid(name=channel, calibration=calibration) res = fh.calibrate(fh.nc[channel], dataset_id) xr.testing.assert_allclose(res, expected) @pytest.mark.parametrize( ('channel', 'calibration'), [ ('VIS006', 'reflectance'), ('IR_108', 'brightness_temperature') ] ) def test_get_dataset(self, file_handler, channel, calibration): """Test getting the dataset.""" dataset_id = make_dataid(name=channel, calibration=calibration) dataset_info = { 'nc_key': channel, 'units': 'units', 'wavelength': 'wavelength', 'standard_name': 'standard_name' } res = file_handler.get_dataset(dataset_id, dataset_info) # Test scanline acquisition times expected = self._get_expected( channel=channel, calibration=calibration, calib_mode='NOMINAL', use_ext_coefs=False ) expected.attrs = { 'orbital_parameters': { 'satellite_actual_longitude': -3.541742131915741, 'satellite_actual_latitude': -0.5203765167594427, 'satellite_actual_altitude': 35783419.16135868, 'satellite_nominal_longitude': -3.5, 'satellite_nominal_latitude': 0.0, 'projection_longitude': 0.0, 'projection_latitude': 0.0, 'projection_altitude': 35785831.0 }, 'georef_offset_corrected': True, 'platform_name': 
'Meteosat-11', 'sensor': 'seviri', 'units': 'units', 'wavelength': 'wavelength', 'standard_name': 'standard_name' } expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'), np.datetime64('1958-01-02 00:00:02')]) expected = expected[::-1] # reader flips data upside down xr.testing.assert_allclose(res, expected) for key in ['sun_earth_distance_correction_applied', 'sun_earth_distance_correction_factor']: res.attrs.pop(key, None) assert_attrs_equal(res.attrs, expected.attrs, tolerance=1e-4) def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" dataset_id = make_dataid(name='VIS006', calibration='counts') dataset_info = { 'nc_key': 'VIS006', 'units': 'units', 'wavelength': 'wavelength', 'standard_name': 'standard_name' } file_handler.nc['orbit_polynomial_start_time_day'] = 0 file_handler.nc['orbit_polynomial_end_time_day'] = 0 res = file_handler.get_dataset(dataset_id, dataset_info) assert 'satellite_actual_longitude' not in res.attrs[ 'orbital_parameters'] satpy-0.34.0/satpy/tests/reader_tests/test_seviri_l2_bufr.py000066400000000000000000000233571420401153000242510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
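# The IODC area definitions used by this module follow from the SEVIRI full-disk
# 3712 x 3712 grid of 3-km pixels and the BUFR product's segment size: an N-pixel
# segment gives an N*3-km product, and the 9-km grid is enlarged to 1238 columns so
# it still covers the whole disk (hence the '_ext' in its name).  A minimal sketch
# of that relationship (illustrative only; the reader constructs the real
# AreaDefinitions, AREA_DEF and AREA_DEF_EXT below, from the message metadata):
import math

SEVIRI_VISIR_PIXELS = 3712  # full-disk 3-km grid
for segment_size, expected_width in ((16, 232), (3, 1238)):
    # 16-pixel segments -> the 48-km grid; 3-pixel segments -> the extended 9-km grid
    assert math.ceil(SEVIRI_VISIR_PIXELS / segment_size) == expected_width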
"""Unittesting the SEVIRI L2 BUFR reader.""" import sys import unittest from datetime import datetime from unittest import mock import dask.array as da import numpy as np import pytest from pyresample import geometry from satpy.tests.utils import make_dataid FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_asr'} FILENAME_INFO = {'start_time': '20191112000000', 'spacecraft': 'MSG1'} FILENAME_INFO2 = {'start_time': '20191112000000', 'spacecraft': 'MSG1', 'server': 'TESTSERVER'} MPEF_PRODUCT_HEADER = { 'NominalTime': datetime(2019, 11, 6, 18, 0), 'SpacecraftName': '08', 'RectificationLongitude': 'E0415' } DATASET_INFO = { 'name': 'testdata', 'key': '#1#brightnessTemperature', 'coordinates': ('longitude', 'latitude'), 'fill_value': 0 } DATASET_INFO_LAT = { 'name': 'latitude', 'key': 'latitude', 'fill_value': -1.e+100 } DATASET_INFO_LON = { 'name': 'longitude', 'key': 'longitude', 'fill_value': -1.e+100 } DATASET_ATTRS = { 'platform_name': 'MET08', 'ssp_lon': 41.5, 'seg_size': 16 } AREA_DEF = geometry.AreaDefinition( 'msg_seviri_iodc_48km', 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution', "", {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], 'h': 35785831., 'proj': 'geos', 'units': 'm'}, 232, 232, (-5570248.6866, -5567248.2834, 5567248.2834, 5570248.6866) ) AREA_DEF_EXT = geometry.AreaDefinition( 'msg_seviri_iodc_9km_ext', 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution ' '(extended outside original 3km grid)', "", {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], 'h': 35785831., 'proj': 'geos', 'units': 'm'}, 1238, 1238, (-5571748.888268564, -5571748.888155806, 5571748.888155806, 5571748.888268564) ) TEST_FILES = [ 'ASRBUFRProd_20191106130000Z_00_OMPEFS01_MET08_FES_E0000', 'MSG1-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr', 'MSG1-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148' ] # Test data DATA = np.random.uniform(low=250, high=350, size=(128,)) LAT = np.random.uniform(low=-80, high=80, size=(128,)) LON = np.random.uniform(low=-38.5, high=121.5, size=(128,)) class SeviriL2BufrData: """Mock SEVIRI L2 BUFR data.""" @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def __init__(self, filename, with_adef=False): """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader.""" import eccodes as ec from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler self.buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') ec.codes_set(self.buf1, 'unpack', 1) # write the bufr test data twice as we want to read in and the concatenate the data in the reader # 55 id corresponds to METEOSAT 8` ec.codes_set(self.buf1, 'satelliteIdentifier', 55) ec.codes_set_array(self.buf1, 'latitude', LAT) ec.codes_set_array(self.buf1, 'latitude', LAT) ec.codes_set_array(self.buf1, 'longitude', LON) ec.codes_set_array(self.buf1, 'longitude', LON) ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) self.m = mock.mock_open() # only our offline product contain MPEF product headers so we get the metadata from there if ('BUFRProd' in filename): with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile: fromfile.return_value = MPEF_PRODUCT_HEADER with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict') as recarray2dict: recarray2dict.side_effect = (lambda x: x) self.fh = SeviriL2BufrFileHandler(filename, 
FILENAME_INFO2, FILETYPE_INFO, with_area_definition=with_adef)
                    self.fh.mpef_header = MPEF_PRODUCT_HEADER
        else:
            # No MPEF header, so we get the metadata from the BUFR messages
            with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True):
                with mock.patch('eccodes.codes_bufr_new_from_file',
                                side_effect=[self.buf1, None, self.buf1, None, self.buf1, None]) as ec1:
                    ec1.return_value = ec1.side_effect
                    with mock.patch('eccodes.codes_set') as ec2:
                        ec2.return_value = 1
                        with mock.patch('eccodes.codes_release') as ec5:
                            ec5.return_value = 1
                            self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO,
                                                              with_area_definition=with_adef)

    def get_data(self, dataset_info):
        """Read data from mock file."""
        with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True):
            with mock.patch('eccodes.codes_bufr_new_from_file',
                            side_effect=[self.buf1, self.buf1, None]) as ec1:
                ec1.return_value = ec1.side_effect
                with mock.patch('eccodes.codes_set') as ec2:
                    ec2.return_value = 1
                    with mock.patch('eccodes.codes_release') as ec5:
                        ec5.return_value = 1
                        z = self.fh.get_dataset(
                            make_dataid(name=dataset_info['name'], resolution=48000),
                            dataset_info)
        return z


@pytest.mark.parametrize("input_file", TEST_FILES)
class TestSeviriL2BufrReader:
    """Test SEVIRI L2 BUFR Reader."""

    @staticmethod
    def test_lonslats(input_file):
        """Test reading of longitude and latitude data with the SEVIRI L2 BUFR reader."""
        bufr_obj = SeviriL2BufrData(input_file)
        zlat = bufr_obj.get_data(DATASET_INFO_LAT)
        zlon = bufr_obj.get_data(DATASET_INFO_LON)
        np.testing.assert_array_equal(zlat.values, np.concatenate((LAT, LAT), axis=0))
        np.testing.assert_array_equal(zlon.values, np.concatenate((LON, LON), axis=0))

    @staticmethod
    def test_attributes_with_swath_definition(input_file):
        """Test correctness of dataset attributes with data loaded with a SwathDefinition (default behaviour)."""
        bufr_obj = SeviriL2BufrData(input_file)
        z = bufr_obj.get_data(DATASET_INFO)
        assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name']
        assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon']
        assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size']

    @staticmethod
    def test_attributes_with_area_definition(input_file):
        """Test correctness of dataset attributes with data loaded with an AreaDefinition."""
        bufr_obj = SeviriL2BufrData(input_file, with_adef=True)
        _ = bufr_obj.get_data(DATASET_INFO_LAT)  # We need to load the lat/lon data in order to
        _ = bufr_obj.get_data(DATASET_INFO_LON)  # populate the file handler with these data
        z = bufr_obj.get_data(DATASET_INFO)
        assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name']
        assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon']
        assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size']

    @staticmethod
    def test_data_with_swath_definition(input_file):
        """Test data loaded with SwathDefinition (default behaviour)."""
        bufr_obj = SeviriL2BufrData(input_file)
        with pytest.raises(NotImplementedError):
            bufr_obj.fh.get_area_def(None)

        # Concatenate the original test arrays, as get_dataset will have read and concatenated the data
        x1 = np.concatenate((DATA, DATA), axis=0)
        z = bufr_obj.get_data(DATASET_INFO)
        np.testing.assert_array_equal(z.values, x1)

    def test_data_with_area_definition(self, input_file):
        """Test data loaded with AreaDefinition."""
        bufr_obj = SeviriL2BufrData(input_file, with_adef=True)
        _ = bufr_obj.get_data(DATASET_INFO_LAT)  # We need to load the lat/lon data in order to
        _ = bufr_obj.get_data(DATASET_INFO_LON)  # populate the file handler with these data
        z = bufr_obj.get_data(DATASET_INFO)

        ad = bufr_obj.fh.get_area_def(None)
        assert ad == AREA_DEF

        data_1d = np.concatenate((DATA, DATA), axis=0)
        # Put the BUFR data on the 2D grid that the 2D array returned by get_dataset should correspond to
        lons_1d, lats_1d = da.compute(bufr_obj.fh.longitude, bufr_obj.fh.latitude)
        icol, irow = ad.get_array_indices_from_lonlat(lons_1d, lats_1d)
        data_2d = np.empty(ad.shape)
        data_2d[:] = np.nan
        data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask]
        np.testing.assert_array_equal(z.values, data_2d)

        # Test that the correct AreaDefinition is identified for products with 3-pixel segments
        bufr_obj.fh.seg_size = 3
        ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name='dummmy', resolution=9000))
        assert ad_ext == AREA_DEF_EXT
satpy-0.34.0/satpy/tests/reader_tests/test_seviri_l2_grib.py000066400000000000000000000207451420401153000242320ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019 Satpy developers
#
# satpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# satpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with satpy. If not, see <http://www.gnu.org/licenses/>.
"""SEVIRI L2 GRIB-reader test package."""

import datetime
import sys
import unittest
from unittest import mock

import numpy as np

from satpy.tests.utils import make_dataid

# Dictionary to be used as fake GRIB message
FAKE_MESSAGE = {
    'longitudeOfSubSatellitePointInDegrees': 9.5,
    'dataDate': 20191020,
    'dataTime': 1745,
    'Nx': 1000,
    'Ny': 1200,
    'earthMajorAxis': 6400.,
    'earthMinorAxis': 6300.,
    'NrInRadiusOfEarth': 6.,
    'XpInGridLengths': 500,
    'parameterNumber': 30,
    'missingValue': 9999,
}

# List to be used as fake GID source
FAKE_GID = [0, 1, 2, 3, None]


class Test_SeviriL2GribFileHandler(unittest.TestCase):
    """Test the SeviriL2GribFileHandler reader."""

    @mock.patch('satpy.readers.seviri_l2_grib.ec')
    def setUp(self, ec_):
        """Set up the test by creating a mocked eccodes library."""
        fake_gid_generator = (i for i in FAKE_GID)
        ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
        ec_.codes_get.side_effect = lambda gid, key: FAKE_MESSAGE[key]
        ec_.codes_get_values.return_value = np.ones(1000*1200)
        self.ec_ = ec_

    @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows")
    @mock.patch('satpy.readers.seviri_l2_grib.xr')
    @mock.patch('satpy.readers.seviri_l2_grib.da')
    def test_data_reading(self, da_, xr_):
        """Test the reading of data from the product."""
        from satpy import CHUNK_SIZE
        from satpy.readers.seviri_l2_grib import REPEAT_CYCLE_DURATION, SeviriL2GribFileHandler
        with mock.patch("builtins.open", mock.mock_open()) as mock_file:
            with mock.patch('satpy.readers.seviri_l2_grib.ec', self.ec_):
                self.reader = SeviriL2GribFileHandler(
                    filename='test.grib',
                    filename_info={
                        'spacecraft': 'MET11',
                        'start_time': datetime.datetime(year=2020, month=10, day=20,
                                                        hour=19, minute=45, second=0)
                    },
                    filetype_info={}
                )
                dataset_id = make_dataid(name='dummmy', resolution=3000)

                # Checks that the codes_grib_multi_support_on function has been called
                self.ec_.codes_grib_multi_support_on.assert_called()

                # Restarts the id generator and clears the call history
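                # For reference, the projection parameters asserted further down follow
                # directly from FAKE_MESSAGE; a quick sanity check of that arithmetic
                # (illustrative only, the reader performs the conversion internally when
                # it builds the area definition; the factor of 1000 matches the km-style
                # axis values used in FAKE_MESSAGE):
                a_m = FAKE_MESSAGE['earthMajorAxis'] * 1000.            # 6 400 000 m
                b_m = FAKE_MESSAGE['earthMinorAxis'] * 1000.            # 6 300 000 m
                h_m = (FAKE_MESSAGE['NrInRadiusOfEarth'] - 1.) * a_m    # 32 000 000 m above the surface
                assert (a_m, b_m, h_m) == (6400000., 6300000., 32000000.)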
fake_gid_generator = (i for i in FAKE_GID) self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) self.ec_.codes_grib_new_from_file.reset_mock() self.ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with a valid parameter_number valid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 30}) # Checks the correct file open call mock_file.assert_called_with('test.grib', 'rb') # Checks that the dataset has been created as a DataArray object self.assertEqual(valid_dataset._extract_mock_name(), 'xr.DataArray()') # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, self.ec_.codes_release.call_count + 1) # Restarts the id generator and clears the call history fake_gid_generator = (i for i in FAKE_GID) self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) self.ec_.codes_grib_new_from_file.reset_mock() self.ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with an invalid parameter_number invalid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 50}) # Checks that the function returns None self.assertEqual(invalid_dataset, None) # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, self.ec_.codes_release.call_count + 1) # Checks the basic data reading self.assertEqual(REPEAT_CYCLE_DURATION, 15) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() expected_attributes = { 'orbital_parameters': { 'projection_longitude': 9.5 }, 'sensor': 'seviri', 'platform_name': 'Meteosat-11' } self.assertEqual(attributes, expected_attributes) # Checks the reading of an array from the message self.reader._get_xarray_from_msg(0) # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] self.assertTrue(np.all(args[0] == np.ones((1200, 1000)))) self.assertEqual(args[1], CHUNK_SIZE) # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] self.assertEqual(kwargs['dims'], ('y', 'x')) # Checks the correct execution of the _get_proj_area function pdict, area_dict = self.reader._get_proj_area(0) expected_pdict = { 'a': 6400000., 'b': 6300000., 'h': 32000000., 'ssp_lon': 9.5, 'nlines': 1000, 'ncols': 1200, 'a_name': 'msg_seviri_rss_3km', 'a_desc': 'MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution', 'p_id': '', } self.assertEqual(pdict, expected_pdict) expected_area_dict = { 'center_point': 500, 'north': 1200, 'east': 1, 'west': 1000, 'south': 1, } self.assertEqual(area_dict, expected_area_dict) # Checks the correct execution of the get_area_def function with mock.patch('satpy.readers.seviri_l2_grib.calculate_area_extent', mock.Mock(name='calculate_area_extent')) as cae: with mock.patch('satpy.readers.seviri_l2_grib.get_area_definition', mock.Mock()) as gad: self.reader.get_area_def(mock.Mock(resolution=400.)) # Asserts that calculate_area_extent has been called with the correct arguments expected_args = ({'center_point': 500, 'east': 1, 'west': 1000, 'south': 1, 'north': 1200, 'column_step': 400., 'line_step': 400.},) name, args, kwargs = 
cae.mock_calls[0] self.assertEqual(args, expected_args) # Asserts that get_area_definition has been called with the correct arguments name, args, kwargs = gad.mock_calls[0] self.assertEqual(args[0], expected_pdict) # The second argument must be the return result of calculate_area_extent self.assertEqual(args[1]._extract_mock_name(), 'calculate_area_extent()') satpy-0.34.0/satpy/tests/reader_tests/test_slstr_l1b.py000066400000000000000000000241351420401153000232350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.nc_slstr module.""" import unittest import unittest.mock as mock import warnings from datetime import datetime import numpy as np import xarray as xr from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange from satpy.readers.slstr_l1b import NCSLSTR1B, NCSLSTRAngles, NCSLSTRFlag, NCSLSTRGeo local_id_keys_config = {'name': { 'required': True, }, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'reflectance', 'brightness_temperature', 'radiance', 'counts' ] }, 'stripe': { 'enum': [ 'a', 'b', 'c', 'i', 'f', ] }, 'view': { 'enum': [ 'nadir', 'oblique', ] }, 'modifiers': { 'required': True, 'default': ModifierTuple(), 'type': ModifierTuple, }, } class TestSLSTRL1B(unittest.TestCase): """Common setup for SLSTR_L1B tests.""" @mock.patch('satpy.readers.slstr_l1b.xr') def setUp(self, xr_): """Create a fake dataset using the given radiance data.""" self.base_data = np.array(([1., 2., 3.], [4., 5., 6.])) self.det_data = np.array(([0, 1, 1], [0, 1, 0])) self.start_time = "2020-05-10T12:01:15.585Z" self.end_time = "2020-05-10T12:06:18.012Z" self.rad = xr.DataArray( self.base_data, dims=('columns', 'rows'), attrs={'scale_factor': 1.0, 'add_offset': 0.0, '_FillValue': -32768, 'units': 'mW.m-2.sr-1.nm-1', } ) det = xr.DataArray( self.base_data, dims=('columns', 'rows'), attrs={'scale_factor': 1.0, 'add_offset': 0.0, '_FillValue': 255, } ) self.fake_dataset = xr.Dataset( data_vars={ 'S5_radiance_an': self.rad, 'S9_BT_ao': self.rad, 'foo_radiance_an': self.rad, 'S5_solar_irradiances': self.rad, 'geometry_tn': self.rad, 'latitude_an': self.rad, 'x_tx': self.rad, 'y_tx': self.rad, 'x_in': self.rad, 'y_in': self.rad, 'x_an': self.rad, 'y_an': self.rad, 'flags_an': self.rad, 'detector_an': det, }, attrs={ "start_time": self.start_time, "stop_time": self.end_time, }, ) def make_dataid(**items): """Make a data id.""" return DataID(local_id_keys_config, **items) class TestSLSTRReader(TestSLSTRL1B): """Test various nc_slstr file handlers.""" class FakeSpl: """Fake return function for SPL interpolation.""" @staticmethod def ev(foo_x, foo_y): """Fake function to return interpolated data.""" return np.zeros((3, 2)) @mock.patch('satpy.readers.slstr_l1b.xr') @mock.patch('scipy.interpolate.RectBivariateSpline') def test_instantiate(self, bvs_, xr_): 
"""Test initialization of file handlers.""" bvs_.return_value = self.FakeSpl xr_.open_dataset.return_value = self.fake_dataset good_start = datetime.strptime(self.start_time, '%Y-%m-%dT%H:%M:%S.%fZ') good_end = datetime.strptime(self.end_time, '%Y-%m-%dT%H:%M:%S.%fZ') ds_id = make_dataid(name='foo', calibration='radiance', stripe='a', view='nadir') ds_id_500 = make_dataid(name='foo', calibration='radiance', stripe='a', view='nadir', resolution=500) filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, 'stripe': 'a', 'view': 'n'} test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') assert test.view == 'nadir' assert test.stripe == 'a' test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, 'stripe': 'c', 'view': 'o'} test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c') assert test.view == 'oblique' assert test.stripe == 'c' test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, 'stripe': 'a', 'view': 'n'} test = NCSLSTRGeo('somedir/geometry_an.nc', filename_info, 'c') test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'latitude_{stripe:1s}{view:1s}'})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test = NCSLSTRFlag('somedir/S1_radiance_an.nc', filename_info, 'c') test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'flags_{stripe:1s}{view:1s}'})) assert test.view == 'nadir' assert test.stripe == 'a' self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test = NCSLSTRAngles('somedir/S1_radiance_an.nc', filename_info, 'c') test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test.get_dataset(ds_id_500, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'})) class TestSLSTRCalibration(TestSLSTRL1B): """Test the implementation of the calibration factors.""" @mock.patch('satpy.readers.slstr_l1b.xr') def test_radiance_calibration(self, xr_): """Test radiance calibration steps.""" from satpy.readers.slstr_l1b import CHANCALIB_FACTORS xr_.open_dataset.return_value = self.fake_dataset ds_id = make_dataid(name='foo', calibration='radiance', stripe='a', view='nadir') filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, 'stripe': 'a', 'view': 'n'} test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c') # Check warning is raised if we don't have calibration with warnings.catch_warnings(record=True) as w: test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) assert issubclass(w[-1].category, UserWarning) # Check user calibration is used correctly test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c', user_calibration={'foo_nadir': 0.4}) data = test.get_dataset(ds_id, 
dict(filename_info, **{'file_key': 'foo'})) np.testing.assert_allclose(data.values, self.base_data * 0.4) # Check internal calibration is used correctly ds_id = make_dataid(name='S5', calibration='radiance', stripe='a', view='nadir') filename_info['dataset_name'] = 'S5' test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'S5'})) np.testing.assert_allclose(data.values, self.base_data * CHANCALIB_FACTORS['S5_nadir']) @mock.patch('satpy.readers.slstr_l1b.xr') @mock.patch('satpy.readers.slstr_l1b.da') def test_reflectance_calibration(self, da_, xr_): """Test reflectance calibration.""" xr_.open_dataset.return_value = self.fake_dataset da_.map_blocks.return_value = self.rad / 100. filename_info = {'mission_id': 'S3A', 'dataset_name': 'S5', 'start_time': 0, 'end_time': 0, 'stripe': 'a', 'view': 'n'} ds_id = make_dataid(name='S5', calibration='reflectance', stripe='a', view='nadir') test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'S5'})) self.assertEqual(data.units, '%') np.testing.assert_allclose(data.values, self.rad * np.pi) def test_cal_rad(self): """Test the radiance to reflectance converter.""" rad = np.array([10., 20., 30., 40., 50., 60., 70.]) didx = np.array([1, 2., 1., 3., 2., 2., 0.]) solflux = np.array([100., 200., 300., 400.]) good_rad = np.array([1. / 20., 1. / 15., 3. / 20., 1. / 10., 1. / 6., 2. / 10., 7. / 10.]) out_rad = NCSLSTR1B._cal_rad(rad, didx, solflux) np.testing.assert_allclose(out_rad, good_rad) satpy-0.34.0/satpy/tests/reader_tests/test_slstr_l2.py000066400000000000000000000056321420401153000230750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
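# --- Illustrative sketch added by the editor (not part of the original archive) ---
# The expected values in test_cal_rad above imply that the SLSTR radiance-to-
# reflectance helper divides each pixel's radiance by the solar flux of the
# detector that measured it.  This is a minimal numpy re-derivation of those
# expected numbers; the variable names are illustrative, not the reader's API.
import numpy as np

rad = np.array([10., 20., 30., 40., 50., 60., 70.])   # per-pixel radiances
didx = np.array([1, 2, 1, 3, 2, 2, 0], dtype=int)     # per-pixel detector index
solflux = np.array([100., 200., 300., 400.])          # per-detector solar irradiance

norm_rad = rad / solflux[didx]                        # matches good_rad in the test
assert np.allclose(norm_rad, [1/20, 1/15, 3/20, 1/10, 1/6, 2/10, 7/10])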
"""Module for testing the satpy.readers.slstr_l2 module.""" import unittest from unittest import mock from unittest.mock import MagicMock, patch import xarray as xr from satpy.readers.slstr_l2 import SLSTRL2FileHandler class TestSLSTRL2Reader(unittest.TestCase): """Test Sentinel-3 SST L2 reader.""" @mock.patch('xarray.open_dataset') def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" filename_info = {} tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') tmp.rename.return_value = tmp xr.open_dataset.return_value = tmp SLSTRL2FileHandler('somedir/somefile.nc', filename_info, None) mocked_dataset.assert_called() mocked_dataset.reset_mock() with patch('tarfile.open') as tf: tf.return_value.__enter__.return_value = MagicMock(getnames=lambda *a: ["GHRSST-SSTskin.nc"]) SLSTRL2FileHandler('somedir/somefile.tar', filename_info, None) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): """Test retrieval of datasets.""" filename_info = {} tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') tmp.rename.return_value = tmp xr.open_dataset.return_value = tmp test = SLSTRL2FileHandler('somedir/somefile.nc', filename_info, None) test.nc = {'longitude': xr.Dataset(), 'latitude': xr.Dataset(), 'sea_surface_temperature': xr.Dataset(), 'sea_ice_fraction': xr.Dataset(), } test.get_dataset('longitude', {'standard_name': 'longitude'}) test.get_dataset('latitude', {'standard_name': 'latitude'}) test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'}) test.get_dataset('sea_ice_fraction', {'standard_name': 'sea_ice_fraction'}) with self.assertRaises(KeyError): test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'}) mocked_dataset.assert_called() mocked_dataset.reset_mock() satpy-0.34.0/satpy/tests/reader_tests/test_smos_l2_wind.py000066400000000000000000000205451420401153000237300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
"""Module for testing the satpy.readers.smos_l2_wind module.""" import os import unittest from datetime import datetime from unittest import mock import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler class FakeNetCDF4FileHandlerSMOSL2WIND(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray dt_s = filename_info.get('start_time', datetime(2020, 4, 22, 12, 0, 0)) dt_e = filename_info.get('end_time', datetime(2020, 4, 22, 12, 0, 0)) if filetype_info['file_type'] == 'smos_l2_wind': file_content = { '/attr/time_coverage_start': dt_s.strftime('%Y-%m-%dT%H:%M:%S Z'), '/attr/time_coverage_end': dt_e.strftime('%Y-%m-%dT%H:%M:%S Z'), '/attr/platform_shortname': 'SM', '/attr/platform': 'SMOS', '/attr/instrument': 'MIRAS', '/attr/processing_level': 'L2', '/attr/geospatial_bounds_vertical_crs': 'EPSG:4623', } file_content['lat'] = np.arange(-90., 90.25, 0.25) file_content['lat/shape'] = (len(file_content['lat']),) file_content['lat'] = DataArray(file_content['lat'], dims=('lat')) file_content['lat'].attrs['_FillValue'] = -999.0 file_content['lon'] = np.arange(0., 360., 0.25) file_content['lon/shape'] = (len(file_content['lon']),) file_content['lon'] = DataArray(file_content['lon'], dims=('lon')) file_content['lon'].attrs['_FillValue'] = -999.0 file_content['wind_speed'] = np.ndarray(shape=(1, # Time dimension len(file_content['lat']), len(file_content['lon']))) file_content['wind_speed/shape'] = (1, len(file_content['lat']), len(file_content['lon'])) file_content['wind_speed'] = DataArray(file_content['wind_speed'], dims=('time', 'lat', 'lon'), coords=[[1], file_content['lat'], file_content['lon']]) file_content['wind_speed'].attrs['_FillValue'] = -999.0 else: raise AssertionError() return file_content class TestSMOSL2WINDReader(unittest.TestCase): """Test SMOS L2 WINDReader.""" yaml_file = "smos_l2_wind.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(SMOSL2WINDFileHandler, '__bases__', (FakeNetCDF4FileHandlerSMOSL2WIND,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_wind_speed(self): """Load wind_speed dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', ]) r.create_filehandlers(loadables) ds = r.load(['wind_speed']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'SM') 
self.assertEqual(d.attrs['sensor'], 'MIRAS') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertIn('y', d.dims) self.assertIn('x', d.dims) self.assertEqual(d.shape, (719, 1440)) self.assertEqual(d.y[0].data, -89.75) self.assertEqual(d.y[d.shape[0] - 1].data, 89.75) def test_load_lat(self): """Load lat dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', ]) r.create_filehandlers(loadables) ds = r.load(['lat']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertIn('y', d.dims) self.assertEqual(d.shape, (719,)) self.assertEqual(d.data[0], -89.75) self.assertEqual(d.data[d.shape[0] - 1], 89.75) def test_load_lon(self): """Load lon dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', ]) r.create_filehandlers(loadables) ds = r.load(['lon']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertIn('x', d.dims) self.assertEqual(d.shape, (1440,)) self.assertEqual(d.data[0], -180.0) self.assertEqual(d.data[d.shape[0] - 1], 179.75) def test_adjust_lon(self): """Load adjust longitude dataset.""" from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', {}, filetype_info={'file_type': 'smos_l2_wind'}) data = DataArray(np.arange(0., 360., 0.25), dims=('lon')) adjusted = smos_l2_wind_fh._adjust_lon_coord(data) expected = DataArray(np.concatenate((np.arange(0, 180., 0.25), np.arange(-180.0, 0, 0.25))), dims=('lon')) self.assertEqual(adjusted.data.tolist(), expected.data.tolist()) def test_roll_dataset(self): """Load roll of dataset along the lon coordinate.""" from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', {}, filetype_info={'file_type': 'smos_l2_wind'}) data = DataArray(np.arange(0., 360., 0.25), dims=('lon')) data = smos_l2_wind_fh._adjust_lon_coord(data) adjusted = smos_l2_wind_fh._roll_dataset_lon_coord(data) expected = np.arange(-180., 180., 0.25) self.assertEqual(adjusted.data.tolist(), expected.tolist()) satpy-0.34.0/satpy/tests/reader_tests/test_tropomi_l2.py000066400000000000000000000220741420401153000234160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
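# --- Illustrative sketch added by the editor (not part of the original archive) ---
# The two SMOS tests above (test_adjust_lon and test_roll_dataset) exercise a
# common trick for global lat/lon grids: wrap longitudes from the file's
# [0, 360) convention into [-180, 180), then roll the array so the coordinate
# axis is monotonically increasing again.  This is a standalone numpy
# illustration of that idea, not the reader's own code.
import numpy as np

lon = np.arange(0., 360., 0.25)
wrapped = np.where(lon >= 180., lon - 360., lon)    # [0, 180) unchanged, [180, 360) -> [-180, 0)
rolled = np.roll(wrapped, wrapped.size // 2)        # shift so values run -180 ... 179.75
assert np.array_equal(rolled, np.arange(-180., 180., 0.25))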
"""Module for testing the satpy.readers.tropomi_l2 module.""" import os import unittest from datetime import datetime from unittest import mock import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (3246, 450) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_BOUND_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] * 4, dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE+(4,)) class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" dt_s = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) dt_e = filename_info.get('end_time', datetime(2016, 1, 1, 12, 0, 0)) if filetype_info['file_type'] == 'tropomi_l2': file_content = { '/attr/time_coverage_start': dt_s.strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/time_coverage_end': dt_e.strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/platform_shortname': 'S5P', '/attr/sensor': 'TROPOMI', } file_content['PRODUCT/latitude'] = DEFAULT_FILE_DATA file_content['PRODUCT/longitude'] = DEFAULT_FILE_DATA file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'] = DEFAULT_BOUND_DATA file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'] = DEFAULT_BOUND_DATA if 'NO2' in filename: file_content['PRODUCT/nitrogen_dioxide_total_column'] = DEFAULT_FILE_DATA if 'SO2' in filename: file_content['PRODUCT/sulfurdioxide_total_vertical_column'] = DEFAULT_FILE_DATA for k in list(file_content.keys()): if not k.startswith('PRODUCT'): continue file_content[k + '/shape'] = DEFAULT_FILE_SHAPE self._convert_data_content_to_dataarrays(file_content) file_content['PRODUCT/latitude'].attrs['_FillValue'] = -999.0 file_content['PRODUCT/longitude'].attrs['_FillValue'] = -999.0 file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'].attrs['_FillValue'] = -999.0 file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'].attrs['_FillValue'] = -999.0 if 'NO2' in filename: file_content['PRODUCT/nitrogen_dioxide_total_column'].attrs['_FillValue'] = -999.0 if 'SO2' in filename: file_content['PRODUCT/sulfurdioxide_total_vertical_column'].attrs['_FillValue'] = -999.0 else: raise NotImplementedError("Test data for file types other than " "'tropomi_l2' are not supported.") return file_content def _convert_data_content_to_dataarrays(self, file_content): """Convert data content to xarray's dataarrays.""" from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): if 1 < val.ndim <= 2: file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel')) elif val.ndim > 2: file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel', 'corner')) else: file_content[key] = DataArray(val) class TestTROPOMIL2Reader(unittest.TestCase): """Test TROPOMI L2 Reader.""" yaml_file = "tropomi_l2.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.tropomi_l2 import TROPOMIL2FileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(TROPOMIL2FileHandler, '__bases__', (FakeNetCDF4FileHandlerTL2,)) self.fake_handler = 
self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_no2(self): """Load NO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', ]) r.create_filehandlers(loadables) ds = r.load(['nitrogen_dioxide_total_column']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'S5P') self.assertEqual(d.attrs['sensor'], 'tropomi') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertIn('y', d.dims) self.assertIn('x', d.dims) def test_load_so2(self): """Load SO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc', ]) r.create_filehandlers(loadables) ds = r.load(['sulfurdioxide_total_vertical_column']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'S5P') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertIn('y', d.dims) self.assertIn('x', d.dims) def test_load_bounds(self): """Load bounds dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', ]) r.create_filehandlers(loadables) keys = ['latitude_bounds', 'longitude_bounds'] ds = r.load(keys) self.assertEqual(len(ds), 2) for key in keys: self.assertEqual(ds[key].attrs['platform_shortname'], 'S5P') self.assertIn('y', ds[key].dims) self.assertIn('x', ds[key].dims) self.assertIn('corner', ds[key].dims) # check assembled bounds left = np.vstack([ds[key][:, :, 0], ds[key][-1:, :, 3]]) right = np.vstack([ds[key][:, -1:, 1], ds[key][-1:, -1:, 2]]) dest = np.hstack([left, right]) dest = xr.DataArray(dest, dims=('y', 'x') ) dest.attrs = ds[key].attrs self.assertEqual(dest.attrs['platform_shortname'], 'S5P') self.assertIn('y', dest.dims) self.assertIn('x', dest.dims) self.assertEqual(DEFAULT_FILE_SHAPE[0] + 1, dest.shape[0]) self.assertEqual(DEFAULT_FILE_SHAPE[1] + 1, dest.shape[1]) self.assertIsNone(np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0])) self.assertIsNone(np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3])) self.assertIsNone(np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) ) satpy-0.34.0/satpy/tests/reader_tests/test_utils.py000066400000000000000000000373071420401153000224750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Testing of helper functions.""" import os import unittest from datetime import datetime from unittest import mock import dask.array as da import numpy as np import numpy.testing import pyresample.geometry import xarray as xr from pyproj import CRS from satpy.readers import utils as hf class TestHelpers(unittest.TestCase): """Test the area helpers.""" def test_lonlat_from_geos(self): """Get lonlats from geos.""" import pyproj geos_area = mock.MagicMock() lon_0 = 0 h = 35785831.00 geos_area.crs = CRS({ 'a': 6378169.00, 'b': 6356583.80, 'h': h, 'lon_0': lon_0, 'proj': 'geos'}) proj = pyproj.Proj(geos_area.crs) expected = proj(0, 0, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(0, 0, geos_area)) expected = proj(0, 1000000, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(0, 1000000 / h, geos_area)) expected = proj(1000000, 0, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(1000000 / h, 0, geos_area)) expected = proj(2000000, -2000000, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(2000000 / h, -2000000 / h, geos_area)) def test_get_geostationary_bbox(self): """Get the geostationary bbox.""" geos_area = mock.MagicMock() lon_0 = 0 geos_area.crs = CRS({ 'proj': 'geos', 'lon_0': lon_0, 'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'units': 'm'}) geos_area.area_extent = [-5500000., -5500000., 5500000., 5500000.] 
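# --- Illustrative aside added by the editor (not part of the original archive) ---
# test_lonlat_from_geos above relies on the fact that geostationary scanning
# angles (in radians) map to 'geos' projection coordinates simply by scaling
# with the satellite height h:  x_metres = x_angle * h.  The standalone pyproj
# check below mirrors that relation, using the same fake ellipsoid parameters
# as the test; it is an independent illustration, not satpy code.
import pyproj

h = 35785831.0
proj = pyproj.Proj(proj='geos', h=h, lon_0=0.0, a=6378169.0, b=6356583.8)
x_angle, y_angle = 0.02, -0.01                      # scanning angles in radians
lon, lat = proj(x_angle * h, y_angle * h, inverse=True)
print(lon, lat)                                     # lon/lat on the chosen ellipsoid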
lon, lat = hf.get_geostationary_bounding_box(geos_area, 20) elon = np.array([-74.802824, -73.667708, -69.879687, -60.758081, -32.224989, 32.224989, 60.758081, 69.879687, 73.667708, 74.802824, 74.802824, 73.667708, 69.879687, 60.758081, 32.224989, -32.224989, -60.758081, -69.879687, -73.667708, -74.802824]) elat = -np.array([-6.81982903e-15, -1.93889346e+01, -3.84764764e+01, -5.67707359e+01, -7.18862588e+01, -7.18862588e+01, -5.67707359e+01, -3.84764764e+01, -1.93889346e+01, 0.00000000e+00, 6.81982903e-15, 1.93889346e+01, 3.84764764e+01, 5.67707359e+01, 7.18862588e+01, 7.18862588e+01, 5.67707359e+01, 3.84764764e+01, 1.93889346e+01, -0.00000000e+00]) np.testing.assert_allclose(lon, elon + lon_0) np.testing.assert_allclose(lat, elat) def test_get_geostationary_angle_extent(self): """Get max geostationary angles.""" geos_area = mock.MagicMock() proj_dict = { 'proj': 'geos', 'sweep': 'x', 'lon_0': -89.5, 'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'units': 'm'} geos_area.crs = CRS(proj_dict) expected = (0.15185342867090912, 0.15133555510297725) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) proj_dict['a'] = 1000.0 proj_dict['b'] = 1000.0 proj_dict['h'] = np.sqrt(2) * 1000.0 - 1000.0 geos_area.reset_mock() geos_area.crs = CRS(proj_dict) expected = (np.deg2rad(45), np.deg2rad(45)) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) proj_dict = { 'proj': 'geos', 'sweep': 'x', 'lon_0': -89.5, 'ellps': 'GRS80', 'h': 35785831.00, 'units': 'm'} geos_area.crs = CRS(proj_dict) expected = (0.15185277703584374, 0.15133971368991794) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) def test_geostationary_mask(self): """Test geostationary mask.""" # Compute mask of a very elliptical earth area = pyresample.geometry.AreaDefinition( 'FLDK', 'Full Disk', 'geos', {'a': '6378169.0', 'b': '3000000.0', 'h': '35785831.0', 'lon_0': '145.0', 'proj': 'geos', 'units': 'm'}, 101, 101, (-6498000.088960204, -6498000.088960204, 6502000.089024927, 6502000.089024927)) mask = hf.get_geostationary_mask(area).astype(int).compute() # Check results along a couple of lines # a) Horizontal self.assertTrue(np.all(mask[50, :8] == 0)) self.assertTrue(np.all(mask[50, 8:93] == 1)) self.assertTrue(np.all(mask[50, 93:] == 0)) # b) Vertical self.assertTrue(np.all(mask[:31, 50] == 0)) self.assertTrue(np.all(mask[31:70, 50] == 1)) self.assertTrue(np.all(mask[70:, 50] == 0)) # c) Top left to bottom right self.assertTrue(np.all(mask[range(33), range(33)] == 0)) self.assertTrue(np.all(mask[range(33, 68), range(33, 68)] == 1)) self.assertTrue(np.all(mask[range(68, 101), range(68, 101)] == 0)) # d) Bottom left to top right self.assertTrue(np.all(mask[range(101-1, 68-1, -1), range(33)] == 0)) self.assertTrue(np.all(mask[range(68-1, 33-1, -1), range(33, 68)] == 1)) self.assertTrue(np.all(mask[range(33-1, -1, -1), range(68, 101)] == 0)) @mock.patch('satpy.readers.utils.AreaDefinition') def test_sub_area(self, adef): """Sub area slicing.""" area = mock.MagicMock() area.pixel_size_x = 1.5 area.pixel_size_y = 1.5 area.pixel_upper_left = (0, 0) area.area_id = 'fakeid' area.name = 'fake name' area.proj_id = 'fakeproj' area.crs = 'some_crs' hf.get_sub_area(area, slice(1, 4), slice(0, 3)) adef.assert_called_once_with('fakeid', 'fake name', 'fakeproj', 'some_crs', 3, 3, (0.75, -3.75, 5.25, 0.75)) def test_np2str(self): """Test the np2str function.""" # byte object npstring = np.string_('hej') self.assertEqual(hf.np2str(npstring), 'hej') # single 
element numpy array np_arr = np.array([npstring]) self.assertEqual(hf.np2str(np_arr), 'hej') # scalar numpy array np_arr = np.array(npstring) self.assertEqual(hf.np2str(np_arr), 'hej') # multi-element array npstring = np.array([npstring, npstring]) self.assertRaises(ValueError, hf.np2str, npstring) # non-array self.assertRaises(ValueError, hf.np2str, 5) def test_get_earth_radius(self): """Test earth radius computation.""" a = 2. b = 1. def re(lat): """Compute ellipsoid radius at the given geodetic latitude. Reference: Capderou, M.: Handbook of Satellite Orbits, Equation (2.20). """ lat = np.deg2rad(lat) e2 = 1 - b ** 2 / a ** 2 n = a / np.sqrt(1 - e2*np.sin(lat)**2) return n * np.sqrt((1 - e2)**2 * np.sin(lat)**2 + np.cos(lat)**2) for lon in (0, 180, 270): self.assertEqual(hf.get_earth_radius(lon=lon, lat=0., a=a, b=b), a) for lat in (90, -90): self.assertEqual(hf.get_earth_radius(lon=0., lat=lat, a=a, b=b), b) self.assertTrue(np.isclose(hf.get_earth_radius(lon=123, lat=45., a=a, b=b), re(45.))) def test_reduce_mda(self): """Test metadata size reduction.""" mda = {'a': 1, 'b': np.array([1, 2, 3]), 'c': np.array([1, 2, 3, 4]), 'd': {'a': 1, 'b': np.array([1, 2, 3]), 'c': np.array([1, 2, 3, 4]), 'd': {'a': 1, 'b': np.array([1, 2, 3]), 'c': np.array([1, 2, 3, 4])}}} exp = {'a': 1, 'b': np.array([1, 2, 3]), 'd': {'a': 1, 'b': np.array([1, 2, 3]), 'd': {'a': 1, 'b': np.array([1, 2, 3])}}} numpy.testing.assert_equal(hf.reduce_mda(mda, max_size=3), exp) # Make sure, reduce_mda() doesn't modify the original dictionary self.assertIn('c', mda) self.assertIn('c', mda['d']) self.assertIn('c', mda['d']['d']) @mock.patch('satpy.readers.utils.bz2.BZ2File') @mock.patch('satpy.readers.utils.Popen') def test_unzip_file_pbzip2(self, mock_popen, mock_bz2): """Test the bz2 file unzipping techniques.""" process_mock = mock.Mock() attrs = {'communicate.return_value': (b'output', b'error'), 'returncode': 0} process_mock.configure_mock(**attrs) mock_popen.return_value = process_mock bz2_mock = mock.MagicMock() bz2_mock.read.return_value = b'TEST' mock_bz2.return_value = bz2_mock filename = 'tester.DAT.bz2' whichstr = 'satpy.readers.utils.which' # no bz2 installed with mock.patch(whichstr) as whichmock: whichmock.return_value = None new_fname = hf.unzip_file(filename) self.assertTrue(bz2_mock.read.called) self.assertTrue(os.path.exists(new_fname)) if os.path.exists(new_fname): os.remove(new_fname) # bz2 installed with mock.patch(whichstr) as whichmock: whichmock.return_value = '/usr/bin/pbzip2' new_fname = hf.unzip_file(filename) self.assertTrue(mock_popen.called) self.assertTrue(os.path.exists(new_fname)) if os.path.exists(new_fname): os.remove(new_fname) filename = 'tester.DAT' new_fname = hf.unzip_file(filename) self.assertIsNone(new_fname) @mock.patch("os.remove") @mock.patch("satpy.readers.utils.unzip_file", return_value='dummy.txt') def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove): """Test the bz2 file unzipping context manager.""" filename = 'dummy.txt.bz2' expected_filename = filename[:-4] with hf.unzip_context(filename) as new_filename: self.assertEqual(new_filename, expected_filename) fake_unzip_file.assert_called_with(filename) fake_remove.assert_called_with(expected_filename) def test_apply_rad_correction(self): """Test radiance correction technique using user-supplied coefs.""" slope = 0.5 offset = -0.1 res = hf.apply_rad_correction(1.0, slope, offset) np.testing.assert_allclose(2.2, res) def test_get_user_calibration_factors(self): """Test the retrieval of user-supplied 
calibration factors.""" radcor_dict = {'WV063': {'slope': 1.015, 'offset': -0.0556}, 'IR108': {'slo': 1.015, 'off': -0.0556}} # Test that correct values are returned from the dict slope, offset = hf.get_user_calibration_factors('WV063', radcor_dict) self.assertEqual(slope, 1.015) self.assertEqual(offset, -0.0556) # Test that channels not present in dict return 1.0, 0.0 with self.assertWarns(UserWarning): slope, offset = hf.get_user_calibration_factors('IR097', radcor_dict) self.assertEqual(slope, 1.) self.assertEqual(offset, 0.) # Check that incorrect dict keys throw an error with self.assertRaises(KeyError): hf.get_user_calibration_factors('IR108', radcor_dict) class TestSunEarthDistanceCorrection(unittest.TestCase): """Tests for applying Sun-Earth distance correction to reflectance.""" def setUp(self): """Create input / output arrays for the tests.""" self.test_date = datetime(2020, 8, 15, 13, 0, 40) raw_refl = xr.DataArray(da.from_array([10., 20., 40., 1., 98., 50.]), attrs={'start_time': self.test_date, 'scheduled_time': self.test_date}) corr_refl = xr.DataArray(da.from_array([10.50514689, 21.01029379, 42.02058758, 1.05051469, 102.95043957, 52.52573447]), attrs={'start_time': self.test_date, 'scheduled_time': self.test_date}) self.raw_refl = raw_refl self.corr_refl = corr_refl def test_get_utc_time(self): """Test the retrieval of scene time from a dataset.""" # First check correct time is returned with 'start_time' tmp_array = self.raw_refl.copy() del tmp_array.attrs['scheduled_time'] utc_time = hf.get_array_date(tmp_array, None) self.assertEqual(utc_time, self.test_date) # Now check correct time is returned with 'scheduled_time' tmp_array = self.raw_refl.copy() del tmp_array.attrs['start_time'] utc_time = hf.get_array_date(tmp_array, None) self.assertEqual(utc_time, self.test_date) # Now check correct time is returned with utc_date passed tmp_array = self.raw_refl.copy() new_test_date = datetime(2019, 2, 1, 15, 2, 12) utc_time = hf.get_array_date(tmp_array, new_test_date) self.assertEqual(utc_time, new_test_date) # Finally, ensure error is raised if no datetime is available tmp_array = self.raw_refl.copy() del tmp_array.attrs['scheduled_time'] del tmp_array.attrs['start_time'] with self.assertRaises(KeyError): hf.get_array_date(tmp_array, None) def test_apply_sunearth_corr(self): """Test the correction of reflectances with sun-earth distance.""" out_refl = hf.apply_earthsun_distance_correction(self.raw_refl) np.testing.assert_allclose(out_refl, self.corr_refl) self.assertTrue(out_refl.attrs['sun_earth_distance_correction_applied']) assert isinstance(out_refl.data, da.Array) def test_remove_sunearth_corr(self): """Test the removal of the sun-earth distance correction.""" out_refl = hf.remove_earthsun_distance_correction(self.corr_refl) np.testing.assert_allclose(out_refl, self.raw_refl) self.assertFalse(out_refl.attrs['sun_earth_distance_correction_applied']) assert isinstance(out_refl.data, da.Array) satpy-0.34.0/satpy/tests/reader_tests/test_vaisala_gld360.py000066400000000000000000000056031420401153000240260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittesting the Vaisala GLD360 reader.""" import unittest from io import StringIO import numpy as np from satpy.readers.vaisala_gld360 import VaisalaGLD360TextFileHandler from satpy.tests.utils import make_dataid class TestVaisalaGLD360TextFileHandler(unittest.TestCase): """Test the VaisalaGLD360TextFileHandler.""" def test_vaisala_gld360(self): """Test basic functionality for vaisala file handler.""" expected_power = np.array([12.3, 13.2, -31.]) expected_lat = np.array([30.5342, -0.5727, 12.1529]) expected_lon = np.array([-90.1152, 104.0688, -10.8756]) expected_time = np.array(['2017-06-20T00:00:00.007178000', '2017-06-20T00:00:00.020162000', '2017-06-20T00:00:00.023183000'], dtype='datetime64[ns]') filename = StringIO( u'2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' '2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n' '2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA' ) filename_info = {} filetype_info = {} self.handler = VaisalaGLD360TextFileHandler( filename, filename_info, filetype_info ) filename.close() # test power dataset_id = make_dataid(name='power') dataset_info = {'units': 'kA'} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_power, rtol=1e-05) # test lat dataset_id = make_dataid(name='latitude') dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lat, rtol=1e-05) # test lon dataset_id = make_dataid(name='longitude') dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lon, rtol=1e-05) # test time dataset_id = make_dataid(name='time') dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_time) satpy-0.34.0/satpy/tests/reader_tests/test_vii_base_nc.py000066400000000000000000000435371420401153000236000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
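# --- Illustrative sketch added by the editor (not part of the original archive) ---
# The Vaisala GLD360 test above feeds the handler a plain whitespace-separated
# text format: date, time, latitude, longitude, peak current and its unit.
# One minimal way to parse such lines is pandas' read_csv with whitespace
# delimiting; this is only an illustration of the format, and may differ from
# what the actual satpy handler does internally.
from io import StringIO

import pandas as pd

raw = StringIO(
    "2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n"
    "2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n"
)
names = ["date", "time", "latitude", "longitude", "power", "unit"]
df = pd.read_csv(raw, sep=r"\s+", header=None, names=names)
df["time"] = pd.to_datetime(df["date"] + " " + df["time"])
print(df[["time", "latitude", "longitude", "power"]])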
"""The vii_base_nc reader tests package.""" import datetime import os import unittest import uuid from unittest import mock import numpy as np import xarray as xr from netCDF4 import Dataset from satpy.readers.vii_base_nc import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR, ViiNCBaseFileHandler TEST_FILE = 'test_file_vii_base_nc.nc' class TestViiNCBaseFileHandler(unittest.TestCase): """Test the ViiNCBaseFileHandler reader.""" @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation') def setUp(self, pgi_): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" with Dataset(self.test_file_name, 'w') as nc: # Add global attributes nc.sensing_start_time_utc = "20170920173040.888" nc.sensing_end_time_utc = "20170920174117.555" nc.spacecraft = "test_spacecraft" nc.instrument = "test_instrument" # Create data group g1 = nc.createGroup('data') # Add dimensions to data group g1.createDimension('num_pixels', 10) g1.createDimension('num_lines', 100) # Create data/measurement_data group g1_1 = g1.createGroup('measurement_data') # Add dimensions to data/measurement_data group g1_1.createDimension('num_tie_points_act', 10) g1_1.createDimension('num_tie_points_alt', 100) # Add variables to data/measurement_data group tpw = g1_1.createVariable('tpw', np.float32, dimensions=('num_pixels', 'num_lines')) tpw[:] = 1. tpw.test_attr = 'attr' lon = g1_1.createVariable('longitude', np.float32, dimensions=('num_tie_points_act', 'num_tie_points_alt')) lon[:] = 100. lat = g1_1.createVariable('latitude', np.float32, dimensions=('num_tie_points_act', 'num_tie_points_alt')) lat[:] = 10. # Create quality group g2 = nc.createGroup('quality') # Add dimensions to quality group g2.createDimension('gap_items', 2) # Add variables to quality group var = g2.createVariable('duration_of_product', np.double, dimensions=()) var[:] = 1.0 var = g2.createVariable('duration_of_data_present', np.double, dimensions=()) var[:] = 2.0 var = g2.createVariable('duration_of_data_missing', np.double, dimensions=()) var[:] = 3.0 var = g2.createVariable('duration_of_data_degraded', np.double, dimensions=()) var[:] = 4.0 var = g2.createVariable('gap_start_time_utc', np.double, dimensions=('gap_items',)) var[:] = [5.0, 6.0] var = g2.createVariable('gap_end_time_utc', np.double, dimensions=('gap_items',)) var[:] = [7.0, 8.0] # Create longitude and latitude "interpolated" arrays interp_longitude = xr.DataArray(np.ones((10, 100))) interp_latitude = xr.DataArray(np.ones((10, 100)) * 2.) 
pgi_.return_value = (interp_longitude, interp_latitude) # Filename info valid for all readers filename_info = { 'creation_time': datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) } # Create a reader self.reader = ViiNCBaseFileHandler( filename=self.test_file_name, filename_info=filename_info, filetype_info={ 'cached_longitude': 'data/measurement_data/longitude', 'cached_latitude': 'data/measurement_data/latitude' } ) # Create a second reader where orthorectification and interpolation are inhibited # by means of the filetype_info flags self.reader_2 = ViiNCBaseFileHandler( filename=self.test_file_name, filename_info=filename_info, filetype_info={ 'cached_longitude': 'data/measurement_data/longitude', 'cached_latitude': 'data/measurement_data/latitude', 'interpolate': False, 'orthorect': False }, orthorect=True ) # Create a third reader without defining cached latitude and longitude # by means of the filetype_info flags self.reader_3 = ViiNCBaseFileHandler( filename=self.test_file_name, filename_info=filename_info, filetype_info={}, orthorect=True ) def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. try: os.remove(self.test_file_name) except OSError: pass def test_file_reading(self): """Test the file product reading.""" # Checks that the basic functionalities are correctly executed expected_start_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40, microsecond=888000) self.assertEqual(self.reader.start_time, expected_start_time) expected_end_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17, microsecond=555000) self.assertEqual(self.reader.end_time, expected_end_time) self.assertEqual(self.reader.spacecraft_name, "test_spacecraft") self.assertEqual(self.reader.sensor, "test_instrument") self.assertEqual(self.reader.ssp_lon, None) # Checks that the global attributes are correctly read expected_global_attributes = { 'filename': self.test_file_name, 'start_time': expected_start_time, 'end_time': expected_end_time, 'spacecraft_name': "test_spacecraft", 'ssp_lon': None, 'sensor': "test_instrument", 'filename_start_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), 'filename_end_time': datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50), 'platform_name': "test_spacecraft", 'quality_group': { 'duration_of_product': 1., 'duration_of_data_present': 2., 'duration_of_data_missing': 3., 'duration_of_data_degraded': 4., 'gap_start_time_utc': (5., 6.), 'gap_end_time_utc': (7., 8.) 
            }
        }

        global_attributes = self.reader._get_global_attributes()
        # Since the global_attributes dictionary contains numpy arrays,
        # it is not possible to perform a simple equality test
        # Must iterate on all keys to confirm that the dictionaries are equal
        self.assertEqual(global_attributes.keys(), expected_global_attributes.keys())
        for key in expected_global_attributes:
            if key not in ['quality_group']:
                # Quality check must be valid for both iterable and not iterable elements
                try:
                    equal = all(global_attributes[key] == expected_global_attributes[key])
                except (TypeError, ValueError):
                    equal = global_attributes[key] == expected_global_attributes[key]
                self.assertTrue(equal)
            else:
                self.assertEqual(global_attributes[key].keys(), expected_global_attributes[key].keys())
                for inner_key in global_attributes[key]:
                    # Equality check must be valid for both iterable and not iterable elements
                    try:
                        equal = all(global_attributes[key][inner_key] == expected_global_attributes[key][inner_key])
                    except (TypeError, ValueError):
                        equal = global_attributes[key][inner_key] == expected_global_attributes[key][inner_key]
                    self.assertTrue(equal)

    @mock.patch('satpy.readers.vii_base_nc.tie_points_interpolation')
    @mock.patch('satpy.readers.vii_base_nc.tie_points_geo_interpolation')
    def test_functions(self, tpgi_, tpi_):
        """Test the functions."""
        with self.assertRaises(NotImplementedError):
            self.reader._perform_orthorectification(mock.Mock(), mock.Mock())

        with self.assertRaises(NotImplementedError):
            self.reader._perform_calibration(mock.Mock(), mock.Mock())

        # Checks that the _perform_interpolation function is correctly executed
        variable = xr.DataArray(
            dims=('y', 'x'),
            name='test_name',
            attrs={
                'key_1': 'value_1',
                'key_2': 'value_2'
            },
            data=np.zeros((10, 100)),
        )
        tpi_.return_value = [xr.DataArray(
            dims=('num_tie_points_act', 'num_tie_points_alt'),
            data=np.ones((10, 100))
        )]

        return_value = self.reader._perform_interpolation(variable)

        tpi_.assert_called_with([variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR)
        self.assertTrue(np.allclose(return_value, np.ones((10, 100))))
        self.assertEqual(return_value.attrs, {'key_1': 'value_1', 'key_2': 'value_2'})
        self.assertEqual(return_value.name, 'test_name')
        self.assertEqual(return_value.dims, ('num_pixels', 'num_lines'))

        # Checks that the _perform_geo_interpolation function is correctly executed
        variable_lon = xr.DataArray(
            dims=('y', 'x'),
            name='test_lon',
            attrs={
                'key_1': 'value_lon_1',
                'key_2': 'value_lon_2'
            },
            data=np.zeros((10, 100))
        )
        variable_lat = xr.DataArray(
            dims=('y', 'x'),
            name='test_lat',
            attrs={
                'key_1': 'value_lat_1',
                'key_2': 'value_lat_2'
            },
            data=np.ones((10, 100)) * 2.
) tpgi_.return_value = ( xr.DataArray( dims=('num_tie_points_act', 'num_tie_points_alt'), data=np.ones((10, 100)) ), xr.DataArray( dims=('num_tie_points_act', 'num_tie_points_alt'), data=6 * np.ones((10, 100)) ) ) return_lon, return_lat = self.reader._perform_geo_interpolation(variable_lon, variable_lat) tpgi_.assert_called_with(variable_lon, variable_lat, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) self.assertTrue(np.allclose(return_lon, np.ones((10, 100)))) self.assertEqual(return_lon.attrs, {'key_1': 'value_lon_1', 'key_2': 'value_lon_2'}) self.assertEqual(return_lon.name, 'test_lon') self.assertEqual(return_lon.dims, ('num_pixels', 'num_lines')) self.assertTrue(np.allclose(return_lat, 6 * np.ones((10, 100)))) self.assertEqual(return_lat.attrs, {'key_1': 'value_lat_1', 'key_2': 'value_lat_2'}) self.assertEqual(return_lat.name, 'test_lat') self.assertEqual(return_lat.dims, ('num_pixels', 'num_lines')) def test_standardize_dims(self): """Test the standardize dims function.""" test_variable = xr.DataArray( dims=('num_pixels', 'num_lines'), name='test_data', attrs={ 'key_1': 'value_lat_1', 'key_2': 'value_lat_2' }, data=np.ones((10, 100)) * 1. ) out_variable = self.reader._standardize_dims(test_variable) self.assertTrue(np.allclose(out_variable.values, np.ones((100, 10)))) self.assertEqual(out_variable.dims, ('y', 'x')) self.assertEqual(out_variable.attrs['key_1'], 'value_lat_1') @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration') @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation') @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification') def test_dataset(self, po_, pi_, pc_): """Test the execution of the get_dataset function.""" # Checks the correct execution of the get_dataset function with a valid file_key variable = self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', 'calibration': None}) pc_.assert_not_called() pi_.assert_not_called() po_.assert_not_called() self.assertTrue(np.allclose(variable.values, np.ones((100, 10)))) self.assertEqual(variable.dims, ('y', 'x')) self.assertEqual(variable.attrs['test_attr'], 'attr') self.assertEqual(variable.attrs['units'], None) # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', 'calibration': 'reflectance', 'interpolate': True, 'standard_name': 'longitude'}) pc_.assert_called() pi_.assert_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification self.reader.orthorect = True self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', 'calibration': None, 'orthorect_data': 'test_orthorect_data'}) po_.assert_called() # Checks the correct execution of the get_dataset function with an invalid file_key invalid_dataset = self.reader.get_dataset(None, {'file_key': 'test_invalid', 'calibration': None}) # Checks that the function returns None self.assertEqual(invalid_dataset, None) pc_.reset_mock() pi_.reset_mock() po_.reset_mock() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key longitude = self.reader.get_dataset(None, {'file_key': 'cached_longitude', 'calibration': 'reflectance', 'interpolate': True}) pc_.assert_not_called() pi_.assert_not_called() self.assertEqual(longitude[0, 0], 1.) 
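# --- Illustrative sketch added by the editor (not part of the original archive) ---
# The mock assertions in test_dataset above (pc_/pi_/po_ called or not called)
# encode a dispatch pattern: calibration runs only when a calibration is
# requested, interpolation only when the dataset_info asks for it, and
# orthorectification only when both the handler and the dataset enable it.
# The function below is a schematic of that decision flow as implied by the
# assertions, not satpy's actual implementation; all names are illustrative.
def process(data, dataset_info, orthorect_enabled, calibrate, interpolate, orthorectify):
    """Apply the optional processing steps in the order the tests imply."""
    if dataset_info.get('calibration') is not None:
        data = calibrate(data, dataset_info)
    if dataset_info.get('interpolate', False):
        data = interpolate(data)
    if orthorect_enabled and dataset_info.get('orthorect_data'):
        data = orthorectify(data, dataset_info['orthorect_data'])
    return data


print(process(1.0, {'calibration': 'radiance'}, False,
              calibrate=lambda d, info: d * 2.0,
              interpolate=lambda d: d,
              orthorectify=lambda d, key: d))  # -> 2.0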
# Checks the correct execution of the get_dataset function with a 'cached_latitude' file_key latitude = self.reader.get_dataset(None, {'file_key': 'cached_latitude', 'calibration': None}) self.assertEqual(latitude[0, 0], 2.) # Repeats some check with the reader where orthorectification and interpolation are inhibited # by means of the filetype_info flags pc_.reset_mock() pi_.reset_mock() po_.reset_mock() # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', 'calibration': 'reflectance', 'interpolate': True, 'standard_name': 'longitude'}) pc_.assert_called() pi_.assert_not_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', 'calibration': None, 'orthorect_data': 'test_orthorect_data'}) po_.assert_not_called() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key longitude = self.reader_2.get_dataset(None, {'file_key': 'cached_longitude', 'calibration': None}) self.assertEqual(longitude[0, 0], 100.) # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key # in a reader without defined longitude longitude = self.reader_3.get_dataset(None, {'file_key': 'cached_longitude', 'calibration': 'reflectance', 'interpolate': True}) # Checks that the function returns None self.assertEqual(longitude, None) satpy-0.34.0/satpy/tests/reader_tests/test_vii_l1b_nc.py000066400000000000000000000162161420401153000233360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""The vii_l1b_nc reader tests package.""" import datetime import os import unittest import uuid import dask.array as da import numpy as np import xarray as xr from netCDF4 import Dataset from satpy.readers.vii_l1b_nc import ViiL1bNCFileHandler from satpy.readers.vii_utils import MEAN_EARTH_RADIUS TEST_FILE = 'test_file_vii_l1b_nc.nc' class TestViiL1bNCFileHandler(unittest.TestCase): """Test the ViiL1bNCFileHandler reader.""" def setUp(self): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" with Dataset(self.test_file_name, 'w') as nc: # Create data group g1 = nc.createGroup('data') # Add dimensions to data group g1.createDimension('num_chan_solar', 11) g1.createDimension('num_chan_thermal', 9) g1.createDimension('num_pixels', 72) g1.createDimension('num_lines', 600) # Create calibration_data group g1_1 = g1.createGroup('calibration_data') # Add variables to data/calibration_data group bt_a = g1_1.createVariable('bt_conversion_a', np.float32, dimensions=('num_chan_thermal',)) bt_a[:] = np.arange(9) bt_b = g1_1.createVariable('bt_conversion_b', np.float32, dimensions=('num_chan_thermal',)) bt_b[:] = np.arange(9) cw = g1_1.createVariable('channel_cw_thermal', np.float32, dimensions=('num_chan_thermal',)) cw[:] = np.arange(9) isi = g1_1.createVariable('Band_averaged_solar_irradiance', np.float32, dimensions=('num_chan_solar',)) isi[:] = np.arange(11) # Create measurement_data group g1_2 = g1.createGroup('measurement_data') # Add dimensions to data/measurement_data group g1_2.createDimension('num_tie_points_act', 10) g1_2.createDimension('num_tie_points_alt', 100) # Add variables to data/measurement_data group sza = g1_2.createVariable('solar_zenith', np.float32, dimensions=('num_tie_points_act', 'num_tie_points_alt')) sza[:] = 25.0 delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_pixels', 'num_lines')) delta_lat[:] = 1.0 self.reader = ViiL1bNCFileHandler( filename=self.test_file_name, filename_info={ 'creation_time': datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. 
try: os.remove(self.test_file_name) except OSError: pass def test_calibration_functions(self): """Test the calibration functions.""" radiance = np.array([[1.0, 2.0, 5.0], [7.0, 10.0, 20.0]]) cw = 13.0 a = 3.0 b = 100.0 bt = self.reader._calibrate_bt(radiance, cw, a, b) expected_bt = np.array([[675.04993213, 753.10301462, 894.93149648], [963.20401882, 1048.95086402, 1270.95546218]]) self.assertTrue(np.allclose(bt, expected_bt)) angle_factor = 0.4 isi = 2.0 refl = self.reader._calibrate_refl(radiance, angle_factor, isi) expected_refl = np.array([[0.628318531, 1.256637061, 3.141592654], [4.398229715, 6.283185307, 12.56637061]]) self.assertTrue(np.allclose(refl, expected_refl)) def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( dims=('num_pixels', 'num_lines'), name='test_name', attrs={ 'key_1': 'value_1', 'key_2': 'value_2' }, data=da.from_array(np.ones((72, 600))) ) orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') expected_values = np.degrees(np.ones((72, 600)) / MEAN_EARTH_RADIUS) + np.ones((72, 600)) self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) # Checks that the _perform_calibration function is correctly executed in all cases # radiance calibration: return value is simply a copy of the variable return_variable = self.reader._perform_calibration(variable, {'calibration': 'radiance'}) self.assertTrue(np.all(return_variable == variable)) # invalid calibration: raises a ValueError with self.assertRaises(ValueError): self.reader._perform_calibration(variable, {'calibration': 'invalid', 'name': 'test'}) # brightness_temperature calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, {'calibration': 'brightness_temperature', 'chan_thermal_index': 3}) expected_values = np.ones((72, 600)) * 302007.42728603 self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) # reflectance calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, {'calibration': 'reflectance', 'wavelength': [0.658, 0.668, 0.678], 'chan_solar_index': 2}) expected_values = np.ones((72, 600)) * 1.733181982 * (0.678 - 0.658) self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) satpy-0.34.0/satpy/tests/reader_tests/test_vii_l2_nc.py000066400000000000000000000070311420401153000231700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
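# Note: the orthorectification expectation in TestViiL2NCFileHandler.test_functions below
# (1.0 + 0.1 = 1.1) implies that the L2 handler applies the delta_lat offset directly in
# degrees, whereas the L1b test above expects the offset to be converted from metres first,
# i.e. np.degrees(delta / MEAN_EARTH_RADIUS) + data. This is only an observation based on the
# expected values in the two tests, not a claim about the reader implementations themselves.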
"""The vii_2_nc reader tests package.""" import datetime import os import unittest import uuid import dask.array as da import numpy as np import xarray as xr from netCDF4 import Dataset from satpy.readers.vii_l2_nc import ViiL2NCFileHandler TEST_FILE = 'test_file_vii_l2_nc.nc' class TestViiL2NCFileHandler(unittest.TestCase): """Test the ViiL2NCFileHandler reader.""" def setUp(self): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" with Dataset(self.test_file_name, 'w') as nc: # Create data group g1 = nc.createGroup('data') # Add dimensions to data group g1.createDimension('num_pixels', 10) g1.createDimension('num_lines', 100) # Create measurement_data group g1_2 = g1.createGroup('measurement_data') # Add variables to data/measurement_data group delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_pixels', 'num_lines')) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ 'creation_time': datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. try: os.remove(self.test_file_name) except OSError: pass def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( dims=('num_pixels', 'num_lines'), name='test_name', attrs={ 'key_1': 'value_1', 'key_2': 'value_2' }, data=da.from_array(np.ones((10, 100))) ) orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') expected_values = 1.1 * np.ones((10, 100)) self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) self.assertEqual(orthorect_variable.attrs['key_1'], 'value_1') satpy-0.34.0/satpy/tests/reader_tests/test_vii_utils.py000066400000000000000000000027371420401153000233430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""The vii_utils reader tests package.""" import unittest import satpy.readers.vii_utils # Constants to be tested C1 = 1.191062e+8 C2 = 1.4387863e+4 TIE_POINTS_FACTOR = 8 SCAN_ALT_TIE_POINTS = 4 MEAN_EARTH_RADIUS = 6371008.7714 class TestViiUtils(unittest.TestCase): """Test the vii_utils module.""" def test_constants(self): """Test the constant values.""" # Test the value of the constants self.assertEqual(satpy.readers.vii_utils.C1, C1) self.assertEqual(satpy.readers.vii_utils.C2, C2) self.assertEqual(satpy.readers.vii_utils.TIE_POINTS_FACTOR, TIE_POINTS_FACTOR) self.assertEqual(satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS, SCAN_ALT_TIE_POINTS) self.assertEqual(satpy.readers.vii_utils.MEAN_EARTH_RADIUS, MEAN_EARTH_RADIUS) satpy-0.34.0/satpy/tests/reader_tests/test_viirs_compact.py000066400000000000000000003554661420401153000242100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.viirs_compact module.""" import os import tempfile import unittest from contextlib import suppress import h5py import numpy as np class TestCompact(unittest.TestCase): """Test class for reading compact viirs format.""" def setUp(self): """Create a fake file from scratch.""" fake_dnb = { "All_Data": { "ModeGran": {"value": 0}, "ModeScan": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 249, ], dtype=np.uint8, ) }, "NumberOfScans": {"value": np.array([47])}, "VIIRS-DNB-GEO_All": { "AlignmentCoefficient": { "value": np.array( [ 2.11257413e-02, 2.11152732e-02, 2.11079046e-02, 2.10680142e-02, 1.80840008e-02, 1.80402063e-02, 1.79968309e-02, 1.79477539e-02, 2.20463774e-03, 2.17431062e-03, 2.14360282e-03, 2.11503846e-03, 2.08630669e-03, 2.05924874e-03, 2.03177333e-03, 2.00573727e-03, 1.98072987e-03, 1.95503305e-03, 1.93077011e-03, 1.90702057e-03, 1.88353716e-03, 1.86104013e-03, 1.83863181e-03, 1.81696517e-03, 1.79550308e-03, 1.77481642e-03, 1.75439729e-03, 1.73398503e-03, 1.71459839e-03, 1.69516564e-03, 1.67622324e-03, 1.65758410e-03, 1.63990213e-03, 1.62128301e-03, 1.60375470e-03, 1.58667017e-03, 1.61543000e-03, 1.59775047e-03, 1.50719041e-03, 1.48937735e-03, 1.47257745e-03, 1.50070526e-03, 1.48288533e-03, 9.29064234e-04, 9.12246935e-04, 8.95748264e-04, 8.71886965e-04, 8.55044520e-04, 8.38686305e-04, 8.18263041e-04, 8.01501446e-04, 7.85346841e-04, 1.15984806e-03, 1.14326552e-03, 1.12648588e-03, 1.11018715e-03, 1.09399087e-03, 1.19698711e-03, 1.18051842e-03, 1.16404379e-03, 1.14832399e-03, 9.92591376e-04, 9.75896895e-04, 9.59663419e-04, 9.43415158e-04, 9.27662419e-04, 8.92253709e-04, 8.75947590e-04, 8.60177504e-04, 8.44484195e-04, 8.35279003e-04, 8.19236680e-04, 8.03303672e-04, 7.87482015e-04, 7.60449213e-04, 7.44239136e-04, 7.28625571e-04, 7.12990935e-04, 
6.89090986e-04, 6.73000410e-04, 6.57248020e-04, 6.41623745e-04, 6.20219158e-04, 6.04308851e-04, 5.88596100e-04, 5.73108089e-04, 3.65344196e-04, 3.49639275e-04, 3.34273063e-04, 4.81286290e-04, 4.65485587e-04, 4.49862011e-04, 4.34543617e-04, 4.19324206e-04, 2.60536268e-04, 2.45052564e-04, 2.29740850e-04, 2.34466774e-04, 2.18822126e-04, 2.03370175e-04, 1.88058810e-04, 1.60192372e-04, 1.44485937e-04, 1.28920830e-04, 3.45615146e-04, 3.30171984e-04, 3.14682693e-04, 2.99300562e-04, 2.83925037e-04, 2.68518896e-04, 2.53254839e-04, 2.37950648e-04, 2.22716670e-04, 2.07562072e-04, 1.92296386e-04, 1.77147449e-04, 1.61994336e-04, 1.46895778e-04, 1.31844325e-04, 1.16730320e-04, 1.01757469e-04, 8.67861963e-05, 7.18669180e-05, 5.70719567e-05, 4.24701866e-05, 2.84846719e-05, 1.70599415e-05, -1.47213286e-05, -2.33691408e-05, -3.68025649e-05, -5.12388433e-05, -6.59972284e-05, -8.08926561e-05, -9.58433884e-05, -1.10882705e-04, -1.25976600e-04, -1.41044657e-04, -1.56166439e-04, -1.71307023e-04, -1.86516074e-04, -2.01731804e-04, -2.16980450e-04, -2.32271064e-04, -2.47527263e-04, -2.62940506e-04, -2.78283434e-04, -2.93711084e-04, -3.09180934e-04, -3.24661058e-04, -3.40237195e-04, -1.27807143e-04, -1.43646437e-04, -1.59638614e-04, -1.87593061e-04, -2.03169184e-04, -2.18941437e-04, -2.34920750e-04, -2.30605408e-04, -2.46262236e-04, -2.62226094e-04, -4.19838558e-04, -4.35510388e-04, -4.51152271e-04, -4.67120990e-04, -4.83241311e-04, -3.37647041e-04, -3.53568990e-04, -3.69836489e-04, -5.76354389e-04, -5.92070050e-04, -6.08178903e-04, -6.24440494e-04, -6.45648804e-04, -6.61431870e-04, -6.77491073e-04, -6.93967624e-04, -7.17683870e-04, -7.33471534e-04, -7.49999890e-04, -7.66390527e-04, -7.93468382e-04, -8.09502264e-04, -8.25728697e-04, -8.42282083e-04, -8.51265620e-04, -8.67322611e-04, -8.83649045e-04, -9.00280487e-04, -9.35055199e-04, -9.51097580e-04, -9.67527216e-04, -9.84144746e-04, -1.00128003e-03, -1.15522649e-03, -1.17168750e-03, -1.18826574e-03, -1.20496599e-03, -1.10272120e-03, -1.11865194e-03, -1.13539130e-03, -1.15241797e-03, -1.16964686e-03, -7.97322951e-04, -8.14269355e-04, -8.31696263e-04, -8.51555436e-04, -8.68656265e-04, -8.86220601e-04, -9.09406052e-04, -9.26509325e-04, -9.44124535e-04, -1.49479776e-03, -1.51314179e-03, -1.48387800e-03, -1.50146009e-03, -1.51945755e-03, -1.61006744e-03, -1.62846781e-03, -1.59783731e-03, -1.61545863e-03, -1.63336343e-03, -1.65167439e-03, -1.67034590e-03, -1.68956630e-03, -1.70884258e-03, -1.72863202e-03, -1.74859120e-03, -1.76901231e-03, -1.79015659e-03, -1.81144674e-03, -1.83329231e-03, -1.85552111e-03, -1.87840930e-03, -1.90151483e-03, -1.92550803e-03, -1.94982730e-03, -1.97511422e-03, -2.00066133e-03, -2.02709576e-03, -2.05422146e-03, -2.08215159e-03, -2.11093877e-03, -2.14011059e-03, -2.17073411e-03, -2.20196834e-03, -2.23409734e-03, -2.26700748e-03, -2.30150856e-03, -2.33719964e-03, -2.37406371e-03, -2.41223071e-03, -2.45184498e-03, -2.49327719e-03, -2.53651105e-03, -2.58166087e-03, -2.62895599e-03, -2.67871981e-03, -2.73117283e-03, -5.49861044e-03, -5.55437338e-03, -5.61159104e-03, -5.67073002e-03, -5.73173212e-03, -5.79498662e-03, -5.85969677e-03, -5.92768658e-03, -5.99809457e-03, -6.07080618e-03, -6.14715228e-03, -6.22711331e-03, ], dtype=np.float32, ) }, "ExpansionCoefficient": { "value": np.array( [ 1.17600127e-03, 1.17271533e-03, 1.17000856e-03, 1.16674276e-03, 2.11251900e-03, 2.10516527e-03, 2.09726905e-03, 2.08941335e-03, 1.63907595e-02, 1.58577170e-02, 1.53679820e-02, 1.49007449e-02, 1.44708352e-02, 1.40612368e-02, 1.36818690e-02, 1.33193973e-02, 
1.29744308e-02, 1.26568424e-02, 1.23488475e-02, 1.20567940e-02, 1.17803067e-02, 1.15150018e-02, 1.12629030e-02, 1.10203745e-02, 1.07905651e-02, 1.05690639e-02, 1.03563424e-02, 1.01526314e-02, 9.95650515e-03, 9.76785459e-03, 9.58597753e-03, 9.41115711e-03, 9.23914276e-03, 9.07964632e-03, 8.92116502e-03, 8.76654685e-03, 9.04925726e-03, 8.88936501e-03, 9.14804544e-03, 8.98920093e-03, 8.83030891e-03, 9.06952657e-03, 8.90891161e-03, 1.36343827e-02, 1.32706892e-02, 1.29242949e-02, 1.36271119e-02, 1.32572902e-02, 1.29025253e-02, 1.35165229e-02, 1.31412474e-02, 1.27808526e-02, 8.91761761e-03, 8.74674786e-03, 8.58181808e-03, 8.42147414e-03, 8.26664641e-03, 7.81304855e-03, 7.67400907e-03, 7.54208490e-03, 7.40892906e-03, 8.81091598e-03, 8.62924196e-03, 8.45206063e-03, 8.28018785e-03, 8.11239891e-03, 8.62185098e-03, 8.43446422e-03, 8.25031102e-03, 8.07087123e-03, 8.30837712e-03, 8.11944436e-03, 7.93648325e-03, 7.75875151e-03, 8.14332347e-03, 7.94676598e-03, 7.75293307e-03, 7.56529858e-03, 7.88933039e-03, 7.68536143e-03, 7.48489471e-03, 7.28917075e-03, 7.55438488e-03, 7.34063145e-03, 7.13229552e-03, 6.92783622e-03, 1.06161544e-02, 1.01234140e-02, 9.64432582e-03, 6.52031973e-03, 6.29310543e-03, 6.06948463e-03, 5.84984245e-03, 5.63343242e-03, 8.61937553e-03, 8.08268972e-03, 7.55874207e-03, 6.79610623e-03, 6.32849289e-03, 5.86955249e-03, 5.41723240e-03, 5.56734810e-03, 5.01116784e-03, 4.46233014e-03, 1.40874484e-03, 1.34475902e-03, 1.28140685e-03, 1.21824886e-03, 1.15505024e-03, 1.09222531e-03, 1.02962845e-03, 9.67168540e-04, 9.04808170e-04, 8.42478999e-04, 7.80681905e-04, 7.18652213e-04, 6.56902499e-04, 5.95146266e-04, 5.33432467e-04, 4.72071581e-04, 4.10460081e-04, 3.49062117e-04, 2.87777104e-04, 2.26464268e-04, 1.65259655e-04, 1.03993290e-04, 4.27830964e-05, -1.84028686e-05, -7.95840388e-05, -1.40780976e-04, -2.01987947e-04, -2.63233029e-04, -3.24499299e-04, -3.85862397e-04, -4.47216793e-04, -5.08567959e-04, -5.70152479e-04, -6.31901203e-04, -6.93684444e-04, -7.55490037e-04, -8.17523745e-04, -8.79664498e-04, -9.41973762e-04, -1.00450485e-03, -1.06710335e-03, -1.12990546e-03, -1.19290419e-03, -1.25615683e-03, -1.31971564e-03, -1.38323894e-03, -4.38789371e-03, -4.93527949e-03, -5.48970094e-03, -5.34658274e-03, -5.79780247e-03, -6.25621388e-03, -6.72366377e-03, -7.48283789e-03, -8.00681766e-03, -8.54192488e-03, -5.58420410e-03, -5.79793099e-03, -6.01683883e-03, -6.23886706e-03, -6.46463828e-03, -9.56355780e-03, -1.00387875e-02, -1.05282217e-02, -6.87109074e-03, -7.07587786e-03, -7.28309387e-03, -7.49528036e-03, -7.23363785e-03, -7.42882164e-03, -7.62982434e-03, -7.83343613e-03, -7.51076965e-03, -7.69859226e-03, -7.88733363e-03, -8.08352232e-03, -7.69890239e-03, -7.87641760e-03, -8.05852562e-03, -8.24564695e-03, -8.00882280e-03, -8.18727538e-03, -8.36882368e-03, -8.55544209e-03, -8.04922916e-03, -8.21674801e-03, -8.38823151e-03, -8.56383517e-03, -8.74411128e-03, -7.35407788e-03, -7.48245185e-03, -7.61653157e-03, -7.75389513e-03, -8.20003450e-03, -8.35770369e-03, -8.51695240e-03, -8.67962278e-03, -8.84699915e-03, -1.26767000e-02, -1.30308550e-02, -1.34020159e-02, -1.27902590e-02, -1.31374933e-02, -1.35022206e-02, -1.28020663e-02, -1.31427627e-02, -1.35003338e-02, -8.81921593e-03, -8.97676684e-03, -8.73885304e-03, -8.89289286e-03, -9.05076787e-03, -8.79113190e-03, -8.94579384e-03, -8.66949651e-03, -8.81993212e-03, -8.97467043e-03, -9.13402718e-03, -9.29924846e-03, -9.47104022e-03, -9.64829233e-03, -9.83224157e-03, -1.00242840e-02, -1.02243433e-02, -1.04304748e-02, -1.06464764e-02, -1.08723603e-02, 
-1.11076497e-02, -1.13517633e-02, -1.16107482e-02, -1.18797245e-02, -1.21643478e-02, -1.24597261e-02, -1.27725713e-02, -1.31026637e-02, -1.34509858e-02, -1.38195883e-02, -1.42097492e-02, -1.46267340e-02, -1.50670996e-02, -1.55417984e-02, -1.60482023e-02, -1.65943075e-02, -1.71795618e-02, -1.78127103e-02, -1.84999816e-02, -1.92504879e-02, -2.00698171e-02, -2.09702197e-02, -2.19654124e-02, -2.30720937e-02, -2.43106075e-02, -2.57069822e-02, -2.72962451e-02, -1.43178934e-02, -1.48085468e-02, -1.53383436e-02, -1.59113277e-02, -1.65353119e-02, -1.72161739e-02, -1.79625414e-02, -1.87847745e-02, -1.96950957e-02, -2.07099430e-02, -2.18482167e-02, -2.31328830e-02, ], dtype=np.float32, ) }, "Latitude": {"value": np.random.rand(96, 332).astype(np.float32)}, "Longitude": {"value": np.random.rand(96, 332).astype(np.float32)}, "LunarAzimuthAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "LunarZenithAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "MidTime": { "value": np.array( [ 1950675122400462, 1950675124187044, 1950675125973621, 1950675127760200, 1950675129546777, 1950675131333401, 1950675133119981, 1950675134906559, 1950675136693138, 1950675138479716, 1950675140266341, 1950675142052918, 1950675143839498, 1950675145626075, 1950675147412654, 1950675149199278, 1950675150985857, 1950675152772434, 1950675154559014, 1950675156345591, 1950675158132216, 1950675159918795, 1950675161705373, 1950675163491595, 1950675165278173, 1950675167064395, 1950675168850973, 1950675170637195, 1950675172423773, 1950675174209995, 1950675175996573, 1950675177782795, 1950675179569373, 1950675181355595, 1950675183142173, 1950675184928395, 1950675186714973, 1950675188501195, 1950675190287773, 1950675192073995, 1950675193860573, 1950675195646795, 1950675197433373, 1950675199219595, 1950675201006173, 1950675202792395, 1950675204578973, -993, ] ) }, "MoonIllumFraction": {"value": 11.518141746520996}, "MoonPhaseAngle": {"value": 140.32131958007812}, "NumberOfTiePointZoneGroupsScan": {"value": 62}, "NumberOfTiePointZoneGroupsTrack": {"value": 1}, "NumberOfTiePointZonesScan": { "value": np.array( [ 1, 1, 1, 1, 1, 1, 1, 1, 28, 2, 3, 2, 3, 3, 3, 5, 4, 5, 4, 4, 4, 4, 4, 3, 5, 3, 4, 3, 23, 23, 3, 4, 3, 5, 3, 4, 4, 4, 4, 4, 5, 4, 5, 3, 3, 3, 2, 3, 2, 40, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ], dtype=np.int32, ) }, "NumberOfTiePointZonesTrack": {"value": 1}, "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)}, "QF1_SCAN_VIIRSSDRGEO": { "value": np.array( [ 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 2, 130, 2, 130, 2, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 0, ], dtype=np.uint8, ) }, "QF2_SCAN_VIIRSSDRGEO": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, ], dtype=np.uint8, ) }, "SCAttitude": { "value": np.array( [ [-9.22587514e-01, 3.92340779e00, 5.93621433e-01], [-2.82428920e-01, 3.98425841e00, 7.05978215e-01], [5.63421488e-01, 3.83695555e00, 3.93174857e-01], [-3.16407561e-01, 3.85351181e00, 5.33868372e-01], [-1.10977542e00, 3.82791996e00, 6.06707633e-01], [-1.46703672e00, 3.94862103e00, 6.45296216e-01], [-1.14162290e00, 3.79930806e00, 7.45548725e-01], [-1.56181908e00, 3.68108273e00, 6.49301231e-01], [-1.46823406e00, 3.63365412e00, 5.03535330e-01], [-1.02590537e00, 3.64477968e00, 5.22250295e-01], [-5.35379410e-01, 3.69151831e00, 4.32526857e-01], [-5.78065366e-02, 3.37806726e00, 
4.95986529e-02], [-2.40110800e-01, 3.22970843e00, -9.55391768e-03], [-6.54527247e-01, 3.16465378e00, 1.89672917e-01], [-1.35780311e00, 3.24750924e00, 1.63008988e-01], [-1.47417045e00, 3.39788198e00, 1.84387550e-01], [-1.74577117e00, 3.53278613e00, 1.89606979e-01], [-1.46304774e00, 3.22666740e00, 1.59070507e-01], [-4.05473042e00, 3.06258607e00, 1.10443914e00], [-5.91582203e00, 2.83895302e00, 1.79846287e00], [-7.04713678e00, 2.55699897e00, 2.23985386e00], [-7.43741798e00, 2.21711683e00, 2.42266488e00], [-7.06249666e00, 1.81872594e00, 2.33713675e00], [-5.96051836e00, 1.36609375e00, 1.99506497e00], [-4.13137341e00, 8.60225558e-01, 1.39551389e00], [-1.57741416e00, 3.02793205e-01, 5.36690295e-01], [7.63817742e-12, 1.11727738e-10, 2.74194088e-11], [-1.24213686e-11, 8.01499769e-11, -1.34056446e-11], [1.78272761e-11, 9.04948685e-11, 1.77389995e-11], [-1.47259357e-11, 9.37734057e-11, -3.89882709e-11], [-1.94052344e-11, 1.49411969e-10, -2.48492286e-11], [3.40418752e-12, 1.25333730e-10, 1.14499972e-11], [5.64890669e-12, 1.35170833e-10, 2.27858565e-11], [8.78361273e-12, 1.02109009e-10, -5.92111386e-12], [1.47398396e-11, 8.59943505e-11, -8.54686872e-13], [-5.35027361e-12, 1.25450331e-10, -1.54262800e-11], [2.12667054e-11, 1.57356642e-10, 2.54392306e-11], [-6.39285022e-12, 1.42791029e-10, -8.58749790e-12], [-2.18451160e-11, 9.94347313e-11, -2.18451160e-11], [1.77587389e-11, 1.16834944e-10, 3.09037483e-11], [5.09583955e-12, 1.06878555e-10, 1.30452402e-11], [-1.25895900e-11, 1.06217646e-10, -1.07971496e-11], [1.45264981e-11, 1.03935242e-10, 1.73963136e-11], [-1.41730258e-12, 7.72037989e-11, 1.15057850e-11], [1.99397634e-11, 1.36618120e-10, 4.70010628e-11], [1.24784124e-11, 1.14499965e-10, 4.69658253e-12], [-1.83001236e-11, 5.19546177e-11, -1.31873679e-11], [-9.99299988e02, -9.99299988e02, -9.99299988e02], ], dtype=np.float32, ) }, "SCPosition": { "value": np.array( [ [2.3191672e06, -4.5127075e06, 5.1096645e06], [2.3202438e06, -4.5225140e06, 5.1005205e06], [2.3213098e06, -4.5323050e06, 5.0913595e06], [2.3223650e06, -4.5420810e06, 5.0821800e06], [2.3234100e06, -4.5518415e06, 5.0729835e06], [2.3244445e06, -4.5615875e06, 5.0637700e06], [2.3254692e06, -4.5713185e06, 5.0545390e06], [2.3264830e06, -4.5810340e06, 5.0452915e06], [2.3274862e06, -4.5907340e06, 5.0360255e06], [2.3284792e06, -4.6004185e06, 5.0267430e06], [2.3294620e06, -4.6100885e06, 5.0174430e06], [2.3304345e06, -4.6197430e06, 5.0081270e06], [2.3313962e06, -4.6293820e06, 4.9987935e06], [2.3323475e06, -4.6390050e06, 4.9894420e06], [2.3332888e06, -4.6486130e06, 4.9800740e06], [2.3342195e06, -4.6582060e06, 4.9706890e06], [2.3351398e06, -4.6677835e06, 4.9612880e06], [2.3360495e06, -4.6773440e06, 4.9518685e06], [2.3369522e06, -4.6868750e06, 4.9424430e06], [2.3378502e06, -4.6963695e06, 4.9330150e06], [2.3387432e06, -4.7058270e06, 4.9235845e06], [2.3396312e06, -4.7152475e06, 4.9141520e06], [2.3405140e06, -4.7246290e06, 4.9047175e06], [2.3413915e06, -4.7339725e06, 4.8952825e06], [2.3422642e06, -4.7432805e06, 4.8858430e06], [2.3431318e06, -4.7525505e06, 4.8764035e06], [2.3439710e06, -4.7618790e06, 4.8668965e06], [2.3447770e06, -4.7712820e06, 4.8573130e06], [2.3455728e06, -4.7806710e06, 4.8477115e06], [2.3463582e06, -4.7900425e06, 4.8380950e06], [2.3471335e06, -4.7994005e06, 4.8284610e06], [2.3478980e06, -4.8087395e06, 4.8188110e06], [2.3486522e06, -4.8180645e06, 4.8091435e06], [2.3493960e06, -4.8273715e06, 4.7994615e06], [2.3501298e06, -4.8366645e06, 4.7897610e06], [2.3508530e06, -4.8459395e06, 4.7800465e06], [2.3515658e06, -4.8552000e06, 4.7703130e06], 
[2.3522680e06, -4.8644420e06, 4.7605655e06], [2.3529602e06, -4.8736700e06, 4.7508000e06], [2.3536420e06, -4.8828800e06, 4.7410205e06], [2.3543132e06, -4.8920755e06, 4.7312230e06], [2.3549740e06, -4.9012520e06, 4.7214105e06], [2.3556248e06, -4.9104145e06, 4.7115800e06], [2.3562650e06, -4.9195590e06, 4.7017360e06], [2.3568952e06, -4.9286890e06, 4.6918745e06], [2.3575145e06, -4.9378000e06, 4.6819980e06], [2.3581235e06, -4.9468960e06, 4.6721035e06], [-9.9929999e02, -9.9929999e02, -9.9929999e02], ], dtype=np.float32, ) }, "SCSolarAzimuthAngle": { "value": np.array( [ -140.6137, -140.54446, -140.47484, -140.40486, -140.33464, -140.26427, -140.19333, -140.12198, -140.05042, -139.97855, -139.90648, -139.83394, -139.76117, -139.68803, -139.61465, -139.54103, -139.46695, -139.3923, -139.31741, -139.2424, -139.16727, -139.09201, -139.01662, -138.94112, -138.86546, -138.78972, -138.71251, -138.63487, -138.5569, -138.4786, -138.39995, -138.32097, -138.24161, -138.16193, -138.0819, -138.00153, -137.92078, -137.8397, -137.75827, -137.67648, -137.59433, -137.51183, -137.42896, -137.34573, -137.26213, -137.17819, -137.09386, -999.3, ], dtype=np.float32, ) }, "SCSolarZenithAngle": { "value": np.array( [ 135.88528, 135.96703, 136.04868, 136.1302, 136.21165, 136.2931, 136.37451, 136.4556, 136.53659, 136.61748, 136.69843, 136.77931, 136.86021, 136.94092, 137.02148, 137.10208, 137.18248, 137.26239, 137.34204, 137.42155, 137.50092, 137.58014, 137.65923, 137.73816, 137.81696, 137.8956, 137.97507, 138.05447, 138.13382, 138.21303, 138.29218, 138.37122, 138.45016, 138.529, 138.60777, 138.68642, 138.76498, 138.84343, 138.9218, 139.00005, 139.07823, 139.15627, 139.23422, 139.31207, 139.38983, 139.46748, 139.54503, -999.3, ], dtype=np.float32, ) }, "SCVelocity": { "value": np.array( [ [605.31726, -5492.9614, -5113.397], [599.4935, -5484.5615, -5123.1396], [593.66986, -5476.142, -5132.8657], [587.8464, -5467.7017, -5142.573], [582.02313, -5459.241, -5152.263], [576.19995, -5450.7607, -5161.936], [570.37714, -5442.2607, -5171.592], [564.5546, -5433.741, -5181.2295], [558.73236, -5425.2, -5190.849], [552.9104, -5416.6396, -5200.4517], [547.0887, -5408.06, -5210.0366], [541.26746, -5399.4604, -5219.6035], [535.44666, -5390.841, -5229.153], [529.6263, -5382.201, -5238.684], [523.8063, -5373.5415, -5248.1978], [517.9866, -5364.863, -5257.694], [512.16754, -5356.1646, -5267.1724], [506.34906, -5347.446, -5276.632], [500.53455, -5338.72, -5286.0645], [494.72552, -5329.993, -5295.466], [488.9218, -5321.265, -5304.8364], [483.1238, -5312.536, -5314.1743], [477.33157, -5303.806, -5323.4795], [471.546, -5295.0767, -5332.7515], [465.7647, -5286.344, -5341.9937], [459.99005, -5277.613, -5351.2026], [454.19785, -5268.798, -5360.442], [448.38614, -5259.887, -5369.7207], [442.57404, -5250.955, -5378.983], [436.7639, -5242.0063, -5388.225], [430.9534, -5233.0366, -5397.4517], [425.145, -5224.0483, -5406.6567], [419.33627, -5215.0396, -5415.845], [413.52963, -5206.013, -5425.014], [407.72275, -5196.9663, -5434.1665], [401.91797, -5187.9023, -5443.299], [396.11307, -5178.8164, -5452.4136], [390.3103, -5169.7134, -5461.508], [384.50742, -5160.59, -5470.586], [378.70673, -5151.4497, -5479.644], [372.90598, -5142.288, -5488.6846], [367.1075, -5133.109, -5497.7046], [361.309, -5123.9097, -5506.708], [355.5128, -5114.6934, -5515.691], [349.71658, -5105.4565, -5524.657], [343.9228, -5096.202, -5533.602], [338.12906, -5086.927, -5542.53], [-999.3, -999.3, -999.3], ], dtype=np.float32, ) }, "SatelliteAzimuthAngle": { "value": np.random.rand(96, 
332).astype(np.float32) }, "SatelliteZenithAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "SolarAzimuthAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "SolarZenithAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "StartTime": { "value": np.array( [ 1950675122120971, 1950675123907557, 1950675125694139, 1950675127480722, 1950675129267304, 1950675131053910, 1950675132840494, 1950675134627077, 1950675136413660, 1950675138200243, 1950675139986850, 1950675141773433, 1950675143560016, 1950675145346598, 1950675147133181, 1950675148919788, 1950675150706371, 1950675152492953, 1950675154279537, 1950675156066119, 1950675157852726, 1950675159639309, 1950675161425892, 1950675163212109, 1950675164998692, 1950675166784909, 1950675168571492, 1950675170357709, 1950675172144292, 1950675173930509, 1950675175717092, 1950675177503309, 1950675179289892, 1950675181076109, 1950675182862692, 1950675184648909, 1950675186435492, 1950675188221709, 1950675190008292, 1950675191794509, 1950675193581092, 1950675195367309, 1950675197153892, 1950675198940109, 1950675200726692, 1950675202512909, 1950675204299492, -993, ] ) }, "TiePointZoneGroupLocationScanCompact": { "value": np.array( [ 0, 2, 4, 6, 8, 10, 12, 14, 16, 45, 48, 52, 55, 59, 63, 67, 73, 78, 84, 89, 94, 99, 104, 109, 113, 119, 123, 128, 132, 156, 180, 184, 189, 193, 199, 203, 208, 213, 218, 223, 228, 234, 239, 245, 249, 253, 257, 260, 264, 267, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, ], dtype=np.int32, ) }, "TiePointZoneGroupLocationTrackCompact": {"value": 0}, "attrs": { "OriginalFilename": np.array( [ [ b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5" ] ], dtype="|S78", ) }, }, "VIIRS-DNB-SDR_All": { "NumberOfBadChecksums": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -993, ], dtype=np.int32, ) }, "NumberOfDiscardedPkts": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -993, ], dtype=np.int32, ) }, "NumberOfMissingPkts": { "value": np.array( [ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 18, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, -993, ], dtype=np.int32, ) }, "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)}, "QF1_VIIRSDNBSDR": { "value": (np.random.rand(768, 4064) * 255).astype(np.uint8) }, "QF2_SCAN_SDR": { "value": np.array( [ 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, ], dtype=np.uint8, ) }, "QF3_SCAN_RDR": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, ], dtype=np.uint8, ) }, "Radiance": {"value": np.random.rand(768, 4064).astype(np.float32)}, "attrs": { "OriginalFilename": np.array( [ [ b"SVDNB_j01_d20191025_t0611251_e0612478_b10015_c20191025062427398006_cspp_dev.h5" ] ], dtype="|S78", ), "PixelOffsetScan": np.array([[0.5]], dtype=np.float32), "PixelOffsetTrack": np.array([[0.5]], dtype=np.float32), "TiePointZoneGroupLocationScan": np.array( [ [0], [2], [4], [6], [8], [10], [12], [14], [16], [464], [496], [544], [576], [648], [720], [792], [872], 
[928], [1008], [1072], [1136], [1200], [1264], [1328], [1400], [1480], [1552], [1640], [1712], [1896], [2080], [2152], [2240], [2312], [2392], [2464], [2528], [2592], [2656], [2720], [2784], [2864], [2920], [3000], [3072], [3144], [3216], [3248], [3296], [3328], [3968], [3976], [3984], [3992], [4000], [4008], [4016], [4024], [4032], [4040], [4048], [4056], ], dtype=np.int32, ), "TiePointZoneGroupLocationTrack": np.array( [[0]], dtype=np.int32 ), "TiePointZoneSizeScan": np.array( [ [2], [2], [2], [2], [2], [2], [2], [2], [16], [16], [16], [16], [24], [24], [24], [16], [14], [16], [16], [16], [16], [16], [16], [24], [16], [24], [22], [24], [8], [8], [24], [22], [24], [16], [24], [16], [16], [16], [16], [16], [16], [14], [16], [24], [24], [24], [16], [16], [16], [16], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], ], dtype=np.int32, ), "TiePointZoneSizeTrack": np.array([[16]], dtype=np.int32), }, }, "attrs": {"MissionStartTime": np.array([[1698019234000000]])}, }, "Data_Products": { "VIIRS-DNB-GEO": { "VIIRS-DNB-GEO_Aggr": { "attrs": { "AggregateBeginningDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateBeginningGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateBeginningOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateBeginningTime": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "AggregateEndingDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateEndingGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateEndingOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateEndingTime": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "AggregateNumberGranules": np.array([[1]], dtype=np.uint64), } }, "VIIRS-DNB-GEO_Gran_0": { "attrs": { "Ascending/Descending_Indicator": np.array( [[1]], dtype=np.uint8 ), "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"), "Beginning_Time": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "East_Bounding_Coordinate": np.array( [[-45.09228]], dtype=np.float32 ), "Ending_Date": np.array([[b"20191025"]], dtype="|S9"), "Ending_Time": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "G-Ring_Latitude": np.array( [ [41.84151], [44.31062], [46.78565], [45.41409], [41.07657], [38.81504], [36.53401], [40.55788], ], dtype=np.float32, ), "G-Ring_Longitude": np.array( [ [-82.66234], [-82.55624], [-82.48891], [-62.80042], [-45.09228], [-46.58502], [-47.95933], [-64.54196], ], dtype=np.float32, ), "LeapSecondsGranuleStart": np.array([[37]], dtype=np.int32), "N_Algorithm_Version": np.array( [[b"1.O.000.014"]], dtype="|S12" ), "N_Anc_Filename": np.array( [ [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0691_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0692_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0693_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0719_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0720_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0721_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0722_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0723_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0724_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0725_1.O.0.0" ], [ b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa ], [ 
b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa ], ], dtype="|S104", ), "N_Aux_Filename": np.array( [ [ b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"CmnGeo-SAA-AC_j01_20151008180000Z_20170807130000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"TLE-AUX_j01_20191024053224Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-DNB-PARAM-LUT_j01_20180507121508Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-IMG-PARAM-LUT_j01_20180430182354Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-MOD-PARAM-LUT_j01_20180430182652Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], ], dtype="|S126", ), "N_Beginning_Orbit_Number": np.array( [[10015]], dtype=np.uint64 ), "N_Beginning_Time_IET": np.array( [[1950675122120971]], dtype=np.uint64 ), "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"), "N_Creation_Time": np.array( [[b"062136.412867Z"]], dtype="|S15" ), "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"), "N_Ending_Time_IET": np.array( [[1950675204849492]], dtype=np.uint64 ), "N_Granule_ID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"), "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"), "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"), "N_Input_Prod": np.array( [ [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"], ], dtype="|S40", ), "N_JPSS_Document_Ref": np.array( [ [ b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf" ], [ b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-GEO-PP.xml" ], [ b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf" ], ], dtype="|S68", ), "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"), "N_Nadir_Latitude_Max": np.array( [[45.3722]], dtype=np.float32 ), "N_Nadir_Latitude_Min": np.array( [[40.6172]], dtype=np.float32 ), "N_Nadir_Longitude_Max": np.array( [[-62.80047]], dtype=np.float32 ), "N_Nadir_Longitude_Min": np.array( [[-64.51342]], dtype=np.float32 ), "N_Number_Of_Scans": np.array([[47]], dtype=np.int32), "N_Primary_Label": np.array( [[b"Non-Primary"]], dtype="|S12" ), "N_Quality_Summary_Names": np.array( [ [b"Automatic Quality Flag"], [b"Percent Missing Data"], [b"Percent Out of Bounds"], ], dtype="|S23", ), "N_Quality_Summary_Values": np.array( [[1], [61], [0]], dtype=np.int32 ), "N_Reference_ID": np.array( [[b"VIIRS-DNB-GEO:J01002526558865:A1"]], dtype="|S33" ), "N_Software_Version": np.array( [[b"CSPP_SDR_3_1_3"]], dtype="|S15" ), "N_Spacecraft_Maneuver": np.array( [[b"Normal Operations"]], dtype="|S18" ), "North_Bounding_Coordinate": np.array( [[46.8018]], dtype=np.float32 ), "South_Bounding_Coordinate": np.array( [[36.53401]], dtype=np.float32 ), "West_Bounding_Coordinate": np.array( [[-82.66234]], dtype=np.float32 ), } }, "attrs": { "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"), "N_Anc_Type_Tasked": np.array([[b"Official"]], dtype="|S9"), "N_Collection_Short_Name": np.array( [[b"VIIRS-DNB-GEO"]], dtype="|S14" ), "N_Dataset_Type_Tag": np.array([[b"GEO"]], dtype="|S4"), 
"N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"), "Operational_Mode": np.array( [[b"J01 Normal Operations, VIIRS Operational"]], dtype="|S41", ), }, }, "VIIRS-DNB-SDR": { "VIIRS-DNB-SDR_Aggr": { "attrs": { "AggregateBeginningDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateBeginningGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateBeginningOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateBeginningTime": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "AggregateEndingDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateEndingGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateEndingOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateEndingTime": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "AggregateNumberGranules": np.array([[1]], dtype=np.uint64), } }, "VIIRS-DNB-SDR_Gran_0": { "attrs": { "Ascending/Descending_Indicator": np.array( [[1]], dtype=np.uint8 ), "Band_ID": np.array([[b"N/A"]], dtype="|S4"), "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"), "Beginning_Time": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "East_Bounding_Coordinate": np.array( [[-45.09281]], dtype=np.float32 ), "Ending_Date": np.array([[b"20191025"]], dtype="|S9"), "Ending_Time": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "G-Ring_Latitude": np.array( [ [41.84157], [44.31069], [46.78591], [45.41409], [41.07675], [38.81512], [36.53402], [40.55788], ], dtype=np.float32, ), "G-Ring_Longitude": np.array( [ [-82.65787], [-82.55148], [-82.47269], [-62.80042], [-45.09281], [-46.58528], [-47.95936], [-64.54196], ], dtype=np.float32, ), "N_Algorithm_Version": np.array( [[b"1.O.000.015"]], dtype="|S12" ), "N_Anc_Filename": np.array( [ [ b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa ], [ b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa ], ], dtype="|S104", ), "N_Aux_Filename": np.array( [ [ b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-DNB-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M10-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M11-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M12-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M13-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ 
b"VIIRS-M14-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M15-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M16-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M6-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M7-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M8-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M9-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-RSBAUTOCAL-HISTORY-AUX_j01_20191024021527Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa ], [ b"VIIRS-RSBAUTOCAL-VOLT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-EDD154640-109C-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-BB-TEMP-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-CAL-AUTOMATE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Pred-SideA-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-COEFF-A-LUT_j01_20180109114311Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-COEFF-B-LUT_j01_20180109101739Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-004-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DELTA-C-LUT_j01_20180109000000Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DG-ANOMALY-DN-LIMITS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-DN0-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-026-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-FRAME-TO-ZONE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-GAIN-RATIOS-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-025-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-LGS-GAINS-LUT_j01_20180413122703Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-005-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-STRAY-LIGHT-CORRECTION-LUT_j01_20190930160523Z_20191001000000Z_ee00000000000000Z_PS-1-O-CCR-4322-024-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-EBBT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ 
b"VIIRS-SDR-EMISSIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-F-PREDICTED-LUT_j01_20180413123333Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GAIN-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-HAM-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBC-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBC-RR-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBS-TO-PIXELS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RADIOMETRIC-PARAM-V3-LUT_j01_20161117000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-REFLECTIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RELATIVE-SPECTRAL-RESPONSE-LUT_j01_20161031000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-FusedM9-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RTA-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-M16-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-SOLAR-IRAD-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Thuillier2002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-TELE-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa ], ], dtype="|S151", ), "N_Beginning_Orbit_Number": np.array( [[10015]], dtype=np.uint64 ), "N_Beginning_Time_IET": np.array( [[1950675122120971]], dtype=np.uint64 ), "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"), "N_Creation_Time": np.array( [[b"062411.116253Z"]], dtype="|S15" ), "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"), "N_Ending_Time_IET": np.array( [[1950675204849492]], dtype=np.uint64 ), "N_Graceful_Degradation": np.array([[b"No"]], dtype="|S3"), "N_Granule_ID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"), "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"), "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"), "N_Input_Prod": np.array( [ [b"GEO-VIIRS-OBC-IP:J01002526558865:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"], [b"VIIRS-DNB-GEO:J01002526558865:A1"], [b"VIIRS-IMG-RGEO-TC:J01002526558865:A1"], [b"VIIRS-MOD-RGEO-TC:J01002526558865:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558012:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"], ], dtype="|S40", ), "N_JPSS_Document_Ref": np.array( [ [ b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf" ], [ b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-SDR-PP.xml" ], [ b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf" ], ], dtype="|S68", ), "N_LEOA_Flag": 
np.array([[b"On"]], dtype="|S3"), "N_Nadir_Latitude_Max": np.array( [[45.3722]], dtype=np.float32 ), "N_Nadir_Latitude_Min": np.array( [[40.6172]], dtype=np.float32 ), "N_Nadir_Longitude_Max": np.array( [[-62.80047]], dtype=np.float32 ), "N_Nadir_Longitude_Min": np.array( [[-64.51342]], dtype=np.float32 ), "N_Number_Of_Scans": np.array([[47]], dtype=np.int32), "N_Percent_Erroneous_Data": np.array( [[0.0]], dtype=np.float32 ), "N_Percent_Missing_Data": np.array( [[51.05127]], dtype=np.float32 ), "N_Percent_Not-Applicable_Data": np.array( [[0.0]], dtype=np.float32 ), "N_Primary_Label": np.array( [[b"Non-Primary"]], dtype="|S12" ), "N_Quality_Summary_Names": np.array( [ [b"Scan Quality Exclusion"], [b"Summary VIIRS SDR Quality"], ], dtype="|S26", ), "N_Quality_Summary_Values": np.array( [[24], [49]], dtype=np.int32 ), "N_RSB_Index": np.array([[17]], dtype=np.int32), "N_Reference_ID": np.array( [[b"VIIRS-DNB-SDR:J01002526558865:A1"]], dtype="|S33" ), "N_Satellite/Local_Azimuth_Angle_Max": np.array( [[179.9995]], dtype=np.float32 ), "N_Satellite/Local_Azimuth_Angle_Min": np.array( [[-179.9976]], dtype=np.float32 ), "N_Satellite/Local_Zenith_Angle_Max": np.array( [[69.83973]], dtype=np.float32 ), "N_Satellite/Local_Zenith_Angle_Min": np.array( [[0.00898314]], dtype=np.float32 ), "N_Software_Version": np.array( [[b"CSPP_SDR_3_1_3"]], dtype="|S15" ), "N_Solar_Azimuth_Angle_Max": np.array( [[73.93496]], dtype=np.float32 ), "N_Solar_Azimuth_Angle_Min": np.array( [[23.83542]], dtype=np.float32 ), "N_Solar_Zenith_Angle_Max": np.array( [[147.5895]], dtype=np.float32 ), "N_Solar_Zenith_Angle_Min": np.array( [[126.3929]], dtype=np.float32 ), "N_Spacecraft_Maneuver": np.array( [[b"Normal Operations"]], dtype="|S18" ), "North_Bounding_Coordinate": np.array( [[46.8018]], dtype=np.float32 ), "South_Bounding_Coordinate": np.array( [[36.53402]], dtype=np.float32 ), "West_Bounding_Coordinate": np.array( [[-82.65787]], dtype=np.float32 ), } }, "attrs": { "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"), "N_Collection_Short_Name": np.array( [[b"VIIRS-DNB-SDR"]], dtype="|S14" ), "N_Dataset_Type_Tag": np.array([[b"SDR"]], dtype="|S4"), "N_Instrument_Flight_SW_Version": np.array( [[20], [65534]], dtype=np.int32 ), "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"), "Operational_Mode": np.array( [[b"J01 Normal Operations, VIIRS Operational"]], dtype="|S41", ), }, }, }, "attrs": { "CVIIRS_Version": np.array([[b"2.0.1"]], dtype="|S5"), "Compact_VIIRS_SDR_Version": np.array([[b"3.1"]], dtype="|S3"), "Distributor": np.array([[b"cspp"]], dtype="|S5"), "Mission_Name": np.array([[b"JPSS-1"]], dtype="|S7"), "N_Dataset_Source": np.array([[b"all-"]], dtype="|S5"), "N_GEO_Ref": np.array( [ [ b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5" ] ], dtype="|S78", ), "N_HDF_Creation_Date": np.array([[b"20191025"]], dtype="|S8"), "N_HDF_Creation_Time": np.array([[b"062502.927000Z"]], dtype="|S14"), "Platform_Short_Name": np.array([[b"J01"]], dtype="|S4"), "Satellite_Id_Filename": np.array([[b"j01"]], dtype="|S3"), }, } self.filename = os.path.join( tempfile.gettempdir(), "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5", ) h5f = h5py.File(self.filename, mode="w") def fill_h5(root, stuff): for key, val in stuff.items(): if key in ["value", "attrs"]: continue if "value" in val: root[key] = val["value"] else: grp = root.create_group(key) fill_h5(grp, stuff[key]) if "attrs" in val: for attrs, val in val["attrs"].items(): root[key].attrs[attrs] = val 
fill_h5(h5f, fake_dnb) for attr, val in fake_dnb["attrs"].items(): h5f.attrs[attr] = val h5f.close() self.client = None def _dataset_iterator(self): from satpy.readers.viirs_compact import VIIRSCompactFileHandler from satpy.tests.utils import make_dataid filename_info = {} filetype_info = {'file_type': 'compact_dnb'} test = VIIRSCompactFileHandler(self.filename, filename_info, filetype_info) dsid = make_dataid(name='DNB', calibration='radiance') ds1 = test.get_dataset(dsid, {}) dsid = make_dataid(name='longitude_dnb') ds2 = test.get_dataset(dsid, {'standard_name': 'longitude'}) dsid = make_dataid(name='latitude_dnb') ds3 = test.get_dataset(dsid, {'standard_name': 'latitude'}) dsid = make_dataid(name='solar_zenith_angle') ds4 = test.get_dataset(dsid, {'standard_name': 'solar_zenith_angle'}) for ds in [ds1, ds2, ds3, ds4]: yield ds def test_get_dataset(self): """Retrieve datasets from a DNB file.""" for ds in self._dataset_iterator(): self.assertEqual(ds.shape, (752, 4064)) self.assertEqual(ds.dtype, np.float32) self.assertEqual(ds.compute().shape, (752, 4064)) self.assertEqual(ds.attrs['rows_per_scan'], 16) def test_distributed(self): """Check that distributed computations work.""" from dask.distributed import Client self.client = Client() for ds in self._dataset_iterator(): # Check that the computation is running fine. self.assertEqual(ds.compute().shape, (752, 4064)) def tearDown(self): """Destroy.""" with suppress(OSError): os.remove(self.filename) with suppress(AttributeError): self.client.close() satpy-0.34.0/satpy/tests/reader_tests/test_viirs_edr_active_fires.py000066400000000000000000000374211420401153000260430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """VIIRS Active Fires Tests. This module implements tests for VIIRS Active Fires NetCDF and ASCII file readers. 
""" import io import os import unittest from unittest import mock import dask.dataframe as dd import numpy as np import pandas as pd from satpy.readers.file_handlers import BaseFileHandler from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_SHAPE = (1, 100) DEFAULT_LATLON_FILE_DTYPE = np.float32 DEFAULT_LATLON_FILE_DATA = np.arange(start=43, stop=45, step=0.02, dtype=DEFAULT_LATLON_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_DETECTION_FILE_DTYPE = np.uint8 DEFAULT_DETECTION_FILE_DATA = np.arange(start=60, stop=100, step=0.4, dtype=DEFAULT_DETECTION_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_M13_FILE_DTYPE = np.float32 DEFAULT_M13_FILE_DATA = np.arange(start=300, stop=340, step=0.4, dtype=DEFAULT_M13_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_POWER_FILE_DTYPE = np.float32 DEFAULT_POWER_FILE_DATA = np.arange(start=1, stop=25, step=0.24, dtype=DEFAULT_POWER_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeModFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): """Swap in CDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content['/attr/data_id'] = "AFMOD" file_content['satellite_name'] = "npp" file_content['sensor'] = 'VIIRS' file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA file_content['Fire Pixels/FP_T13'] = DEFAULT_M13_FILE_DATA file_content['Fire Pixels/FP_T13/attr/units'] = 'kelvins' file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA file_content['Fire Pixels/attr/units'] = 'none' file_content['Fire Pixels/shape'] = DEFAULT_FILE_SHAPE attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') convert_file_content_to_data_array( file_content, attrs=attrs, dims=('z', 'fakeDim0', 'fakeDim1')) return file_content class FakeImgFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): """Swap in CDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content['/attr/data_id'] = "AFIMG" file_content['satellite_name'] = "npp" file_content['sensor'] = 'VIIRS' file_content['FP_latitude'] = DEFAULT_LATLON_FILE_DATA file_content['FP_longitude'] = DEFAULT_LATLON_FILE_DATA file_content['FP_power'] = DEFAULT_POWER_FILE_DATA file_content['FP_T4'] = DEFAULT_M13_FILE_DATA file_content['FP_T4/attr/units'] = 'kelvins' file_content['FP_confidence'] = DEFAULT_DETECTION_FILE_DATA attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') convert_file_content_to_data_array( file_content, attrs=attrs, dims=('z', 'fakeDim0', 'fakeDim1')) return file_content class FakeModFiresTextFileHandler(BaseFileHandler): """Fake file handler for text files at moderate resolution.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" super(FakeModFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content() platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") def get_test_content(self): """Create fake test file content.""" fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 
317.38290405, 0.75, 0.75, 40, 4.28618050 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", "T13", "Along-scan", "Along-track", "confidence_pct", "power"]), chunksize=1) class FakeImgFiresTextFileHandler(BaseFileHandler): """Fake file handler for text files at image resolution.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" super(FakeImgFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content() def get_test_content(self): """Create fake test file content.""" fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", "T4", "Along-scan", "Along-track", "confidence_cat", "power"]), chunksize=1) class TestModVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = 'viirs_edr_active_fires.yaml' def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeModFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the CDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' ]) r.create_filehandlers(loadables) datasets = r.load(['confidence_pct']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], '%') self.assertEqual(v.attrs['_FillValue'], 255) self.assertTrue(np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE)) datasets = r.load(['T13']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') datasets = r.load(['power']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'MW') self.assertEqual(v.attrs['platform_name'], 'NOAA-21') self.assertEqual(v.attrs['sensor'], 'VIIRS') class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = 'viirs_edr_active_fires.yaml' def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler 
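        # This class exercises the image-resolution (AFIMG) NetCDF layout: the fake
        # content in FakeImgFiresNetCDF4FileHandler above exposes FP_T4 rather than
        # FP_T13, which is why the tests below load 'T4' and the categorical
        # 'confidence_cat' dataset instead of 'T13'/'confidence_pct'.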
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeImgFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the CDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' ]) r.create_filehandlers(loadables) datasets = r.load(['confidence_cat']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], '1') self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) datasets = r.load(['T4']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') datasets = r.load(['power']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'MW') self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') self.assertEqual(v.attrs['sensor'], 'VIIRS') @mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') class TestModVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = 'viirs_edr_active_fires.yaml' def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeModFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the text file handler.""" self.p.stop() def test_init(self, mock_obj): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self, csv_mock): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' ]) r.create_filehandlers(loadables) datasets = r.load(['confidence_pct']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], '%') datasets = r.load(['T13']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') datasets = r.load(['power']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'MW') self.assertEqual(v.attrs['platform_name'], 'NOAA-20') self.assertEqual(v.attrs['sensor'], 'VIIRS') 
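# Note on the mocking pattern shared by the test classes in this module: the real
# file handler class is never pointed at an actual file.  Instead its __bases__
# tuple is patched to a fake in-memory handler, roughly (a sketch of the setUp
# code above, not additional test logic):
#
#     p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__',
#                           (FakeModFiresTextFileHandler,))
#     p.start()
#     p.is_local = True  # presumably needed so stop() restores __bases__ instead
#                        # of trying to delete it, which Python would refuse
#
# The same pattern (with a Stack Overflow reference) is used in the other VIIRS
# reader tests in this package.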
@mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') class TestImgVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = 'viirs_edr_active_fires.yaml' def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeImgFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the text file handler.""" self.p.stop() def test_init(self, mock_obj): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self, mock_obj): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' ]) r.create_filehandlers(loadables) datasets = r.load(['confidence_cat']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], '1') self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) datasets = r.load(['T4']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') datasets = r.load(['power']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'MW') self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') self.assertEqual(v.attrs['sensor'], 'VIIRS') satpy-0.34.0/satpy/tests/reader_tests/test_viirs_edr_flood.py000066400000000000000000000125011420401153000244730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the VIIRS EDR Flood reader.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeHDF4FileHandler2(FakeHDF4FileHandler): """Swap in HDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content['/attr/Satellitename'] = filename_info['platform_shortname'] file_content['/attr/SensorIdentifyCode'] = 'VIIRS' # only one dataset for the flood reader file_content['WaterDetection'] = DEFAULT_FILE_DATA file_content['WaterDetection/attr/_Fillvalue'] = 1 file_content['WaterDetection/attr/scale_factor'] = 1. file_content['WaterDetection/attr/add_offset'] = 0. file_content['WaterDetection/attr/units'] = 'none' file_content['WaterDetection/shape'] = DEFAULT_FILE_SHAPE file_content['WaterDetection/attr/ProjectionMinLatitude'] = 15. file_content['WaterDetection/attr/ProjectionMaxLatitude'] = 68. file_content['WaterDetection/attr/ProjectionMinLongitude'] = -124. file_content['WaterDetection/attr/ProjectionMaxLongitude'] = -61. # convert tp xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} for a in ['_Fillvalue', 'units', 'ProjectionMinLatitude', 'ProjectionMaxLongitude', 'ProjectionMinLongitude', 'ProjectionMaxLatitude']: if key + '/attr/' + a in file_content: attrs[a] = file_content[key + '/attr/' + a] if val.ndim > 1: file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs) else: file_content[key] = DataArray(val, attrs=attrs) if 'y' not in file_content['WaterDetection'].dims: file_content['WaterDetection'] = file_content['WaterDetection'].rename({'fakeDim0': 'x', 'fakeDim1': 'y'}) return file_content class TestVIIRSEDRFloodReader(unittest.TestCase): """Test VIIRS EDR Flood Reader.""" yaml_file = 'viirs_edr_flood.yaml' def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_flood import VIIRSEDRFlood self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSEDRFlood, '__bases__', (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self): """Test loading all datasets from a full swath file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' ]) r.create_filehandlers(loadables) datasets = r.load(['WaterDetection']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'none') def test_load_dataset_aoi(self): """Test loading all datasets from an area of interest file.""" from 
satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf' ]) r.create_filehandlers(loadables) datasets = r.load(['WaterDetection']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'none') satpy-0.34.0/satpy/tests/reader_tests/test_viirs_l1b.py000066400000000000000000000361341420401153000232240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.viirs_l1b module.""" import os import unittest from datetime import datetime, timedelta from unittest import mock import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() # num_lines = { # 'vl1bi': 3248 * 2, # 'vl1bm': 3248, # 'vl1bd': 3248, # }[file_type] # num_pixels = { # 'vl1bi': 6400, # 'vl1bm': 3200, # 'vl1bd': 4064, # }[file_type] # num_scans = 203 # num_luts = 65536 num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] num_scans = 5 num_luts = DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] file_content = { '/dimension/number_of_scans': num_scans, '/dimension/number_of_lines': num_lines, '/dimension/number_of_pixels': num_pixels, '/dimension/number_of_LUT_values': num_luts, '/attr/time_coverage_start': dt.strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/orbit_number': 26384, '/attr/instrument': 'viirs', '/attr/platform': 'Suomi-NPP', } self._fill_contents_with_default_data(file_content, file_type) self._set_dataset_specific_metadata(file_content) convert_file_content_to_data_array(file_content) return file_content @staticmethod def _fill_contents_with_default_data(file_content, file_type): """Fill file contents with default data.""" if file_type.startswith('vgeo'): 
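            # Geolocation files carry lat/lon plus solar and sensor angle datasets;
            # DNB geolocation files (file types ending in 'd') additionally provide
            # lunar zenith/azimuth angles below.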
file_content['/attr/OrbitNumber'] = file_content.pop('/attr/orbit_number') file_content['geolocation_data/latitude'] = DEFAULT_LAT_DATA file_content['geolocation_data/longitude'] = DEFAULT_LON_DATA file_content['geolocation_data/solar_zenith'] = DEFAULT_LON_DATA file_content['geolocation_data/solar_azimuth'] = DEFAULT_LON_DATA file_content['geolocation_data/sensor_zenith'] = DEFAULT_LON_DATA file_content['geolocation_data/sensor_azimuth'] = DEFAULT_LON_DATA if file_type.endswith('d'): file_content['geolocation_data/lunar_zenith'] = DEFAULT_LON_DATA file_content['geolocation_data/lunar_azimuth'] = DEFAULT_LON_DATA elif file_type == 'vl1bm': file_content['observation_data/M01'] = DEFAULT_FILE_DATA file_content['observation_data/M02'] = DEFAULT_FILE_DATA file_content['observation_data/M03'] = DEFAULT_FILE_DATA file_content['observation_data/M04'] = DEFAULT_FILE_DATA file_content['observation_data/M05'] = DEFAULT_FILE_DATA file_content['observation_data/M06'] = DEFAULT_FILE_DATA file_content['observation_data/M07'] = DEFAULT_FILE_DATA file_content['observation_data/M08'] = DEFAULT_FILE_DATA file_content['observation_data/M09'] = DEFAULT_FILE_DATA file_content['observation_data/M10'] = DEFAULT_FILE_DATA file_content['observation_data/M11'] = DEFAULT_FILE_DATA file_content['observation_data/M12'] = DEFAULT_FILE_DATA file_content['observation_data/M13'] = DEFAULT_FILE_DATA file_content['observation_data/M14'] = DEFAULT_FILE_DATA file_content['observation_data/M15'] = DEFAULT_FILE_DATA file_content['observation_data/M16'] = DEFAULT_FILE_DATA elif file_type == 'vl1bi': file_content['observation_data/I01'] = DEFAULT_FILE_DATA file_content['observation_data/I02'] = DEFAULT_FILE_DATA file_content['observation_data/I03'] = DEFAULT_FILE_DATA file_content['observation_data/I04'] = DEFAULT_FILE_DATA file_content['observation_data/I05'] = DEFAULT_FILE_DATA elif file_type == 'vl1bd': file_content['observation_data/DNB_observations'] = DEFAULT_FILE_DATA file_content['observation_data/DNB_observations/attr/units'] = 'Watts/cm^2/steradian' @staticmethod def _set_dataset_specific_metadata(file_content): """Set dataset-specific metadata.""" for k in list(file_content.keys()): if not k.startswith('observation_data') and not k.startswith('geolocation_data'): continue file_content[k + '/shape'] = DEFAULT_FILE_SHAPE if k[-3:] in ['M12', 'M13', 'M14', 'M15', 'M16', 'I04', 'I05']: file_content[k + '_brightness_temperature_lut'] = DEFAULT_FILE_DATA.ravel() file_content[k + '_brightness_temperature_lut/attr/units'] = 'Kelvin' file_content[k + '_brightness_temperature_lut/attr/valid_min'] = 0 file_content[k + '_brightness_temperature_lut/attr/valid_max'] = 65534 file_content[k + '_brightness_temperature_lut/attr/_FillValue'] = 65535 file_content[k + '/attr/units'] = 'Watts/meter^2/steradian/micrometer' elif k[-3:] in ['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', 'I01', 'I02', 'I03']: file_content[k + '/attr/radiance_units'] = 'Watts/meter^2/steradian/micrometer' file_content[k + '/attr/radiance_scale_factor'] = 1.1 file_content[k + '/attr/radiance_add_offset'] = 0.1 elif k.endswith('longitude'): file_content[k + '/attr/units'] = 'degrees_east' elif k.endswith('latitude'): file_content[k + '/attr/units'] = 'degrees_north' elif k.endswith('zenith') or k.endswith('azimuth'): file_content[k + '/attr/units'] = 'degrees' file_content[k + '/attr/valid_min'] = 0 file_content[k + '/attr/valid_max'] = 65534 file_content[k + '/attr/_FillValue'] = 65535 file_content[k + '/attr/scale_factor'] = 1.1 
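                # Angle values are stored as scaled integers with an unsigned fill value;
                # together with the add_offset set just below, readers recover
                # physical = raw * scale_factor + add_offset.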
file_content[k + '/attr/add_offset'] = 0.1 class TestVIIRSL1BReader(unittest.TestCase): """Test VIIRS L1B Reader.""" yaml_file = "viirs_l1b.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_l1b import VIIRSL1BFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_every_m_band_bt(self): """Test loading all M band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['M12', 'M13', 'M14', 'M15', 'M16']) self.assertEqual(len(datasets), 5) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'brightness_temperature') self.assertEqual(v.attrs['units'], 'K') self.assertEqual(v.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) def test_load_every_m_band_refl(self): """Test loading all M band reflectances.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11']) self.assertEqual(len(datasets), 11) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'reflectance') self.assertEqual(v.attrs['units'], '%') self.assertEqual(v.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) def test_load_every_m_band_rad(self): """Test loading all M bands as radiances.""" from satpy.readers import load_reader from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load([make_dataid(name='M01', calibration='radiance'), make_dataid(name='M02', calibration='radiance'), make_dataid(name='M03', calibration='radiance'), make_dataid(name='M04', calibration='radiance'), make_dataid(name='M05', calibration='radiance'), make_dataid(name='M06', calibration='radiance'), make_dataid(name='M07', calibration='radiance'), make_dataid(name='M08', calibration='radiance'), make_dataid(name='M09', calibration='radiance'), make_dataid(name='M10', calibration='radiance'), make_dataid(name='M11', 
calibration='radiance'), make_dataid(name='M12', calibration='radiance'), make_dataid(name='M13', calibration='radiance'), make_dataid(name='M14', calibration='radiance'), make_dataid(name='M15', calibration='radiance'), make_dataid(name='M16', calibration='radiance')]) self.assertEqual(len(datasets), 16) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'radiance') self.assertEqual(v.attrs['units'], 'W m-2 um-1 sr-1') self.assertEqual(v.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) def test_load_dnb_radiance(self): """Test loading the main DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['DNB']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'radiance') self.assertEqual(v.attrs['units'], 'W m-2 sr-1') self.assertEqual(v.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) def test_load_dnb_angles(self): """Test loading all DNB angle datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['dnb_solar_zenith_angle', 'dnb_solar_azimuth_angle', 'dnb_satellite_zenith_angle', 'dnb_satellite_azimuth_angle', 'dnb_lunar_zenith_angle', 'dnb_lunar_azimuth_angle', ]) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'degrees') self.assertEqual(v.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) satpy-0.34.0/satpy/tests/reader_tests/test_viirs_sdr.py000066400000000000000000001275041420401153000233400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.viirs_sdr module.""" import os import unittest from contextlib import contextmanager from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (32, 300) # Mimicking one scan line of data DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO', 'SVDNB': 'VIIRS-DNB-SDR', 'GITCO': 'VIIRS-IMG-GEO-TC', 'GIMGO': 'VIIRS-IMG-GEO', 'SVI01': 'VIIRS-I1-SDR', 'SVI02': 'VIIRS-I2-SDR', 'SVI03': 'VIIRS-I3-SDR', 'SVI04': 'VIIRS-I4-SDR', 'SVI05': 'VIIRS-I5-SDR', 'GMTCO': 'VIIRS-MOD-GEO-TC', 'GMODO': 'VIIRS-MOD-GEO', 'SVM01': 'VIIRS-M1-SDR', 'SVM02': 'VIIRS-M2-SDR', 'SVM03': 'VIIRS-M3-SDR', 'SVM04': 'VIIRS-M4-SDR', 'SVM05': 'VIIRS-M5-SDR', 'SVM06': 'VIIRS-M6-SDR', 'SVM07': 'VIIRS-M7-SDR', 'SVM08': 'VIIRS-M8-SDR', 'SVM09': 'VIIRS-M9-SDR', 'SVM10': 'VIIRS-M10-SDR', 'SVM11': 'VIIRS-M11-SDR', 'SVM12': 'VIIRS-M12-SDR', 'SVM13': 'VIIRS-M13-SDR', 'SVM14': 'VIIRS-M14-SDR', 'SVM15': 'VIIRS-M15-SDR', 'SVM16': 'VIIRS-M16-SDR', } class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" _num_test_granules = 1 _num_scans_per_gran = [48] def __init__(self, filename, filename_info, filetype_info, include_factors=True): """Create fake file handler.""" self.include_factors = include_factors super(FakeHDF5FileHandler2, self).__init__(filename, filename_info, filetype_info) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): start_time = filename_info['start_time'] end_time = filename_info['end_time'].replace(year=start_time.year, month=start_time.month, day=start_time.day) begin_date = start_time.strftime('%Y%m%d') begin_time = start_time.strftime('%H%M%S.%fZ') ending_date = end_time.strftime('%Y%m%d') ending_time = end_time.strftime('%H%M%S.%fZ') new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, "{prefix2}/attr/AggregateBeginningTime": begin_time, "{prefix2}/attr/AggregateEndingDate": ending_date, "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), "{prefix1}/attr/Instrument_Short_Name": "VIIRS", "/attr/Platform_Short_Name": "NPP", } file_content.update(new_file_content) def _add_granule_specific_info_to_file_content( self, file_content, dataset_group, num_granules, num_scans_per_granule, gran_group_prefix): lons_lists = self._get_per_granule_lons() lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([48] * num_granules) for granule_idx in range(num_granules): prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = num_scans file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx] file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): return [ np.array( [ 
50.51393, 49.566296, 48.865967, 18.96082, -4.0238385, -7.05221, -10.405702, 14.638646 ], dtype=np.float32), np.array( [ 53.52594, 51.685738, 50.439102, 14.629087, -10.247547, -13.951393, -18.256989, 8.36572 ], dtype=np.float32), np.array( [ 59.386833, 55.770416, 53.38952, 8.353765, -18.062435, -22.608992, -27.867302, -1.3537619 ], dtype=np.float32), np.array( [ 72.50243, 64.17125, 59.15234, -1.3654504, -27.620953, -33.091743, -39.28113, -17.749891 ], dtype=np.float32) ] @staticmethod def _get_per_granule_lats(): return [ np.array( [ 67.969505, 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.86063, 66.495514 ], dtype=np.float32), np.array( [ 72.74879, 70.2493, 67.84738, 66.49691, 58.77254, 60.465942, 62.11525, 71.08249 ], dtype=np.float32), np.array( [ 77.393425, 74.977875, 72.62976, 71.083435, 62.036346, 63.465122, 64.78075, 75.36842 ], dtype=np.float32), np.array( [ 81.67615, 79.49934, 77.278656, 75.369415, 64.72178, 65.78417, 66.66166, 79.00025 ], dtype=np.float32), ] def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix, num_grans): # SDR files always produce data with 48 scans per granule even if there are less total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']: keys = ['Radiance', 'Reflectance'] elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']: keys = ['Radiance', 'BrightnessTemperature'] else: # DNB keys = ['Radiance'] for k in keys: k = data_var_prefix + "/" + k file_content[k] = np.repeat(DEFAULT_FILE_DATA.copy(), 48 * num_grans, axis=0) file_content[k + "/shape"] = new_shape if self.include_factors: file_content[k + "Factors"] = np.repeat( DEFAULT_FILE_FACTORS.copy()[None, :], num_grans, axis=0).ravel() @staticmethod def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefix, num_grans): # SDR files always produce data with 48 scans per granule even if there are less total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) is_dnb = filename[:5] not in ['GMODO', 'GIMGO'] if not is_dnb: lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) else: lon_data = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) for k in ["Latitude"]: k = data_var_prefix + "/" + k file_content[k] = lat_data file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape for k in ["Longitude"]: k = data_var_prefix + "/" + k file_content[k] = lon_data file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape angles = ['SolarZenithAngle', 'SolarAzimuthAngle', 'SatelliteZenithAngle', 'SatelliteAzimuthAngle'] if is_dnb: angles += ['LunarZenithAngle', 'LunarAzimuthAngle'] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape @staticmethod def _add_geo_ref(file_content, filename): if filename[:3] == 'SVI': geo_prefix = 'GIMGO' elif filename[:3] == 'SVM': geo_prefix = 'GMODO' else: geo_prefix = None if geo_prefix: file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] @staticmethod def 
_convert_numpy_content_to_dataarray(final_content): import dask.array as da from xarray import DataArray for key, val in final_content.items(): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 1: final_content[key] = DataArray(val, dims=('y', 'x')) else: final_content[key] = DataArray(val) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) file_content = {} self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) self._add_granule_specific_info_to_file_content(file_content, dataset_group, self._num_test_granules, self._num_scans_per_gran, prefix1) self._add_geo_ref(file_content, filename) for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v if filename[:3] in ['SVM', 'SVI', 'SVD']: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) elif filename[0] == 'G': self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) self._convert_numpy_content_to_dataarray(final_content) return final_content @contextmanager def touch_geo_files(*prefixes): """Create and then remove VIIRS SDR geolocation files.""" geofiles = [_touch_geo_file(prefix) for prefix in prefixes] try: yield geofiles finally: for filename in geofiles: os.remove(filename) def _touch_geo_file(prefix): geo_fn = prefix + '_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' open(geo_fn, 'w') return geo_fn class TestVIIRSSDRReader(unittest.TestCase): """Test VIIRS SDR Reader.""" yaml_file = "viirs_sdr.yaml" def _assert_reflectance_properties(self, data_arr, num_scans=16, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) self.assertEqual(data_arr.attrs['calibration'], 'reflectance') self.assertEqual(data_arr.attrs['units'], '%') self.assertEqual(data_arr.attrs['rows_per_scan'], num_scans) if with_area: self.assertIn('area', data_arr.attrs) self.assertIsNotNone(data_arr.attrs['area']) self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) else: self.assertNotIn('area', data_arr.attrs) def _assert_bt_properties(self, data_arr, num_scans=16, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) self.assertEqual(data_arr.attrs['calibration'], 'brightness_temperature') self.assertEqual(data_arr.attrs['units'], 'K') self.assertEqual(data_arr.attrs['rows_per_scan'], num_scans) if with_area: self.assertIn('area', data_arr.attrs) self.assertIsNotNone(data_arr.attrs['area']) self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) else: self.assertNotIn('area', data_arr.attrs) def _assert_dnb_radiance_properties(self, data_arr, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) self.assertEqual(data_arr.attrs['calibration'], 'radiance') self.assertEqual(data_arr.attrs['units'], 'W m-2 sr-1') self.assertEqual(data_arr.attrs['rows_per_scan'], 16) if with_area: self.assertIn('area', data_arr.attrs) self.assertIsNotNone(data_arr.attrs['area']) self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) 
else: self.assertNotIn('area', data_arr.attrs) def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_sdr import VIIRSSDRFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_init_start_time_beyond(self): """Test basic init with start_time after the provided files.""" from datetime import datetime from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ 'start_time': datetime(2012, 2, 26) }) fhs = r.create_filehandlers([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) self.assertEqual(len(fhs), 0) def test_init_end_time_beyond(self): """Test basic init with end_time before the provided files.""" from datetime import datetime from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ 'end_time': datetime(2012, 2, 24) }) fhs = r.create_filehandlers([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) self.assertEqual(len(fhs), 0) def test_init_start_end_time(self): """Test basic init with end_time before the provided files.""" from datetime import datetime from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ 'start_time': datetime(2012, 2, 24), 'end_time': datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_all_m_reflectances_no_geo(self): """Load all M band reflectances with no geo files provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) 
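        # No GMTCO/GMODO geolocation files are included in this file list, so the
        # reflectances loaded below are expected to come back without an 'area'
        # attribute (checked via with_area=False).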
r.create_filehandlers(loadables) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=False) def test_load_all_m_reflectances_find_geo(self): """Load all M band reflectances with geo files not specified but existing.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) def test_load_all_m_reflectances_provided_geo(self): """Load all M band reflectances with geo files provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) self.assertEqual(d.attrs['area'].lons.min(), 5) self.assertEqual(d.attrs['area'].lats.min(), 45) self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) def test_load_all_m_reflectances_use_nontc(self): """Load all M band reflectances but use non-TC geolocation.""" from satpy.readers 
import load_reader r = load_reader(self.reader_configs, use_tc=False) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables, {'use_tc': False}) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) self.assertEqual(d.attrs['area'].lons.min(), 15) self.assertEqual(d.attrs['area'].lats.min(), 55) self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) def test_load_all_m_reflectances_use_nontc2(self): """Load all M band reflectances but use non-TC geolocation because TC isn't available.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=None) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) with touch_geo_files("GMODO") as (geo_fn2,): r.create_filehandlers(loadables, {'use_tc': None}) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) self.assertEqual(d.attrs['area'].lons.min(), 15) self.assertEqual(d.attrs['area'].lats.min(), 55) self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) def test_load_all_m_bts(self): """Load 
all M band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['M12', 'M13', 'M14', 'M15', 'M16', ]) self.assertEqual(len(ds), 5) for d in ds.values(): self._assert_bt_properties(d, with_area=True) def test_load_dnb_sza_no_factors(self): """Load DNB solar zenith angle with no scaling factors. The angles in VIIRS SDRs should never have scaling factors so we test it that way. """ from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables, {'include_factors': False}) ds = r.load(['dnb_solar_zenith_angle', 'dnb_solar_azimuth_angle', 'dnb_satellite_zenith_angle', 'dnb_satellite_azimuth_angle', 'dnb_lunar_zenith_angle', 'dnb_lunar_azimuth_angle']) self.assertEqual(len(ds), 6) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) self.assertEqual(d.attrs['units'], 'degrees') self.assertEqual(d.attrs['rows_per_scan'], 16) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def test_load_all_m_radiances(self): """Load all M band radiances.""" from satpy.readers import load_reader from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load([ make_dsq(name='M01', calibration='radiance'), make_dsq(name='M02', calibration='radiance'), make_dsq(name='M03', calibration='radiance'), 
make_dsq(name='M04', calibration='radiance'), make_dsq(name='M05', calibration='radiance'), make_dsq(name='M06', calibration='radiance'), make_dsq(name='M07', calibration='radiance'), make_dsq(name='M08', calibration='radiance'), make_dsq(name='M09', calibration='radiance'), make_dsq(name='M10', calibration='radiance'), make_dsq(name='M11', calibration='radiance'), make_dsq(name='M12', calibration='radiance'), make_dsq(name='M13', calibration='radiance'), make_dsq(name='M14', calibration='radiance'), make_dsq(name='M15', calibration='radiance'), make_dsq(name='M16', calibration='radiance'), ]) self.assertEqual(len(ds), 16) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) self.assertEqual(d.attrs['calibration'], 'radiance') self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') self.assertEqual(d.attrs['rows_per_scan'], 16) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def test_load_dnb(self): """Load DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['DNB']) self.assertEqual(len(ds), 1) for d in ds.values(): data = d.values # default scale factors are 2 and offset 1 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 2 * 10000 + 1 * 10000 => 10000 self.assertEqual(data[0, 0], 10000) # the second value of 1 should be: # 1 * 2 * 10000 + 1 * 10000 => 30000 self.assertEqual(data[0, 1], 30000) self._assert_dnb_radiance_properties(d, with_area=True) def test_load_dnb_no_factors(self): """Load DNB dataset with no provided scale factors.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables, {'include_factors': False}) ds = r.load(['DNB']) self.assertEqual(len(ds), 1) for d in ds.values(): data = d.values # no scale factors, default factor 1 and offset 0 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 1 * 10000 + 0 * 10000 => 0 self.assertEqual(data[0, 0], 0) # the second value of 1 should be: # 1 * 1 * 10000 + 0 * 10000 => 10000 self.assertEqual(data[0, 1], 10000) self._assert_dnb_radiance_properties(d, with_area=True) def test_load_i_no_files(self): """Load I01 when only DNB files are provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) self.assertNotIn('I01', [x['name'] for x in r.available_dataset_ids]) ds = r.load(['I01']) self.assertEqual(len(ds), 0) def test_load_all_i_reflectances_provided_geo(self): """Load all I band reflectances with geo files provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 
'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['I01', 'I02', 'I03', ]) self.assertEqual(len(ds), 3) for d in ds.values(): self._assert_reflectance_properties(d, num_scans=32) self.assertEqual(d.attrs['area'].lons.min(), 5) self.assertEqual(d.attrs['area'].lats.min(), 45) self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 32) self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 32) def test_load_all_i_bts(self): """Load all I band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['I04', 'I05', ]) self.assertEqual(len(ds), 2) for d in ds.values(): self._assert_bt_properties(d, num_scans=32) def test_load_all_i_radiances(self): """Load all I band radiances.""" from satpy.readers import load_reader from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load([ make_dsq(name='I01', calibration='radiance'), make_dsq(name='I02', calibration='radiance'), make_dsq(name='I03', calibration='radiance'), make_dsq(name='I04', calibration='radiance'), make_dsq(name='I05', calibration='radiance'), ]) self.assertEqual(len(ds), 5) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) self.assertEqual(d.attrs['calibration'], 'radiance') self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') self.assertEqual(d.attrs['rows_per_scan'], 32) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler2): """Swap-in HDF5 File Handler with 4 VIIRS Granules per file.""" _num_test_granules = 4 _num_scans_per_gran = [48] * 4 class TestAggrVIIRSSDRReader(unittest.TestCase): """Test VIIRS SDR Reader.""" yaml_file = "viirs_sdr.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_sdr import VIIRSSDRFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_bounding_box(self): """Test bounding box.""" from satpy.readers import load_reader r = 
load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) # make sure we have some files expected_lons = [ 72.50243, 64.17125, 59.15234, 59.386833, 55.770416, 53.38952, 53.52594, 51.685738, 50.439102, 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385, -7.05221, -10.247547, -13.951393, -18.062435, -22.608992, -27.620953, -33.091743, -39.28113, -17.749891 ] expected_lats = [ 81.67615, 79.49934, 77.278656, 77.393425, 74.977875, 72.62976, 72.74879, 70.2493, 67.84738, 67.969505, 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.77254, 60.465942, 62.036346, 63.465122, 64.72178, 65.78417, 66.66166, 79.00025 ] lons, lats = r.file_handlers['generic_file'][0].get_bounding_box() np.testing.assert_allclose(lons, expected_lons) np.testing.assert_allclose(lats, expected_lats) class FakeShortHDF5FileHandlerAggr(FakeHDF5FileHandler2): """Fake file that has less scans than usual in a couple granules.""" _num_test_granules = 3 _num_scans_per_gran = [47, 48, 47] class TestShortAggrVIIRSSDRReader(unittest.TestCase): """Test VIIRS SDR Reader with a file that has truncated granules.""" yaml_file = "viirs_sdr.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_sdr import VIIRSSDRFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeShortHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_load_truncated_band(self): """Test loading a single truncated band.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(["I01"]) self.assertEqual(len(ds), 1) i01_data = ds["I01"].compute() expected_rows = sum(FakeShortHDF5FileHandlerAggr._num_scans_per_gran) * DEFAULT_FILE_SHAPE[0] self.assertEqual(i01_data.shape, (expected_rows, 300)) satpy-0.34.0/satpy/tests/reader_tests/test_virr_l1b.py000066400000000000000000000231411420401153000230440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Test for readers/virr_l1b.py.""" import os import unittest from unittest import mock import dask.array as da import numpy as np import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def make_test_data(self, dims): """Create fake test data.""" return xr.DataArray(da.from_array(np.ones([dim for dim in dims], dtype=np.float32) * 10, [dim for dim in dims])) def _make_file(self, platform_id, geolocation_prefix, l1b_prefix, ECWN, Emissive_units): dim_0 = 19 dim_1 = 20 test_file = { # Satellite data. '/attr/Day Or Night Flag': 'D', '/attr/Observing Beginning Date': '2018-12-25', '/attr/Observing Beginning Time': '21:41:47.090', '/attr/Observing Ending Date': '2018-12-25', '/attr/Observing Ending Time': '21:47:28.254', '/attr/Satellite Name': platform_id, '/attr/Sensor Identification Code': 'VIRR', # Emissive data. l1b_prefix + 'EV_Emissive': self.make_test_data([3, dim_0, dim_1]), l1b_prefix + 'EV_Emissive/attr/valid_range': [0, 50000], l1b_prefix + 'Emissive_Radiance_Scales': self.make_test_data([dim_0, dim_1]), l1b_prefix + 'EV_Emissive/attr/units': Emissive_units, l1b_prefix + 'Emissive_Radiance_Offsets': self.make_test_data([dim_0, dim_1]), '/attr/' + ECWN: [2610.31, 917.6268, 836.2546], # Reflectance data. l1b_prefix + 'EV_RefSB': self.make_test_data([7, dim_0, dim_1]), l1b_prefix + 'EV_RefSB/attr/valid_range': [0, 32767], l1b_prefix + 'EV_RefSB/attr/units': 'none', '/attr/RefSB_Cal_Coefficients': np.ones(14, dtype=np.float32) * 2 } for attribute in ['Latitude', 'Longitude', geolocation_prefix + 'SolarZenith', geolocation_prefix + 'SensorZenith', geolocation_prefix + 'SolarAzimuth', geolocation_prefix + 'SensorAzimuth']: test_file[attribute] = self.make_test_data([dim_0, dim_1]) test_file[attribute + '/attr/Intercept'] = 0. test_file[attribute + '/attr/units'] = 'degrees' if 'Solar' in attribute or 'Sensor' in attribute: test_file[attribute + '/attr/Slope'] = .01 if 'Azimuth' in attribute: test_file[attribute + '/attr/valid_range'] = [0, 18000] else: test_file[attribute + '/attr/valid_range'] = [-18000, 18000] else: test_file[attribute + '/attr/Slope'] = 1. if 'Longitude' == attribute: test_file[attribute + '/attr/valid_range'] = [-180., 180.] else: test_file[attribute + '/attr/valid_range'] = [-90., 90.] 
return test_file def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" if filename_info['platform_id'] == 'FY3B': return self._make_file('FY3B', '', '', 'Emmisive_Centroid_Wave_Number', 'milliWstts/m^2/cm^(-1)/steradian') return self._make_file(filename_info['platform_id'], 'Geolocation/', 'Data/', 'Emissive_Centroid_Wave_Number', 'none') class TestVIRRL1BReader(unittest.TestCase): """Test VIRR L1B Reader.""" yaml_file = "virr_l1b.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.virr_l1b import VIRR_L1B self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIRR_L1B, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def _band_helper(self, attributes, units, calibration, standard_name, file_type, band_index_size, resolution): self.assertEqual(units, attributes['units']) self.assertEqual(calibration, attributes['calibration']) self.assertEqual(standard_name, attributes['standard_name']) self.assertEqual(file_type, attributes['file_type']) self.assertTrue(attributes['band_index'] in range(band_index_size)) self.assertEqual(resolution, attributes['resolution']) self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) def _fy3_helper(self, platform_name, reader, Emissive_units): """Load channels and test accurate metadata.""" import datetime band_values = {'1': 22.0, '2': 22.0, '6': 22.0, '7': 22.0, '8': 22.0, '9': 22.0, '10': 22.0, '3': 496.542155, '4': 297.444511, '5': 288.956557, 'solar_zenith_angle': .1, 'satellite_zenith_angle': .1, 'solar_azimuth_angle': .1, 'satellite_azimuth_angle': .1, 'longitude': 10} if platform_name == 'FY3B': # updated 2015 coefficients band_values['1'] = -0.168 band_values['2'] = -0.2706 band_values['6'] = -1.5631 band_values['7'] = -0.2114 band_values['8'] = -0.171 band_values['9'] = -0.1606 band_values['10'] = -0.1328 datasets = reader.load([band for band in band_values]) for dataset in datasets: # Object returned by get_dataset. 
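# (datasets here is the DatasetDict returned by reader.load, which also accepts plain name strings as keys, so the lookup below resolves the matching dataset.)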
ds = datasets[dataset['name']] attributes = ds.attrs self.assertTrue(isinstance(ds.data, da.Array)) self.assertEqual('virr', attributes['sensor']) self.assertEqual(platform_name, attributes['platform_name']) self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes['start_time']) self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes['end_time']) self.assertEqual((19, 20), datasets[dataset['name']].shape) self.assertEqual(('y', 'x'), datasets[dataset['name']].dims) if dataset['name'] in ['1', '2', '6', '7', '8', '9', '10']: self._band_helper(attributes, '%', 'reflectance', 'toa_bidirectional_reflectance', 'virr_l1b', 7, 1000) elif dataset['name'] in ['3', '4', '5']: self._band_helper(attributes, Emissive_units, 'brightness_temperature', 'toa_brightness_temperature', 'virr_l1b', 3, 1000) elif dataset['name'] in ['longitude', 'latitude']: self.assertEqual('degrees', attributes['units']) self.assertTrue(attributes['standard_name'] in ['longitude', 'latitude']) self.assertEqual(['virr_l1b', 'virr_geoxx'], attributes['file_type']) self.assertEqual(1000, attributes['resolution']) else: self.assertEqual('degrees', attributes['units']) self.assertTrue( attributes['standard_name'] in ['solar_zenith_angle', 'sensor_zenith_angle', 'solar_azimuth_angle', 'sensor_azimuth_angle']) self.assertEqual(['virr_geoxx', 'virr_l1b'], attributes['file_type']) self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) self.assertEqual(band_values[dataset['name']], round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6)) assert "valid_range" not in ds.attrs def test_fy3b_file(self): """Test that FY3B files are recognized.""" from satpy.readers import load_reader FY3B_reader = load_reader(self.reader_configs) FY3B_file = FY3B_reader.select_files_from_pathnames(['tf2018359214943.FY3B-L_VIRRX_L1B.HDF']) self.assertEqual(1, len(FY3B_file)) FY3B_reader.create_filehandlers(FY3B_file) # Make sure we have some files self.assertTrue(FY3B_reader.file_handlers) self._fy3_helper('FY3B', FY3B_reader, 'milliWstts/m^2/cm^(-1)/steradian') def test_fy3c_file(self): """Test that FY3C files are recognized.""" from satpy.readers import load_reader FY3C_reader = load_reader(self.reader_configs) FY3C_files = FY3C_reader.select_files_from_pathnames(['tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF', 'tf2018359143912.FY3C-L_VIRRX_L1B.HDF']) self.assertEqual(2, len(FY3C_files)) FY3C_reader.create_filehandlers(FY3C_files) # Make sure we have some files self.assertTrue(FY3C_reader.file_handlers) self._fy3_helper('FY3C', FY3C_reader, '1') satpy-0.34.0/satpy/tests/test_compat.py000066400000000000000000000026771420401153000201360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Test backports and compatibility fixes.""" import gc from satpy._compat import CachedPropertyBackport class ClassWithCachedProperty: # noqa def __init__(self, x): # noqa self.x = x @CachedPropertyBackport def property(self): # noqa return 2 * self.x def test_cached_property_backport(): """Test cached property backport.""" c = ClassWithCachedProperty(1) assert c.property == 2 def test_cached_property_backport_releases_memory(): """Test that cached property backport releases memory.""" c1 = ClassWithCachedProperty(2) del c1 instances = [ obj for obj in gc.get_objects() if isinstance(obj, ClassWithCachedProperty) ] assert len(instances) == 0 satpy-0.34.0/satpy/tests/test_composites.py000066400000000000000000002130111420401153000210220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for compositors in composites/__init__.py.""" import os import unittest from datetime import datetime from unittest import mock import dask import dask.array as da import numpy as np import pytest import xarray as xr class TestMatchDataArrays(unittest.TestCase): """Test the utility method 'match_data_arrays'.""" def _get_test_ds(self, shape=(50, 100), dims=('y', 'x')): """Get a fake DataArray.""" from pyresample.geometry import AreaDefinition data = da.random.random(shape, chunks=25) area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, shape[dims.index('x')], shape[dims.index('y')], (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) attrs = {'area': area} return xr.DataArray(data, dims=dims, attrs=attrs) def test_single_ds(self): """Test a single dataset is returned unharmed.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() comp = CompositeBase('test_comp') ret_datasets = comp.match_data_arrays((ds1,)) assert ret_datasets[0].identical(ds1) def test_mult_ds_area(self): """Test multiple datasets successfully pass.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() comp = CompositeBase('test_comp') ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) def test_mult_ds_no_area(self): """Test that all datasets must have an area attribute.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() del ds2.attrs['area'] comp = CompositeBase('test_comp') self.assertRaises(ValueError, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_area(self): """Test that datasets with different areas fail.""" from pyresample.geometry import AreaDefinition from satpy.composites import CompositeBase, IncompatibleAreas ds1 = self._get_test_ds() ds2 = self._get_test_ds() ds2.attrs['area'] = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, 100, 50, (-30037508.34, -20018754.17, 10037508.34, 
18754.17)) comp = CompositeBase('test_comp') self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_dims(self): """Test that datasets with different dimensions still pass.""" from satpy.composites import CompositeBase # x is still 50, y is still 100, even though they are in # different order ds1 = self._get_test_ds(shape=(50, 100), dims=('y', 'x')) ds2 = self._get_test_ds(shape=(3, 100, 50), dims=('bands', 'x', 'y')) comp = CompositeBase('test_comp') ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) def test_mult_ds_diff_size(self): """Test that datasets with different sizes fail.""" from satpy.composites import CompositeBase, IncompatibleAreas # x is 50 in this one, 100 in ds2 # y is 100 in this one, 50 in ds2 ds1 = self._get_test_ds(shape=(50, 100), dims=('x', 'y')) ds2 = self._get_test_ds(shape=(3, 50, 100), dims=('bands', 'y', 'x')) comp = CompositeBase('test_comp') self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_nondimensional_coords(self): """Test the removal of non-dimensional coordinates when compositing.""" from satpy.composites import CompositeBase ds = self._get_test_ds(shape=(2, 2)) ds['acq_time'] = ('y', [0, 1]) comp = CompositeBase('test_comp') ret_datasets = comp.match_data_arrays([ds, ds]) self.assertNotIn('acq_time', ret_datasets[0].coords) class TestRatioSharpenedCompositors(unittest.TestCase): """Test RatioSharpenedRGB and SelfSharpendRGB compositors.""" def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {'area': area, 'start_time': datetime(2018, 1, 1, 18), 'modifiers': tuple(), 'resolution': 1000, 'name': 'test_vis'} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds2.attrs['name'] += '2' self.ds2 = ds2 ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds3.attrs['name'] += '3' self.ds3 = ds3 ds4 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 4, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds4.attrs['name'] += '4' ds4.attrs['resolution'] = 500 self.ds4 = ds4 # high res version ds4 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4, attrs=attrs.copy(), dims=('y', 'x'), coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) ds4.attrs['name'] += '4' ds4.attrs['resolution'] = 500 ds4.attrs['rows_per_scan'] = 1 ds4.attrs['area'] = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds4_big = ds4 def test_bad_color(self): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad') def test_match_data_arrays(self): """Test that all of the areas have to be the same resolution.""" from satpy.composites import IncompatibleAreas, RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) def test_more_than_three_datasets(self): """Test that only 3 datasets can 
be passed.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) def test_basic_no_high_res(self): """Test that three datasets can be passed without optional high res.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') res = comp((self.ds1, self.ds2, self.ds3)) self.assertEqual(res.shape, (3, 2, 2)) def test_basic_no_sharpen(self): """Test that color None does no sharpening.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color', high_resolution_band=None) res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) self.assertEqual(res.shape, (3, 2, 2)) def test_basic_red(self): """Test that basic high resolution red can be passed.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) res = res.values self.assertEqual(res.shape, (3, 2, 2)) np.testing.assert_allclose(res[0], self.ds4.values) np.testing.assert_allclose(res[1], np.array([[4.5, 4.5], [4.5, 4.5]], dtype=np.float64)) np.testing.assert_allclose(res[2], np.array([[6, 6], [6, 6]], dtype=np.float64)) def test_self_sharpened_no_high_res(self): """Test for exception when no high res band is specified.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds3)) def test_self_sharpened_basic(self): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color') res = comp((self.ds1, self.ds2, self.ds3)) res = res.values self.assertEqual(res.shape, (3, 2, 2)) np.testing.assert_allclose(res[0], self.ds1.values) np.testing.assert_allclose(res[1], np.array([[3, 3], [3, 3]], dtype=np.float64)) np.testing.assert_allclose(res[2], np.array([[4, 4], [4, 4]], dtype=np.float64)) class TestDifferenceCompositor(unittest.TestCase): """Test case for the difference compositor.""" def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {'area': area, 'start_time': datetime(2018, 1, 1, 18), 'modifiers': tuple(), 'resolution': 1000, 'name': 'test_vis'} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds2.attrs['name'] += '2' self.ds2 = ds2 # high res version ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4, attrs=attrs.copy(), dims=('y', 'x'), coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) ds2.attrs['name'] += '2' ds2.attrs['resolution'] = 500 ds2.attrs['rows_per_scan'] = 1 ds2.attrs['area'] = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds2_big = ds2 def test_basic_diff(self): """Test that a basic difference composite works.""" from satpy.composites import DifferenceCompositor comp = DifferenceCompositor(name='diff') res = comp((self.ds1, self.ds2), standard_name='temperature_difference') np.testing.assert_allclose(res.values, -2) assert 
res.attrs.get('standard_name') == 'temperature_difference' def test_bad_areas_diff(self): """Test that a difference where resolutions are different fails.""" from satpy.composites import DifferenceCompositor, IncompatibleAreas comp = DifferenceCompositor(name='diff') # too many arguments self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds2_big)) # different resolution self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2_big)) class TestDayNightCompositor(unittest.TestCase): """Test DayNightCompositor.""" def setUp(self): """Create test data.""" bands = ['R', 'G', 'B'] start_time = datetime(2018, 1, 1, 18, 0, 0) # RGB a = np.zeros((3, 2, 2), dtype=np.float64) a[:, 0, 0] = 0.1 a[:, 0, 1] = 0.2 a[:, 1, 0] = 0.3 a[:, 1, 1] = 0.4 a = da.from_array(a, a.shape) self.data_a = xr.DataArray(a, attrs={'test': 'a', 'start_time': start_time}, coords={'bands': bands}, dims=('bands', 'y', 'x')) b = np.zeros((3, 2, 2), dtype=np.float64) b[:, 0, 0] = np.nan b[:, 0, 1] = 0.25 b[:, 1, 0] = 0.50 b[:, 1, 1] = 0.75 b = da.from_array(b, b.shape) self.data_b = xr.DataArray(b, attrs={'test': 'b', 'start_time': start_time}, coords={'bands': bands}, dims=('bands', 'y', 'x')) sza = np.array([[80., 86.], [94., 100.]]) sza = da.from_array(sza, sza.shape) self.sza = xr.DataArray(sza, dims=('y', 'x')) # fake area my_area = mock.MagicMock() lons = np.array([[-95., -94.], [-93., -92.]]) lons = da.from_array(lons, lons.shape) lats = np.array([[40., 41.], [42., 43.]]) lats = da.from_array(lats, lats.shape) my_area.get_lonlats.return_value = (lons, lats) self.data_a.attrs['area'] = my_area self.data_b.attrs['area'] = my_area # not used except to check that it matches the data arrays self.sza.attrs['area'] = my_area def test_daynight_sza(self): """Test compositor with both day and night portions when SZA data is included.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test', day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() expected = np.array([[0., 0.22122352], [0.5, 1.]]) np.testing.assert_allclose(res.values[0], expected) def test_daynight_area(self): """Test compositor both day and night portions when SZA data is not provided.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test', day_night="day_night") res = comp((self.data_a, self.data_b)) res = res.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) def test_night_only_sza(self): """Test compositor with night portion when SZA data is included.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test', day_night="night_only") res = comp((self.data_b, self.sza)) res = res.compute() expected = np.array([[np.nan, 0.], [0.5, 1.]]) np.testing.assert_allclose(res.values[0], expected) def test_night_only_area(self): """Test compositor with night portion when SZA data is not provided.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test', day_night="night_only") res = comp((self.data_b)) res = res.compute() expected = np.array([[np.nan, 0.], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected) def test_day_only_sza(self): """Test compositor with day portion when SZA data is included.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test', day_night="day_only") res = comp((self.data_a, self.sza)) res = res.compute() expected = np.array([[0., 0.22122352], 
[0., 0.]]) np.testing.assert_allclose(res.values[0], expected) def test_day_only_area(self): """Test compositor with day portion when SZA data is not provided.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test', day_night="day_only") res = comp((self.data_a)) res = res.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) class TestFillingCompositor(unittest.TestCase): """Test case for the filling compositor.""" def test_fill(self): """Test filling.""" from satpy.composites import FillingCompositor comp = FillingCompositor(name='fill_test') filler = xr.DataArray(np.array([1, 2, 3, 4, 3, 2, 1])) red = xr.DataArray(np.array([1, 2, 3, np.nan, 3, 2, 1])) green = xr.DataArray(np.array([np.nan, 2, 3, 4, 3, 2, np.nan])) blue = xr.DataArray(np.array([4, 3, 2, 1, 2, 3, 4])) res = comp([filler, red, green, blue]) np.testing.assert_allclose(res.sel(bands='R').data, filler.data) np.testing.assert_allclose(res.sel(bands='G').data, filler.data) np.testing.assert_allclose(res.sel(bands='B').data, blue.data) class TestMultiFiller(unittest.TestCase): """Test case for the MultiFiller compositor.""" def test_fill(self): """Test filling.""" from satpy.composites import MultiFiller comp = MultiFiller(name='fill_test') a = xr.DataArray(np.array([1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan])) b = xr.DataArray(np.array([np.nan, 2, 3, np.nan, np.nan, np.nan, np.nan])) c = xr.DataArray(np.array([np.nan, 22, 3, np.nan, np.nan, np.nan, 7])) d = xr.DataArray(np.array([np.nan, np.nan, np.nan, np.nan, np.nan, 6, np.nan])) e = xr.DataArray(np.array([np.nan, np.nan, np.nan, np.nan, 5, np.nan, np.nan])) expected = xr.DataArray(np.array([1, 2, 3, np.nan, 5, 6, 7])) res = comp([a, b, c], optional_datasets=[d, e]) np.testing.assert_allclose(res.data, expected.data) class TestLuminanceSharpeningCompositor(unittest.TestCase): """Test luminance sharpening compositor.""" def test_compositor(self): """Test luminance sharpening compositor.""" from satpy.composites import LuminanceSharpeningCompositor comp = LuminanceSharpeningCompositor(name='test') # Three shades of grey rgb_arr = np.array([1, 50, 100, 200, 1, 50, 100, 200, 1, 50, 100, 200]) rgb = xr.DataArray(rgb_arr.reshape((3, 2, 2)), dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B']}) # 100 % luminance -> all result values ~1.0 lum = xr.DataArray(np.array([[100., 100.], [100., 100.]]), dims=['y', 'x']) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # 50 % luminance, all result values ~0.5 lum = xr.DataArray(np.array([[50., 50.], [50., 50.]]), dims=['y', 'x']) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.5, atol=1e-9) # 30 % luminance, all result values ~0.3 lum = xr.DataArray(np.array([[30., 30.], [30., 30.]]), dims=['y', 'x']) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.3, atol=1e-9) # 0 % luminance, all values ~0.0 lum = xr.DataArray(np.array([[0., 0.], [0., 0.]]), dims=['y', 'x']) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.0, atol=1e-9) class TestSandwichCompositor(unittest.TestCase): """Test sandwich compositor.""" @mock.patch('satpy.composites.enhance2dataset') def test_compositor(self, e2d): """Test luminance sharpening compositor.""" from satpy.composites import SandwichCompositor rgb_arr = da.from_array(np.random.random((3, 2, 2)), chunks=2) rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x']) lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2) lum = 
xr.DataArray(lum_arr, dims=['y', 'x']) # Make enhance2dataset return unmodified dataset e2d.return_value = rgb comp = SandwichCompositor(name='test') res = comp([lum, rgb]) for i in range(3): np.testing.assert_allclose(res.data[i, :, :], rgb_arr[i, :, :] * lum_arr / 100.) # make sure the compositor doesn't modify the input data np.testing.assert_allclose(lum.values, lum_arr.compute()) class TestInlineComposites(unittest.TestCase): """Test inline composites.""" def test_inline_composites(self): """Test that inline composites are working.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors comps = load_compositor_configs_for_sensors(['visir'])[0] # Check that "fog" product has all its prerequisites defined keys = comps['visir'].keys() fog = [comps['visir'][dsid] for dsid in keys if "fog" == dsid['name']][0] self.assertEqual(fog.attrs['prerequisites'][0]['name'], '_fog_dep_0') self.assertEqual(fog.attrs['prerequisites'][1]['name'], '_fog_dep_1') self.assertEqual(fog.attrs['prerequisites'][2], 10.8) # Check that the sub-composite dependencies use wavelengths # (numeric values) keys = comps['visir'].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']] self.assertEqual(comps['visir'][fog_dep_ids[0]].attrs['prerequisites'], [12.0, 10.8]) self.assertEqual(comps['visir'][fog_dep_ids[1]].attrs['prerequisites'], [10.8, 8.7]) # Check the same for SEVIRI and verify channel names are used # in the sub-composite dependencies instead of wavelengths comps = load_compositor_configs_for_sensors(['seviri'])[0] keys = comps['seviri'].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']] self.assertEqual(comps['seviri'][fog_dep_ids[0]].attrs['prerequisites'], ['IR_120', 'IR_108']) self.assertEqual(comps['seviri'][fog_dep_ids[1]].attrs['prerequisites'], ['IR_108', 'IR_087']) class TestColormapCompositor(unittest.TestCase): """Test the ColormapCompositor.""" def setUp(self): """Set up the test case.""" from satpy.composites import ColormapCompositor self.colormap_compositor = ColormapCompositor('test_cmap_compositor') def test_build_colormap_with_int_data_and_without_meanings(self): """Test colormap building.""" palette = np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]) colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) self.assertTrue(np.allclose(colormap.values, [0, 1])) self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) def test_build_colormap_with_int_data_and_with_meanings(self): """Test colormap building.""" palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=['value', 'band']) palette.attrs['palette_meanings'] = [2, 3, 4] colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) self.assertTrue(np.allclose(colormap.values, [2, 3, 4])) self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) class TestPaletteCompositor(unittest.TestCase): """Test the PaletteCompositor.""" def test_call(self): """Test palette compositing.""" from satpy.composites import PaletteCompositor cmap_comp = PaletteCompositor('test_cmap_compositor') palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=['value', 'band']) palette.attrs['palette_meanings'] = [2, 3, 4] data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=['y', 'x']) res = cmap_comp([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]], 
[[1., 0.498039, 0.], [0., 0.498039, 1.]]]) self.assertTrue(np.allclose(res, exp)) class TestColorizeCompositor(unittest.TestCase): """Test the ColorizeCompositor.""" def test_colorize_no_fill(self): """Test colorizing.""" from satpy.composites import ColorizeCompositor colormap_composite = ColorizeCompositor('test_color_compositor') palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=['value', 'band']) palette.attrs['palette_meanings'] = [2, 3, 4] data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8), dims=['y', 'x']) res = colormap_composite([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) self.assertTrue(np.allclose(res, exp, atol=1e-4)) def test_colorize_with_interpolation(self): """Test colorizing with interpolation.""" from satpy.composites import ColorizeCompositor colormap_composite = ColorizeCompositor('test_color_compositor') palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=['value', 'band']) palette.attrs['palette_meanings'] = [2, 3, 4] data = xr.DataArray(da.from_array(np.array([[4, 3, 2.5], [2, 3.2, 4]])), dims=['y', 'x'], attrs={'valid_range': np.array([2, 4])}) res = colormap_composite([data, palette]) exp = np.array([[[1.0000149, 0.49804664, 0.24907766], [0., 0.59844028, 1.0000149]], [[1.00005405, 0.49806613, 0.24902255], [0., 0.59846373, 1.00005405]], [[1.00001585, 0.49804711, 0.24896771], [0., 0.59844073, 1.00001585]]]) self.assertTrue(np.allclose(res, exp, atol=1e-4)) class TestCloudTopHeightCompositor(unittest.TestCase): """Test the CloudTopHeightCompositor.""" def setUp(self): """Set up the test case.""" from satpy.composites.cloud_products import CloudTopHeightCompositor self.colormap_composite = CloudTopHeightCompositor('test_cmap_compositor') self.palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=['value', 'band']) self.palette.attrs['palette_meanings'] = [2, 3, 4] self.exp = np.array([[[0., 0.498, 0.], [0., 0.498, np.nan]], [[0., 0.498, 0.], [0., 0.498, np.nan]], [[0., 0.498, 0.], [0., 0.498, np.nan]]]) self.exp_all_valid = np.array([[[0., 0.498, 0.], [0., 0.498, 0.]], [[0., 0.498, 0.], [0., 0.498, 0.]], [[0., 0.498, 0.], [0., 0.498, 0.]]]) def test_call_numpy_with_invalid_value_in_status(self): """Test the CloudTopHeight composite generation.""" status = xr.DataArray(np.array([[1, 0, 1], [1, 0, 65535]]), dims=['y', 'x'], attrs={'_FillValue': 65535}) data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8), dims=['y', 'x']) res = self.colormap_composite([data, self.palette, status]) np.testing.assert_allclose(res, self.exp, atol=1e-4) def test_call_dask_with_invalid_value_in_status(self): """Test the CloudTopHeight composite generation.""" status = xr.DataArray(da.from_array(np.array([[1, 0, 1], [1, 0, 65535]])), dims=['y', 'x'], attrs={'_FillValue': 65535}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=['y', 'x']) res = self.colormap_composite([data, self.palette, status]) np.testing.assert_allclose(res, self.exp, atol=1e-4) def test_call_dask_with_invalid_value_in_data(self): """Test the CloudTopHeight composite generation.""" status = xr.DataArray(da.from_array(np.array([[1, 0, 1], [1, 0, 1]])), dims=['y', 'x'], attrs={'_FillValue': 65535}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 99]], dtype=np.uint8)), dims=['y', 'x'], attrs={'_FillValue': 99}) res = 
self.colormap_composite([data, self.palette, status]) np.testing.assert_allclose(res, self.exp_all_valid, atol=1e-4) def test_call_with_alternative_fill_value_color(self): """Test the CloudTopHeight composite generation.""" status = xr.DataArray(da.from_array(np.array([[1, 0, 1], [1, 0, 1]])), dims=['y', 'x'], attrs={'_FillValue': 65535}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=['y', 'x'], attrs={'_FillValue': 99}) self.palette.attrs['fill_value_color'] = np.array([1, 1, 1]) res = self.colormap_composite([data, self.palette, status]) exp = np.array([[[1., 0.498, 1.], [1., 0.498, 1.]], [[1., 0.498, 1.], [1., 0.498, 1.]], [[1., 0.498, 1.], [1., 0.498, 1.]]]) np.testing.assert_allclose(res, exp, atol=1e-4) class TestPrecipCloudsCompositor(unittest.TestCase): """Test the PrecipClouds compositor.""" def test_call(self): """Test the precip composite generation.""" from satpy.composites.cloud_products import PrecipCloudsRGB colormap_compositor = PrecipCloudsRGB('test_precip_compositor') data_light = xr.DataArray(np.array([[80, 70, 60, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=['y', 'x'], attrs={'_FillValue': 255}) data_moderate = xr.DataArray(np.array([[60, 50, 40, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=['y', 'x'], attrs={'_FillValue': 255}) data_intense = xr.DataArray(np.array([[40, 30, 20, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=['y', 'x'], attrs={'_FillValue': 255}) data_flags = xr.DataArray(np.array([[0, 0, 4, 0], [0, 0, 0, 0]], dtype=np.uint8), dims=['y', 'x']) res = colormap_compositor([data_light, data_moderate, data_intense, data_flags]) exp = np.array([[[0.24313725, 0.18235294, 0.12156863, np.nan], [0.12156863, 0.18235294, 0.24313725, np.nan]], [[0.62184874, 0.51820728, 0.41456583, np.nan], [0.20728291, 0.31092437, 0.41456583, np.nan]], [[0.82913165, 0.7254902, 0.62184874, np.nan], [0.20728291, 0.31092437, 0.41456583, np.nan]]]) np.testing.assert_allclose(res, exp) class TestSingleBandCompositor(unittest.TestCase): """Test the single-band compositor.""" def setUp(self): """Create test data.""" from satpy.composites import SingleBandCompositor self.comp = SingleBandCompositor(name='test') all_valid = np.ones((2, 2)) self.all_valid = xr.DataArray(all_valid, dims=['y', 'x']) def test_call(self): """Test calling the compositor.""" # Dataset with extra attributes all_valid = self.all_valid all_valid.attrs['sensor'] = 'foo' attrs = { 'foo': 'bar', 'resolution': 333, 'units': 'K', 'sensor': {'fake_sensor1', 'fake_sensor2'}, 'calibration': 'BT', 'wavelength': 10.8 } self.comp.attrs['resolution'] = None res = self.comp([all_valid], **attrs) # Verify attributes self.assertEqual(res.attrs.get('sensor'), 'foo') self.assertTrue('foo' in res.attrs) self.assertEqual(res.attrs.get('foo'), 'bar') self.assertTrue('units' in res.attrs) self.assertTrue('calibration' in res.attrs) self.assertFalse('modifiers' in res.attrs) self.assertEqual(res.attrs['wavelength'], 10.8) self.assertEqual(res.attrs['resolution'], 333) class TestCategoricalDataCompositor(unittest.TestCase): """Test composiotor for recategorization of categorical data.""" def setUp(self): """Create test data.""" attrs = {'name': 'foo'} data = xr.DataArray(da.from_array([[2., 1.], [3., 0.]]), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) self.data = data def test_basic_recategorization(self): """Test general functionality of compositor incl. 
attributes.""" from satpy.composites import CategoricalDataCompositor lut = [np.nan, 0, 1, 1] name = 'bar' comp = CategoricalDataCompositor(name=name, lut=lut) res = comp([self.data]) res = res.compute() expected = np.array([[1., 0.], [1., np.nan]]) np.testing.assert_equal(res.values, expected) np.testing.assert_equal(res.attrs['name'], name) np.testing.assert_equal(res.attrs['composite_lut'], lut) def test_too_many_datasets(self): """Test that ValueError is raised if more than one dataset is provided.""" from satpy.composites import CategoricalDataCompositor lut = [np.nan, 0, 1, 1] comp = CategoricalDataCompositor(name='foo', lut=lut) np.testing.assert_raises(ValueError, comp, [self.data, self.data]) class TestGenericCompositor(unittest.TestCase): """Test generic compositor.""" def setUp(self): """Create test data.""" from satpy.composites import GenericCompositor self.comp = GenericCompositor(name='test') self.comp2 = GenericCompositor(name='test2', common_channel_mask=False) all_valid = np.ones((1, 2, 2)) self.all_valid = xr.DataArray(all_valid, dims=['bands', 'y', 'x']) first_invalid = np.reshape(np.array([np.nan, 1., 1., 1.]), (1, 2, 2)) self.first_invalid = xr.DataArray(first_invalid, dims=['bands', 'y', 'x']) second_invalid = np.reshape(np.array([1., np.nan, 1., 1.]), (1, 2, 2)) self.second_invalid = xr.DataArray(second_invalid, dims=['bands', 'y', 'x']) wrong_shape = np.reshape(np.array([1., 1., 1.]), (1, 3, 1)) self.wrong_shape = xr.DataArray(wrong_shape, dims=['bands', 'y', 'x']) def test_masking(self): """Test masking in generic compositor.""" # Single channel res = self.comp([self.all_valid]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # Three channels, one value invalid res = self.comp([self.all_valid, self.all_valid, self.first_invalid]) correct = np.reshape(np.array([np.nan, 1., 1., 1.]), (2, 2)) for i in range(3): np.testing.assert_almost_equal(res.data[i, :, :], correct) # Three channels, two values invalid res = self.comp([self.all_valid, self.first_invalid, self.second_invalid]) correct = np.reshape(np.array([np.nan, np.nan, 1., 1.]), (2, 2)) for i in range(3): np.testing.assert_almost_equal(res.data[i, :, :], correct) def test_concat_datasets(self): """Test concatenation of datasets.""" from satpy.composites import IncompatibleAreas res = self.comp._concat_datasets([self.all_valid], 'L') num_bands = len(res.bands) self.assertEqual(num_bands, 1) self.assertEqual(res.shape[0], num_bands) self.assertEqual(res.bands[0], 'L') res = self.comp._concat_datasets([self.all_valid, self.all_valid], 'LA') num_bands = len(res.bands) self.assertEqual(num_bands, 2) self.assertEqual(res.shape[0], num_bands) self.assertEqual(res.bands[0], 'L') self.assertEqual(res.bands[1], 'A') self.assertRaises(IncompatibleAreas, self.comp._concat_datasets, [self.all_valid, self.wrong_shape], 'LA') def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" res = self.comp._get_sensors([self.all_valid]) self.assertIsNone(res) dset1 = self.all_valid dset1.attrs['sensor'] = 'foo' res = self.comp._get_sensors([dset1]) self.assertEqual(res, 'foo') dset2 = self.first_invalid dset2.attrs['sensor'] = 'bar' res = self.comp._get_sensors([dset1, dset2]) self.assertIn('foo', res) self.assertIn('bar', res) self.assertEqual(len(res), 2) self.assertIsInstance(res, set) @mock.patch('satpy.composites.GenericCompositor._get_sensors') @mock.patch('satpy.composites.combine_metadata') @mock.patch('satpy.composites.check_times') 
@mock.patch('satpy.composites.GenericCompositor.match_data_arrays') def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, get_sensors): """Test calling generic compositor.""" from satpy.composites import IncompatibleAreas combine_metadata.return_value = dict() get_sensors.return_value = 'foo' # One dataset, no mode given res = self.comp([self.all_valid]) self.assertEqual(res.shape[0], 1) self.assertEqual(res.attrs['mode'], 'L') match_data_arrays.assert_not_called() # This compositor has been initialized without common masking, so the # masking shouldn't have been called projectables = [self.all_valid, self.first_invalid, self.second_invalid] match_data_arrays.return_value = projectables res = self.comp2(projectables) match_data_arrays.assert_called_once() match_data_arrays.reset_mock() # Dataset for alpha given, so shouldn't be masked projectables = [self.all_valid, self.all_valid] match_data_arrays.return_value = projectables res = self.comp(projectables) match_data_arrays.assert_called_once() match_data_arrays.reset_mock() # When areas are incompatible, masking shouldn't happen match_data_arrays.side_effect = IncompatibleAreas() self.assertRaises(IncompatibleAreas, self.comp, [self.all_valid, self.wrong_shape]) match_data_arrays.assert_called_once() def test_call(self): """Test calling generic compositor.""" # Multiple datasets with extra attributes all_valid = self.all_valid all_valid.attrs['sensor'] = 'foo' attrs = {'foo': 'bar', 'resolution': 333} self.comp.attrs['resolution'] = None res = self.comp([self.all_valid, self.first_invalid], **attrs) # Verify attributes self.assertEqual(res.attrs.get('sensor'), 'foo') self.assertIn('foo', res.attrs) self.assertEqual(res.attrs.get('foo'), 'bar') self.assertNotIn('units', res.attrs) self.assertNotIn('calibration', res.attrs) self.assertNotIn('modifiers', res.attrs) self.assertIsNone(res.attrs['wavelength']) self.assertEqual(res.attrs['mode'], 'LA') self.assertEqual(res.attrs['resolution'], 333) class TestAddBands(unittest.TestCase): """Test case for the `add_bands` function.""" def test_add_bands_l_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGB -> RGB data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), coords={'bands': ['L']}) new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), coords={'bands': ['R', 'G', 'B']}) res = add_bands(data, new_bands) res_bands = ['R', 'G', 'B'] self.assertEqual(res.attrs['mode'], ''.join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords['bands'], res_bands) def test_add_bands_l_rgba(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGBA -> RGBA data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), coords={'bands': ['L']}, attrs={'mode': 'L'}) new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'), coords={'bands': ['R', 'G', 'B', 'A']}) res = add_bands(data, new_bands) res_bands = ['R', 'G', 'B', 'A'] self.assertEqual(res.attrs['mode'], ''.join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords['bands'], res_bands) def test_add_bands_la_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # LA + RGB -> RGBA data = xr.DataArray(da.ones((2, 3, 3)), dims=('bands', 'y', 'x'), coords={'bands': ['L', 'A']}, attrs={'mode': 'LA'}) new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), coords={'bands': ['R', 'G', 'B']}) res = 
add_bands(data, new_bands) res_bands = ['R', 'G', 'B', 'A'] self.assertEqual(res.attrs['mode'], ''.join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords['bands'], res_bands) def test_add_bands_rgb_rbga(self): """Test adding bands.""" from satpy.composites import add_bands # RGB + RGBA -> RGBA data = xr.DataArray(da.ones((3, 3, 3)), dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}, attrs={'mode': 'RGB'}) new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'), coords={'bands': ['R', 'G', 'B', 'A']}) res = add_bands(data, new_bands) res_bands = ['R', 'G', 'B', 'A'] self.assertEqual(res.attrs['mode'], ''.join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords['bands'], res_bands) def test_add_bands_p_l(self): """Test adding bands.""" from satpy.composites import add_bands # P(RGBA) + L -> RGBA data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), coords={'bands': ['P']}, attrs={'mode': 'P'}) new_bands = xr.DataArray(da.array(['L']), dims=('bands'), coords={'bands': ['L']}) with pytest.raises(NotImplementedError): add_bands(data, new_bands) class TestStaticImageCompositor(unittest.TestCase): """Test case for the static compositor.""" @mock.patch('satpy.resample.get_area_def') def test_init(self, get_area_def): """Test the initializiation of static compositor.""" from satpy.composites import StaticImageCompositor # No filename given raises ValueError with self.assertRaises(ValueError): StaticImageCompositor("name") # No area defined comp = StaticImageCompositor("name", filename="/foo.tif") self.assertEqual(comp._cache_filename, "/foo.tif") self.assertIsNone(comp.area) # Area defined get_area_def.return_value = "bar" comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") self.assertEqual(comp._cache_filename, "/foo.tif") self.assertEqual(comp.area, "bar") get_area_def.assert_called_once_with("euro4") @mock.patch('satpy.aux_download.retrieve') @mock.patch('satpy.aux_download.register_file') @mock.patch('satpy.Scene') def test_call(self, Scene, register, retrieve): # noqa """Test the static compositing.""" import satpy from satpy.composites import StaticImageCompositor satpy.config.set(data_dir=os.path.join(os.path.sep, 'path', 'to', 'image')) remote_tif = "http://example.com/foo.tif" class MockScene(dict): def load(self, arg): pass img = mock.MagicMock() img.attrs = {} scn = MockScene() scn['image'] = img Scene.return_value = scn # absolute path to local file comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() Scene.assert_called_once_with(reader='generic_image', filenames=['/foo.tif']) register.assert_not_called() retrieve.assert_not_called() self.assertIn("start_time", res.attrs) self.assertIn("end_time", res.attrs) self.assertIsNone(res.attrs['sensor']) self.assertNotIn('modifiers', res.attrs) self.assertNotIn('calibration', res.attrs) # remote file with local cached version Scene.reset_mock() register.return_value = "data_dir/foo.tif" retrieve.return_value = "data_dir/foo.tif" comp = StaticImageCompositor("name", url=remote_tif, area="euro4") res = comp() Scene.assert_called_once_with(reader='generic_image', filenames=['data_dir/foo.tif']) self.assertIn("start_time", res.attrs) self.assertIn("end_time", res.attrs) self.assertIsNone(res.attrs['sensor']) self.assertNotIn('modifiers', res.attrs) self.assertNotIn('calibration', res.attrs) # Non-georeferenced image, no area given img.attrs.pop('area') 
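# With no 'area' attribute left on the image and no area passed to the compositor, the static image cannot be georeferenced, hence the AttributeError asserted below.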
comp = StaticImageCompositor("name", filename="/foo.tif") with self.assertRaises(AttributeError): comp() # Non-georeferenced image, area given comp = StaticImageCompositor("name", filename="/foo.tif", area='euro4') res = comp() self.assertEqual(res.attrs['area'].area_id, 'euro4') # Filename contains environment variable os.environ["TEST_IMAGE_PATH"] = "/path/to/image" comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area='euro4') self.assertEqual(comp._cache_filename, "/path/to/image/foo.tif") # URL and filename without absolute path comp = StaticImageCompositor("name", url=remote_tif, filename="bar.tif") self.assertEqual(comp._url, remote_tif) self.assertEqual(comp._cache_filename, "bar.tif") # No URL, filename without absolute path, use default data_dir from config with mock.patch('os.path.exists') as exists: exists.return_value = True comp = StaticImageCompositor("name", filename="foo.tif") self.assertEqual(comp._url, None) self.assertEqual(comp._cache_filename, os.path.join(os.path.sep, 'path', 'to', 'image', 'foo.tif')) def _enhance2dataset(dataset, convert_p=False): """Mock the enhance2dataset to return the original data.""" return dataset class TestBackgroundCompositor: """Test case for the background compositor.""" @classmethod def setup_class(cls): """Create shared input data arrays.""" foreground_data = { "L": np.array([[[1., 0.5], [0., np.nan]]]), "LA": np.array([[[1., 0.5], [0., np.nan]], [[0.5, 0.5], [0.5, 0.5]]]), "RGB": np.array([ [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]]]), "RGBA": np.array([ [[1.0, 0.5], [0.0, np.nan]], [[1.0, 0.5], [0.0, np.nan]], [[1.0, 0.5], [0.0, np.nan]], [[0.5, 0.5], [0.5, 0.5]]]), } cls.foreground_data = foreground_data @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) @pytest.mark.parametrize( ('foreground_bands', 'background_bands', 'exp_bands', 'exp_result'), [ ('L', 'L', 'L', np.array([[1.0, 0.5], [0.0, 1.0]])), ('LA', 'LA', 'L', np.array([[1.0, 0.75], [0.5, 1.0]])), ('RGB', 'RGB', 'RGB', np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]])), ('RGBA', 'RGBA', 'RGB', np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), ('RGBA', 'RGB', 'RGB', np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), ] ) def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): """Test the background compositing.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name") # L mode images foreground_data = self.foreground_data[foreground_bands] attrs = {'mode': foreground_bands, 'area': 'foo'} foreground = xr.DataArray(da.from_array(foreground_data), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) attrs = {'mode': background_bands, 'area': 'foo'} background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) res = comp([foreground, background]) assert res.attrs['area'] == 'foo' np.testing.assert_allclose(res, exp_result) assert res.attrs['mode'] == exp_bands @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) def test_multiple_sensors(self): """Test the background compositing from multiple sensor data.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name") # L mode images attrs = {'mode': 'L', 'area': 'foo'} foreground_data = self.foreground_data["L"] foreground = 
xr.DataArray(da.from_array(foreground_data), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs.copy()) foreground.attrs['sensor'] = 'abi' background = xr.DataArray(da.ones((1, 2, 2)), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs.copy()) background.attrs['sensor'] = 'glm' res = comp([foreground, background]) assert res.attrs['area'] == 'foo' np.testing.assert_allclose(res, np.array([[1., 0.5], [0., 1.]])) assert res.attrs['mode'] == 'L' assert res.attrs['sensor'] == {'abi', 'glm'} class TestMaskingCompositor: """Test case for the simple masking compositor.""" @pytest.fixture def conditions_v1(self): """Masking conditions with string values.""" return [{'method': 'equal', 'value': 'Cloud-free_land', 'transparency': 100}, {'method': 'equal', 'value': 'Cloud-free_sea', 'transparency': 50}] @pytest.fixture def conditions_v2(self): """Masking conditions with numerical values.""" return [{'method': 'equal', 'value': 1, 'transparency': 100}, {'method': 'equal', 'value': 2, 'transparency': 50}] @pytest.fixture def test_data(self): """Test data to use with masking compositors.""" return xr.DataArray(da.random.random((3, 3)), dims=['y', 'x']) @pytest.fixture def test_ct_data(self): """Test 2D CT data array.""" flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] flag_values = da.array([1, 2]) ct_data = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) ct_data = xr.DataArray(ct_data, dims=['y', 'x']) ct_data.attrs['flag_meanings'] = flag_meanings ct_data.attrs['flag_values'] = flag_values return ct_data @pytest.fixture def test_ct_data_v3(self, test_ct_data): """Set ct data to NaN where it originally is 1.""" return test_ct_data.where(test_ct_data == 1) @pytest.fixture def reference_data(self, test_data, test_ct_data): """Get reference data to use in masking compositor tests.""" # The data are set to NaN where ct is `1` return test_data.where(test_ct_data > 1) @pytest.fixture def reference_alpha(self): """Get reference alpha to use in masking compositor tests.""" ref_alpha = da.array([[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 0]]) return xr.DataArray(ref_alpha, dims=['y', 'x']) def test_init(self): """Test the initializiation of compositor.""" from satpy.composites import MaskingCompositor # No transparency or conditions given raises ValueError with pytest.raises(ValueError): comp = MaskingCompositor("name") # transparency defined transparency = {0: 100, 1: 50} conditions = [{'method': 'equal', 'value': 0, 'transparency': 100}, {'method': 'equal', 'value': 1, 'transparency': 50}] comp = MaskingCompositor("name", transparency=transparency.copy()) assert not hasattr(comp, 'transparency') # Transparency should be converted to conditions assert comp.conditions == conditions # conditions defined comp = MaskingCompositor("name", conditions=conditions.copy()) assert comp.conditions == conditions def test_get_flag_value(self): """Test reading flag value from attributes based on a name.""" from satpy.composites import _get_flag_value flag_values = da.array([1, 2]) mask = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) mask = xr.DataArray(mask, dims=['y', 'x']) flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] mask.attrs['flag_meanings'] = flag_meanings mask.attrs['flag_values'] = flag_values assert _get_flag_value(mask, 'Cloud-free_land') == 1 assert _get_flag_value(mask, 'Cloud-free_sea') == 2 flag_meanings_str = 'Cloud-free_land Cloud-free_sea' mask.attrs['flag_meanings'] = flag_meanings_str assert _get_flag_value(mask, 'Cloud-free_land') == 
1 assert _get_flag_value(mask, 'Cloud-free_sea') == 2 @pytest.mark.parametrize("mode", ["LA", "RGBA"]) def test_call_numerical_transparency_data( self, conditions_v1, test_data, test_ct_data, reference_data, reference_alpha, mode): """Test call the compositor with numerical transparency data. Use parameterisation to test different image modes. """ from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler # Test with numerical transparency data with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v1, mode=mode) res = comp([test_data, test_ct_data]) assert res.mode == mode for m in mode.rstrip("A"): np.testing.assert_allclose(res.sel(bands=m), reference_data) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) def test_call_named_fields(self, conditions_v2, test_data, test_ct_data, reference_data, reference_alpha): """Test with named fields.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([test_data, test_ct_data]) assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands='L'), reference_data) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) def test_call_named_fields_string( self, conditions_v2, test_data, test_ct_data, reference_data, reference_alpha): """Test with named fields which are as a string in the mask attributes.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler flag_meanings_str = 'Cloud-free_land Cloud-free_sea' test_ct_data.attrs['flag_meanings'] = flag_meanings_str with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([test_data, test_ct_data]) assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands='L'), reference_data) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) def test_method_isnan(self, test_data, test_ct_data, test_ct_data_v3): """Test "isnan" as method.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler conditions_v3 = [{'method': 'isnan', 'transparency': 100}] # The data are set to NaN where ct is NaN reference_data_v3 = test_data.where(test_ct_data == 1) reference_alpha_v3 = da.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]) reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=['y', 'x']) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v3) res = comp([test_data, test_ct_data_v3]) assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands='L'), reference_data_v3) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha_v3) def test_method_absolute_import(self, test_data, test_ct_data_v3): """Test "absolute_import" as method.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler conditions_v4 = [{'method': 'absolute_import', 'transparency': 'satpy.resample'}] # This should raise AttributeError with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v4) with pytest.raises(AttributeError): comp([test_data, test_ct_data_v3]) def test_rgb_dataset(self, conditions_v1, test_ct_data, reference_alpha): """Test RGB dataset.""" from satpy.composites import MaskingCompositor 
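        # CustomScheduler(max_computes=0), imported below, makes dask raise if anything
        # triggers a computation, i.e. these tests verify the compositor stays fully lazy.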
from satpy.tests.utils import CustomScheduler # 3D data array data = xr.DataArray(da.random.random((3, 3, 3)), dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B'], 'y': np.arange(3), 'x': np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v1) res = comp([data, test_ct_data]) assert res.mode == "RGBA" np.testing.assert_allclose(res.sel(bands='R'), data.sel(bands='R').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='G'), data.sel(bands='G').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='B'), data.sel(bands='B').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) def test_rgba_dataset(self, conditions_v2, test_ct_data, reference_alpha): """Test RGBA dataset.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler data = xr.DataArray(da.random.random((4, 3, 3)), dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B', 'A'], 'y': np.arange(3), 'x': np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([data, test_ct_data]) assert res.mode == "RGBA" np.testing.assert_allclose(res.sel(bands='R'), data.sel(bands='R').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='G'), data.sel(bands='G').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='B'), data.sel(bands='B').where(test_ct_data > 1)) # The compositor should drop the original alpha band np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) def test_incorrect_method(self, test_data, test_ct_data): """Test incorrect method.""" from satpy.composites import MaskingCompositor conditions = [{'method': 'foo', 'value': 0, 'transparency': 100}] comp = MaskingCompositor("name", conditions=conditions) with pytest.raises(AttributeError): comp([test_data, test_ct_data]) # Test with too few projectables. 
        with pytest.raises(ValueError):
            comp([test_data])

    def test_incorrect_mode(self, conditions_v1):
        """Test initiating with unsupported mode."""
        from satpy.composites import MaskingCompositor

        # Incorrect mode raises ValueError
        with pytest.raises(ValueError):
            MaskingCompositor("name", conditions=conditions_v1, mode="YCbCrA")


class TestNaturalEnhCompositor(unittest.TestCase):
    """Test NaturalEnh compositor."""

    def setUp(self):
        """Create channel data and set channel weights."""
        self.ch1 = xr.DataArray([1.0])
        self.ch2 = xr.DataArray([2.0])
        self.ch3 = xr.DataArray([3.0])
        self.ch16_w = 2.0
        self.ch08_w = 3.0
        self.ch06_w = 4.0

    @mock.patch('satpy.composites.NaturalEnh.__repr__')
    @mock.patch('satpy.composites.NaturalEnh.match_data_arrays')
    def test_natural_enh(self, match_data_arrays, repr_):
        """Test NaturalEnh compositor."""
        from satpy.composites import NaturalEnh
        repr_.return_value = ''
        projectables = [self.ch1, self.ch2, self.ch3]

        def temp_func(*args):
            return args[0]

        match_data_arrays.side_effect = temp_func
        comp = NaturalEnh("foo", ch16_w=self.ch16_w, ch08_w=self.ch08_w,
                          ch06_w=self.ch06_w)
        self.assertEqual(comp.ch16_w, self.ch16_w)
        self.assertEqual(comp.ch08_w, self.ch08_w)
        self.assertEqual(comp.ch06_w, self.ch06_w)
        res = comp(projectables)
        assert mock.call(projectables) in match_data_arrays.mock_calls
        correct = (self.ch16_w * projectables[0] +
                   self.ch08_w * projectables[1] +
                   self.ch06_w * projectables[2])
        self.assertEqual(res[0], correct)
        self.assertEqual(res[1], projectables[1])
        self.assertEqual(res[2], projectables[2])


class TestEnhance2Dataset(unittest.TestCase):
    """Test the enhance2dataset utility."""

    @mock.patch('satpy.composites.get_enhanced_image')
    def test_enhance_p_to_rgb(self, get_enhanced_image):
        """Test enhancing a paletted dataset in RGB mode."""
        from trollimage.xrimage import XRImage
        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'),
                                   coords={'bands': ['P']}))
        img.palette = ((0, 0, 0), (4, 4, 4), (8, 8, 8))
        get_enhanced_image.return_value = img

        from satpy.composites import enhance2dataset
        dataset = xr.DataArray(np.ones((1, 20, 20)))
        res = enhance2dataset(dataset, convert_p=True)
        assert res.attrs['mode'] == 'RGB'

    @mock.patch('satpy.composites.get_enhanced_image')
    def test_enhance_p_to_rgba(self, get_enhanced_image):
        """Test enhancing a paletted dataset in RGBA mode."""
        from trollimage.xrimage import XRImage
        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'),
                                   coords={'bands': ['P']}))
        img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255))
        get_enhanced_image.return_value = img

        from satpy.composites import enhance2dataset
        dataset = xr.DataArray(np.ones((1, 20, 20)))
        res = enhance2dataset(dataset, convert_p=True)
        assert res.attrs['mode'] == 'RGBA'

    @mock.patch('satpy.composites.get_enhanced_image')
    def test_enhance_p(self, get_enhanced_image):
        """Test enhancing a paletted dataset in P mode."""
        from trollimage.xrimage import XRImage
        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'),
                                   coords={'bands': ['P']}))
        img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255))
        get_enhanced_image.return_value = img

        from satpy.composites import enhance2dataset
        dataset = xr.DataArray(np.ones((1, 20, 20)))
        res = enhance2dataset(dataset)
        assert res.attrs['mode'] == 'P'
        assert res.max().values == 2

    @mock.patch('satpy.composites.get_enhanced_image')
    def test_enhance_l(self, get_enhanced_image):
        """Test enhancing a dataset in L mode."""
        from trollimage.xrimage import XRImage
        img =
XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['L']})) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset) assert res.attrs['mode'] == 'L' assert res.max().values == 1 class TestInferMode(unittest.TestCase): """Test the infer_mode utility.""" def test_bands_coords_is_used(self): """Test that the `bands` coord is used.""" from satpy.composites import GenericCompositor arr = xr.DataArray(np.ones((1, 5, 5)), dims=('bands', 'x', 'y'), coords={'bands': ['P']}) assert GenericCompositor.infer_mode(arr) == 'P' arr = xr.DataArray(np.ones((3, 5, 5)), dims=('bands', 'x', 'y'), coords={'bands': ['Y', 'Cb', 'Cr']}) assert GenericCompositor.infer_mode(arr) == 'YCbCr' def test_mode_is_used(self): """Test that the `mode` attribute is used.""" from satpy.composites import GenericCompositor arr = xr.DataArray(np.ones((1, 5, 5)), dims=('bands', 'x', 'y'), attrs={'mode': 'P'}) assert GenericCompositor.infer_mode(arr) == 'P' def test_band_size_is_used(self): """Test that the band size is used.""" from satpy.composites import GenericCompositor arr = xr.DataArray(np.ones((2, 5, 5)), dims=('bands', 'x', 'y')) assert GenericCompositor.infer_mode(arr) == 'LA' def test_no_bands_is_l(self): """Test that default (no band) is L.""" from satpy.composites import GenericCompositor arr = xr.DataArray(np.ones((5, 5)), dims=('x', 'y')) assert GenericCompositor.infer_mode(arr) == 'L' class TestLongitudeMaskingCompositor(unittest.TestCase): """Test case for the LongitudeMaskingCompositor compositor.""" def test_masking(self): """Test longitude masking.""" from satpy.composites import LongitudeMaskingCompositor area = mock.MagicMock() lons = np.array([-180., -100., -50., 0., 50., 100., 180.]) area.get_lonlats = mock.MagicMock(return_value=[lons, []]) a = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, 7]), attrs={'area': area}) comp = LongitudeMaskingCompositor(name='test', lon_min=-40., lon_max=120.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) comp = LongitudeMaskingCompositor(name='test', lon_min=-40.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) comp = LongitudeMaskingCompositor(name='test', lon_max=120.) expected = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) comp = LongitudeMaskingCompositor(name='test', lon_min=120., lon_max=-40.) expected = xr.DataArray(np.array([1, 2, 3, np.nan, np.nan, np.nan, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) satpy-0.34.0/satpy/tests/test_config.py000066400000000000000000000202231420401153000201030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test objects and functions in the satpy.config module.""" import os import sys import unittest from unittest import mock import pytest class TestBuiltinAreas(unittest.TestCase): """Test that the builtin areas are all valid.""" def test_areas_pyproj(self): """Test all areas have valid projections with pyproj.""" import numpy as np import pyproj import xarray as xr from pyresample import parse_area_file from pyresample.geometry import SwathDefinition from satpy.resample import get_area_file lons = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lats = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lons = xr.DataArray(lons) lats = xr.DataArray(lats) swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: if hasattr(area_obj, 'freeze'): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: # we didn't provide enough info to freeze, hard to guess # in a generic test so just skip this area continue proj_dict = area_obj.proj_dict _ = pyproj.Proj(proj_dict) def test_areas_rasterio(self): """Test all areas have valid projections with rasterio.""" try: from rasterio.crs import CRS except ImportError: return unittest.skip("Missing rasterio dependency") if not hasattr(CRS, 'from_dict'): return unittest.skip("RasterIO 1.0+ required") import numpy as np import xarray as xr from pyresample import parse_area_file from pyresample.geometry import SwathDefinition from satpy.resample import get_area_file lons = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lats = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lons = xr.DataArray(lons) lats = xr.DataArray(lats) swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: if hasattr(area_obj, 'freeze'): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: # we didn't provide enough info to freeze, hard to guess # in a generic test so just skip this area continue proj_dict = area_obj.proj_dict if proj_dict.get('proj') in ('ob_tran', 'nsper') and \ 'wktext' not in proj_dict: # FIXME: rasterio doesn't understand ob_tran unless +wktext # See: https://github.com/pyproj4/pyproj/issues/357 # pyproj 2.0+ seems to drop wktext from PROJ dict continue _ = CRS.from_dict(proj_dict) class TestPluginsConfigs(unittest.TestCase): """Test that plugins are working.""" @mock.patch('satpy._config.pkg_resources.iter_entry_points') def test_get_plugin_configs(self, iter_entry_points): """Check that the plugin configs are looked for.""" import pkg_resources ep = pkg_resources.EntryPoint.parse('example_composites = satpy_cpe') ep.dist = pkg_resources.Distribution.from_filename('satpy_cpe-0.0.0-py3.8.egg') ep.dist.module_path = os.path.join(os.path.sep + 'bla', 'bla') iter_entry_points.return_value = [ep] import satpy from satpy._config import get_entry_points_config_dirs # don't let user env vars affect results with satpy.config.set(config_path=[]): dirs = get_entry_points_config_dirs('satpy.composites') self.assertListEqual(dirs, [os.path.join(ep.dist.module_path, 'satpy_cpe', 'etc')]) class TestConfigObject: """Test basic functionality of the central config object.""" def test_custom_config_file(self): """Test adding a custom configuration file using SATPY_CONFIG.""" import tempfile from importlib import reload import yaml import satpy my_config_dict = { 'cache_dir': "/path/to/cache", } try: with tempfile.NamedTemporaryFile(mode='w+t', 
suffix='.yaml', delete=False) as tfile: yaml.dump(my_config_dict, tfile) tfile.close() with mock.patch.dict('os.environ', {'SATPY_CONFIG': tfile.name}): reload(satpy._config) reload(satpy) assert satpy.config.get('cache_dir') == '/path/to/cache' finally: os.remove(tfile.name) def test_deprecated_env_vars(self): """Test that deprecated variables are mapped to new config.""" from importlib import reload import satpy old_vars = { 'PPP_CONFIG_DIR': '/my/ppp/config/dir', 'SATPY_ANCPATH': '/my/ancpath', } with mock.patch.dict('os.environ', old_vars): reload(satpy._config) reload(satpy) assert satpy.config.get('data_dir') == '/my/ancpath' assert satpy.config.get('config_path') == ['/my/ppp/config/dir'] def test_config_path_multiple(self): """Test that multiple config paths are accepted.""" from importlib import reload import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { 'SATPY_CONFIG_PATH': env_paths, } with mock.patch.dict('os.environ', old_vars): reload(satpy._config) reload(satpy) assert satpy.config.get('config_path') == exp_paths def test_config_path_multiple_load(self): """Test that config paths from subprocesses load properly. Satpy modifies the config path environment variable when it is imported. If Satpy is imported again from a subprocess then it should be able to parse this modified variable. """ from importlib import reload import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { 'SATPY_CONFIG_PATH': env_paths, } with mock.patch.dict('os.environ', old_vars): # these reloads will update env variable "SATPY_CONFIG_PATH" reload(satpy._config) reload(satpy) # load the updated env variable and parse it again. reload(satpy._config) reload(satpy) assert satpy.config.get('config_path') == exp_paths def test_bad_str_config_path(self): """Test that a str config path isn't allowed.""" from importlib import reload import satpy old_vars = { 'SATPY_CONFIG_PATH': '/my/configs1', } # single path from env var still works with mock.patch.dict('os.environ', old_vars): reload(satpy._config) reload(satpy) assert satpy.config.get('config_path') == ['/my/configs1'] # strings are not allowed, lists are with satpy.config.set(config_path='/single/string/paths/are/bad'): pytest.raises(ValueError, satpy._config.get_config_path_safe) def _os_specific_multipaths(): exp_paths = ['/my/configs1', '/my/configs2', '/my/configs3'] if sys.platform.startswith("win"): exp_paths = ["C:" + p for p in exp_paths] path_str = os.pathsep.join(exp_paths) return exp_paths, path_str satpy-0.34.0/satpy/tests/test_crefl_utils.py000066400000000000000000000033271420401153000211570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Test CREFL rayleigh correction functions.""" import unittest class TestCreflUtils(unittest.TestCase): """Test crefl_utils.""" def test_get_atm_variables_abi(self): """Test getting atmospheric variables for ABI.""" import numpy as np from satpy.modifiers._crefl_utils import get_atm_variables_abi sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables_abi(0.17690244, 6.123234e-17, 530.61332168, 405., 21.71342113, 77.14385758, 56.214566960, 0.0043149700000000004, 0.0037296, 0.014107995000000002, 0.052349) self.assertLess(abs(np.array(sphalb) - 0.045213532544630494), 1e-10) self.assertLess(abs(rhoray - 2.2030281148621356), 1e-10) self.assertLess(abs(TtotraytH2O - 0.30309880915889087), 1e-10) self.assertLess(abs(tOG - 0.5969089524560548), 1e-10) satpy-0.34.0/satpy/tests/test_data_download.py000066400000000000000000000262161420401153000214460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test for ancillary data downloading.""" from unittest import mock import pytest import yaml from satpy.aux_download import DataDownloadMixin from satpy.modifiers import ModifierBase pooch = pytest.importorskip("pooch") README_URL = "https://raw.githubusercontent.com/pytroll/satpy/main/README.rst" class UnfriendlyModifier(ModifierBase, DataDownloadMixin): """Fake modifier that raises an exception in __init__.""" def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): """Raise an exception if we weren't provided any prerequisites.""" if not prerequisites or len(prerequisites) != 1: raise ValueError("Unexpected number of prereqs") super().__init__(name, prerequisites, optional_prerequisites, **kwargs) self.register_data_files({'url': kwargs['url'], 'filename': kwargs['filename'], 'known_hash': kwargs['known_hash']}) def _setup_custom_composite_config(base_dir): from satpy.composites import StaticImageCompositor from satpy.modifiers.atmosphere import ReflectanceCorrector composite_config = base_dir.mkdir("composites").join("visir.yaml") with open(composite_config, 'w') as comp_file: yaml.dump({ "sensor_name": "visir", "modifiers": { "test_modifier": { "modifier": ReflectanceCorrector, "url": README_URL, "known_hash": None, }, "unfriendly_modifier": { "modifier": UnfriendlyModifier, "url": README_URL, "filename": "unfriendly.rst", "known_hash": None, } }, "composites": { "test_static": { "compositor": StaticImageCompositor, "url": README_URL, "known_hash": None, }, }, }, comp_file) def _setup_custom_reader_config(base_dir): reader_config = base_dir.mkdir("readers").join("fake.yaml") with open(reader_config, 'wt') as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" reader: name: "fake" reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader data_files: - url: {} known_hash: null - url: {} filename: "README2.rst" known_hash: null file_types: {{}} """.format(README_URL, 
README_URL)) def _setup_custom_writer_config(base_dir): writer_config = base_dir.mkdir("writers").join("fake.yaml") with open(writer_config, 'wt') as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" writer: name: "fake" writer: !!python/name:satpy.writers.Writer data_files: - url: {} known_hash: null - url: {} filename: "README2.rst" known_hash: null """.format(README_URL, README_URL)) def _assert_reader_files_downloaded(readers, found_files): r_cond1 = 'readers/README.rst' in found_files r_cond2 = 'readers/README2.rst' in found_files if readers is not None and not readers: r_cond1 = not r_cond1 r_cond2 = not r_cond2 assert r_cond1 assert r_cond2 def _assert_writer_files_downloaded(writers, found_files): w_cond1 = 'writers/README.rst' in found_files w_cond2 = 'writers/README2.rst' in found_files if writers is not None and not writers: w_cond1 = not w_cond1 w_cond2 = not w_cond2 assert w_cond1 assert w_cond2 def _assert_comp_files_downloaded(comp_sensors, found_files): comp_cond = 'composites/README.rst' in found_files if comp_sensors is not None and not comp_sensors: comp_cond = not comp_cond assert comp_cond def _assert_mod_files_downloaded(comp_sensors, found_files): mod_cond = 'modifiers/README.rst' in found_files unfriendly_cond = 'modifiers/unfriendly.rst' in found_files if comp_sensors is not None and not comp_sensors: mod_cond = not mod_cond assert mod_cond assert not unfriendly_cond class TestDataDownload: """Test basic data downloading functionality.""" @pytest.fixture(autouse=True) def _setup_custom_configs(self, tmpdir): _setup_custom_composite_config(tmpdir) _setup_custom_reader_config(tmpdir) _setup_custom_writer_config(tmpdir) self.tmpdir = tmpdir @pytest.mark.parametrize('comp_sensors', [[], None, ['visir']]) @pytest.mark.parametrize('writers', [[], None, ['fake']]) @pytest.mark.parametrize('readers', [[], None, ['fake']]) def test_find_registerable(self, readers, writers, comp_sensors): """Test that find_registerable finds some things.""" import satpy from satpy.aux_download import find_registerable_files with satpy.config.set(config_path=[self.tmpdir]), \ mock.patch('satpy.aux_download._FILE_REGISTRY', {}): found_files = find_registerable_files( readers=readers, writers=writers, composite_sensors=comp_sensors, ) _assert_reader_files_downloaded(readers, found_files) _assert_writer_files_downloaded(writers, found_files) _assert_comp_files_downloaded(comp_sensors, found_files) _assert_mod_files_downloaded(comp_sensors, found_files) def test_limited_find_registerable(self): """Test that find_registerable doesn't find anything when limited.""" import satpy from satpy.aux_download import find_registerable_files file_registry = {} with satpy.config.set(config_path=[self.tmpdir]), \ mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): found_files = find_registerable_files( readers=[], writers=[], composite_sensors=[], ) assert not found_files def test_retrieve(self): """Test retrieving a single file.""" import satpy from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): comp_file = 'composites/README.rst' found_files = find_registerable_files() assert comp_file in found_files assert not self.tmpdir.join(comp_file).exists() retrieve(comp_file) assert self.tmpdir.join(comp_file).exists() def test_offline_retrieve(self): """Test retrieving a single file when 
offline.""" import satpy from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): comp_file = 'composites/README.rst' found_files = find_registerable_files() assert comp_file in found_files # the file doesn't exist, we can't download it assert not self.tmpdir.join(comp_file).exists() with satpy.config.set(download_aux=False): pytest.raises(RuntimeError, retrieve, comp_file) # allow downloading and get it retrieve(comp_file) assert self.tmpdir.join(comp_file).exists() # turn off downloading and make sure we get local file with satpy.config.set(download_aux=False): local_file = retrieve(comp_file) assert local_file def test_offline_retrieve_all(self): """Test registering and retrieving all files fails when offline.""" import satpy from satpy.aux_download import retrieve_all with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=False): pytest.raises(RuntimeError, retrieve_all) def test_retrieve_all(self): """Test registering and retrieving all files.""" import satpy from satpy.aux_download import retrieve_all file_registry = {} file_urls = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry), \ mock.patch('satpy.aux_download._FILE_URLS', file_urls), \ mock.patch('satpy.aux_download.find_registerable_files'): comp_file = 'composites/README.rst' file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() retrieve_all() assert self.tmpdir.join(comp_file).exists() def test_no_downloads_in_tests(self): """Test that tests aren't allowed to download stuff.""" import satpy from satpy.aux_download import register_file, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): cache_key = 'myfile.rst' register_file(README_URL, cache_key) assert not self.tmpdir.join(cache_key).exists() pytest.raises(RuntimeError, retrieve, cache_key) # touch the file so it gets created open(self.tmpdir.join(cache_key), 'w').close() # offline downloading should still be allowed with satpy.config.set(download_aux=False): retrieve(cache_key) def test_download_script(self): """Test basic functionality of the download script.""" import satpy from satpy.aux_download import retrieve_all_cmd file_registry = {} file_urls = {} with satpy.config.set(config_path=[self.tmpdir]), \ mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry), \ mock.patch('satpy.aux_download._FILE_URLS', file_urls), \ mock.patch('satpy.aux_download.find_registerable_files'): comp_file = 'composites/README.rst' file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() retrieve_all_cmd(argv=["--data-dir", str(self.tmpdir)]) assert self.tmpdir.join(comp_file).exists() satpy-0.34.0/satpy/tests/test_dataset.py000066400000000000000000001062231420401153000202700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2021 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test objects and functions in the dataset module.""" import unittest from datetime import datetime import numpy as np import pytest from satpy.dataset.dataid import DataID, DataQuery, ModifierTuple, WavelengthRange, minimal_default_keys_config from satpy.tests.utils import make_cid, make_dataid, make_dsq class TestDataID(unittest.TestCase): """Test DataID object creation and other methods.""" def test_basic_init(self): """Test basic ways of creating a DataID.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc from satpy.dataset.dataid import minimal_default_keys_config as mdkc did = DataID(dikc, name="a") assert did['name'] == 'a' assert did['modifiers'] == tuple() DataID(dikc, name="a", wavelength=0.86) DataID(dikc, name="a", resolution=1000) DataID(dikc, name="a", calibration='radiance') DataID(dikc, name="a", wavelength=0.86, resolution=250, calibration='radiance') DataID(dikc, name="a", wavelength=0.86, resolution=250, calibration='radiance', modifiers=('sunz_corrected',)) with pytest.raises(ValueError): DataID(dikc, wavelength=0.86) did = DataID(mdkc, name='comp24', resolution=500) assert did['resolution'] == 500 def test_init_bad_modifiers(self): """Test that modifiers are a tuple.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc self.assertRaises(TypeError, DataID, dikc, name="a", modifiers="str") def test_compare_no_wl(self): """Compare fully qualified wavelength ID to no wavelength ID.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3)) d2 = DataID(dikc, name="a", wavelength=None) # this happens when sorting IDs during dependency checks self.assertFalse(d1 < d2) self.assertTrue(d2 < d1) def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc with pytest.raises(ValueError): DataID(dikc, name='C05', calibration='_bad_') def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) assert d1.is_modified() assert not d2.is_modified() def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) assert not d1.create_less_modified_query()['modifiers'] assert not d2.create_less_modified_query()['modifiers'] class 
TestCombineMetadata(unittest.TestCase): """Test how metadata is combined.""" def setUp(self): """Set up the test case.""" self.datetime_dts = ( {'start_time': datetime(2018, 2, 1, 11, 58, 0)}, {'start_time': datetime(2018, 2, 1, 11, 59, 0)}, {'start_time': datetime(2018, 2, 1, 12, 0, 0)}, {'start_time': datetime(2018, 2, 1, 12, 1, 0)}, {'start_time': datetime(2018, 2, 1, 12, 2, 0)}, ) def test_average_datetimes(self): """Test the average_datetimes helper function.""" from satpy.dataset.metadata import average_datetimes dts = ( datetime(2018, 2, 1, 11, 58, 0), datetime(2018, 2, 1, 11, 59, 0), datetime(2018, 2, 1, 12, 0, 0), datetime(2018, 2, 1, 12, 1, 0), datetime(2018, 2, 1, 12, 2, 0), ) ret = average_datetimes(dts) self.assertEqual(dts[2], ret) def test_combine_times_with_averaging(self): """Test the combine_metadata with times with averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts) self.assertEqual(self.datetime_dts[2]['start_time'], ret['start_time']) def test_combine_times_without_averaging(self): """Test the combine_metadata with times without averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts, average_times=False) # times are not equal so don't include it in the final result self.assertNotIn('start_time', ret) def test_combine_arrays(self): """Test the combine_metadata with arrays.""" from numpy import arange, ones from xarray import DataArray from satpy.dataset.metadata import combine_metadata dts = [ {"quality": (arange(25) % 2).reshape(5, 5).astype("?")}, {"quality": (arange(1, 26) % 3).reshape(5, 5).astype("?")}, {"quality": ones((5, 5,), "?")}, ] assert "quality" not in combine_metadata(*dts) dts2 = [{"quality": DataArray(d["quality"])} for d in dts] assert "quality" not in combine_metadata(*dts2) # the ancillary_variables attribute is actually a list of data arrays dts3 = [{"quality": [d["quality"]]} for d in dts] assert "quality" not in combine_metadata(*dts3) # check cases with repeated arrays dts4 = [ {"quality": dts[0]["quality"]}, {"quality": dts[0]["quality"]}, ] assert "quality" in combine_metadata(*dts4) dts5 = [ {"quality": dts3[0]["quality"]}, {"quality": dts3[0]["quality"]}, ] assert "quality" in combine_metadata(*dts5) # check with other types dts6 = [ DataArray(arange(5), attrs=dts[0]), DataArray(arange(5), attrs=dts[0]), DataArray(arange(5), attrs=dts[1]), object() ] assert "quality" not in combine_metadata(*dts6) def test_combine_lists_identical(self): """Test combine metadata with identical lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ {'prerequisites': [1, 2, 3, 4]}, {'prerequisites': [1, 2, 3, 4]}, ] res = combine_metadata(*metadatas) assert res['prerequisites'] == [1, 2, 3, 4] def test_combine_lists_same_size_diff_values(self): """Test combine metadata with lists with different values.""" from satpy.dataset.metadata import combine_metadata metadatas = [ {'prerequisites': [1, 2, 3, 4]}, {'prerequisites': [1, 2, 3, 5]}, ] res = combine_metadata(*metadatas) assert 'prerequisites' not in res def test_combine_lists_different_size(self): """Test combine metadata with different size lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ {'prerequisites': [1, 2, 3, 4]}, {'prerequisites': []}, ] res = combine_metadata(*metadatas) assert 'prerequisites' not in res metadatas = [ {'prerequisites': [1, 2, 3, 4]}, {'prerequisites': [1, 2, 3]}, ] res = combine_metadata(*metadatas) assert 'prerequisites' not in 
res def test_combine_identical_numpy_scalars(self): """Test combining identical fill values.""" from satpy.dataset.metadata import combine_metadata test_metadata = [{'_FillValue': np.uint16(42)}, {'_FillValue': np.uint16(42)}] assert combine_metadata(*test_metadata) == {'_FillValue': 42} def test_combine_empty_metadata(self): """Test combining empty metadata.""" from satpy.dataset.metadata import combine_metadata test_metadata = [{}, {}] assert combine_metadata(*test_metadata) == {} def test_combine_nans(self): """Test combining nan fill values.""" from satpy.dataset.metadata import combine_metadata test_metadata = [{'_FillValue': np.nan}, {'_FillValue': np.nan}] assert combine_metadata(*test_metadata) == {'_FillValue': np.nan} def test_combine_numpy_arrays(self): """Test combining values that are numpy arrays.""" from satpy.dataset.metadata import combine_metadata test_metadata = [{'valid_range': np.array([0., 0.00032], dtype=np.float32)}, {'valid_range': np.array([0., 0.00032], dtype=np.float32)}, {'valid_range': np.array([0., 0.00032], dtype=np.float32)}] result = combine_metadata(*test_metadata) assert np.allclose(result['valid_range'], np.array([0., 0.00032], dtype=np.float32)) def test_combine_dask_arrays(self): """Test combining values that are dask arrays.""" import dask.array as da from satpy.dataset.metadata import combine_metadata test_metadata = [{'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}, {'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}] result = combine_metadata(*test_metadata) assert 'valid_range' not in result def test_combine_real_world_mda(self): """Test with real data.""" mda_objects = ({'_FillValue': np.nan, 'valid_range': np.array([0., 0.00032], dtype=np.float32), 'ancillary_variables': ['cpp_status_flag', 'cpp_conditions', 'cpp_quality', 'cpp_reff_pal', '-'], 'platform_name': 'NOAA-20', 'sensor': {'viirs'}, 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}}, {'_FillValue': np.nan, 'valid_range': np.array([0., 0.00032], dtype=np.float32), 'ancillary_variables': ['cpp_status_flag', 'cpp_conditions', 'cpp_quality', 'cpp_reff_pal', '-'], 'platform_name': 'NOAA-20', 'sensor': {'viirs'}, 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}}) expected = {'_FillValue': np.nan, 'valid_range': np.array([0., 0.00032], dtype=np.float32), 'ancillary_variables': ['cpp_status_flag', 'cpp_conditions', 'cpp_quality', 'cpp_reff_pal', '-'], 'platform_name': 'NOAA-20', 'sensor': {'viirs'}, 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) assert np.allclose(result.pop('_FillValue'), expected.pop('_FillValue'), equal_nan=True) assert np.allclose(result.pop('valid_range'), expected.pop('valid_range')) np.testing.assert_equal(result.pop('raw_metadata'), expected.pop('raw_metadata')) assert result == expected def test_combine_one_metadata_object(self): """Test combining one metadata object.""" mda_objects = ({'_FillValue': np.nan, 'valid_range': np.array([0., 0.00032], dtype=np.float32), 'ancillary_variables': ['cpp_status_flag', 'cpp_conditions', 'cpp_quality', 'cpp_reff_pal', '-'], 'platform_name': 'NOAA-20', 'sensor': {'viirs'}},) expected = {'_FillValue': np.nan, 'valid_range': np.array([0., 0.00032], dtype=np.float32), 'ancillary_variables': ['cpp_status_flag', 'cpp_conditions', 'cpp_quality', 'cpp_reff_pal', '-'], 'platform_name': 'NOAA-20', 'sensor': {'viirs'}} from satpy.dataset.metadata import combine_metadata result = 
combine_metadata(*mda_objects) assert np.allclose(result.pop('_FillValue'), expected.pop('_FillValue'), equal_nan=True) assert np.allclose(result.pop('valid_range'), expected.pop('valid_range')) assert result == expected def test_combine_dicts_close(): """Test combination of dictionaries whose values are close.""" from satpy.dataset.metadata import combine_metadata attrs = { 'raw_metadata': { 'a': 1, 'b': 'foo', 'c': [1, 2, 3], 'd': { 'e': np.str('bar'), 'f': datetime(2020, 1, 1, 12, 15, 30), 'g': np.array([1, 2, 3]), }, 'h': np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) } } attrs_close = { 'raw_metadata': { 'a': 1 + 1E-12, 'b': 'foo', 'c': np.array([1, 2, 3]) + 1E-12, 'd': { 'e': np.str('bar'), 'f': datetime(2020, 1, 1, 12, 15, 30), 'g': np.array([1, 2, 3]) + 1E-12 }, 'h': np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) } } test_metadata = [attrs, attrs_close] result = combine_metadata(*test_metadata) assert result == attrs @pytest.mark.parametrize( "test_mda", [ # a/b/c/d different {'a': np.array([1, 2, 3]), 'd': 123}, {'a': {'b': np.array([4, 5, 6]), 'c': 1.0}, 'd': 'foo'}, {'a': {'b': np.array([1, 2, 3]), 'c': 2.0}, 'd': 'foo'}, {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'bar'}, # a/b/c/d type different np.array([1, 2, 3]), {'a': {'b': 'baz', 'c': 1.0}, 'd': 'foo'}, {'a': {'b': np.array([1, 2, 3]), 'c': 'baz'}, 'd': 'foo'}, {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 1.0} ] ) def test_combine_dicts_different(test_mda): """Test combination of dictionaries differing in various ways.""" from satpy.dataset.metadata import combine_metadata mda = {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'foo'} test_metadata = [{'raw_metadata': mda}, {'raw_metadata': test_mda}] result = combine_metadata(*test_metadata) assert not result def test_dataid(): """Test the DataID object.""" from satpy.dataset.dataid import DataID, ModifierTuple, ValueList, WavelengthRange # Check that enum is translated to type. did = make_dataid() assert issubclass(did._id_keys['calibration']['type'], ValueList) assert 'enum' not in did._id_keys['calibration'] # Check that None is never a valid value did = make_dataid(name='cheese_shops', resolution=None) assert 'resolution' not in did assert 'None' not in did.__repr__() with pytest.raises(ValueError): make_dataid(name=None, resolution=1000) # Check that defaults are applied correctly assert did['modifiers'] == ModifierTuple() # Check that from_dict creates a distinct instance... 
did2 = did.from_dict(dict(name='cheese_shops', resolution=None)) assert did is not did2 # ...But is equal assert did2 == did # Check that the instance is immutable with pytest.raises(TypeError): did['resolution'] = 1000 # Check that a missing required field crashes with pytest.raises(ValueError): make_dataid(resolution=1000) # Check to_dict assert did.to_dict() == dict(name='cheese_shops', modifiers=tuple()) # Check repr did = make_dataid(name='VIS008', resolution=111) assert repr(did) == "DataID(name='VIS008', resolution=111, modifiers=())" # Check inequality default_id_keys_config = {'name': None, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'reflectance', 'brightness_temperature', 'radiance', 'counts' ] }, 'modifiers': { 'default': ModifierTuple(), 'type': ModifierTuple, }, } assert DataID(default_id_keys_config, wavelength=10) != DataID(default_id_keys_config, name="VIS006") def test_dataid_equal_if_enums_different(): """Check that dataids with different enums but same items are equal.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange id_keys_config1 = {'name': None, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'c1', 'c2', 'c3', ] }, 'modifiers': { 'default': ModifierTuple(), 'type': ModifierTuple, }, } id_keys_config2 = {'name': None, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'c1', 'c1.5', 'c2', 'c2.5', 'c3' ] }, 'modifiers': { 'default': ModifierTuple(), 'type': ModifierTuple, }, } assert DataID(id_keys_config1, name='ni', calibration='c2') == DataID(id_keys_config2, name="ni", calibration='c2') def test_dataid_copy(): """Test copying a DataID.""" from copy import deepcopy from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc did = DataID(dikc, name="a", resolution=1000) did2 = deepcopy(did) assert did2 == did assert did2.id_keys == did.id_keys def test_dataid_pickle(): """Test dataid pickling roundtrip.""" import pickle from satpy.tests.utils import make_dataid did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') assert did == pickle.loads(pickle.dumps(did)) def test_dataid_elements_picklable(): """Test individual elements of DataID can be pickled. In some cases, like in the base reader classes, the elements of a DataID are extracted and stored in a separate dictionary. This means that the internal/fancy pickle handling of DataID does not play a part. 
""" import pickle from satpy.tests.utils import make_dataid did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') for value in did.values(): pickled_value = pickle.loads(pickle.dumps(value)) assert value == pickled_value class TestDataQuery: """Test case for data queries.""" def test_dataquery(self): """Test DataQuery objects.""" from satpy.dataset import DataQuery DataQuery(name='cheese_shops') # Check repr did = DataQuery(name='VIS008', resolution=111) assert repr(did) == "DataQuery(name='VIS008', resolution=111)" # Check inequality assert DataQuery(wavelength=10) != DataQuery(name="VIS006") def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset import DataQuery d1 = DataQuery(name="a", wavelength=0.2, modifiers=('hej',)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) assert d1.is_modified() assert not d2.is_modified() def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset import DataQuery d1 = DataQuery(name="a", wavelength=0.2, modifiers=('hej',)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) assert not d1.create_less_modified_query()['modifiers'] assert not d2.create_less_modified_query()['modifiers'] class TestIDQueryInteractions(unittest.TestCase): """Test the interactions between DataIDs and DataQuerys.""" def setUp(self) -> None: """Set up the test case.""" self.default_id_keys_config = { 'name': { 'required': True, }, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'reflectance', 'brightness_temperature', 'radiance', 'counts' ] }, 'modifiers': { 'default': ModifierTuple(), 'type': ModifierTuple, }, } def test_hash_equality(self): """Test hash equality.""" dq = DataQuery(modifiers=tuple(), name='cheese_shops') did = DataID(self.default_id_keys_config, name='cheese_shops') assert hash(dq) == hash(did) def test_id_filtering(self): """Check did filtering.""" dq = DataQuery(modifiers=tuple(), name='cheese_shops') did = DataID(self.default_id_keys_config, name='cheese_shops') did2 = DataID(self.default_id_keys_config, name='ni') res = dq.filter_dataids([did2, did]) assert len(res) == 1 assert res[0] == did dataid_container = [DataID(self.default_id_keys_config, name='ds1', resolution=250, calibration='reflectance', modifiers=tuple())] dq = DataQuery(wavelength=0.22, modifiers=tuple()) assert len(dq.filter_dataids(dataid_container)) == 0 dataid_container = [DataID(minimal_default_keys_config, name='natural_color')] dq = DataQuery(name='natural_color', resolution=250) assert len(dq.filter_dataids(dataid_container)) == 1 dq = make_dsq(wavelength=0.22, modifiers=('mod1',)) did = make_cid(name='static_image') assert len(dq.filter_dataids([did])) == 0 def test_inequality(self): """Check (in)equality.""" assert DataQuery(wavelength=10) != DataID(self.default_id_keys_config, name="VIS006") def test_sort_dataids(self): """Check dataid sorting.""" dq = DataQuery(name='cheese_shops', wavelength=2, modifiers='*') did = DataID(self.default_id_keys_config, name='cheese_shops', wavelength=(1, 2, 3)) did2 = DataID(self.default_id_keys_config, name='cheese_shops', wavelength=(1.1, 2.1, 3.1)) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert np.allclose(distances, [0, 0.1]) dq = DataQuery(name='cheese_shops') did = DataID(self.default_id_keys_config, name='cheese_shops', resolution=200) did2 = DataID(self.default_id_keys_config, name='cheese_shops', 
resolution=400) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] did = DataID(self.default_id_keys_config, name='cheese_shops', calibration='counts') did2 = DataID(self.default_id_keys_config, name='cheese_shops', calibration='reflectance') dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did2, did] assert distances[0] < distances[1] did = DataID(self.default_id_keys_config, name='cheese_shops', modifiers=tuple()) did2 = DataID(self.default_id_keys_config, name='cheese_shops', modifiers=tuple(['out_of_stock'])) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] def test_sort_dataids_with_different_set_of_keys(self): """Check sorting data ids when the query has a different set of keys.""" dq = DataQuery(name='solar_zenith_angle', calibration='reflectance') dids = [DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=1000, modifiers=()), DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=500, modifiers=()), DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=250, modifiers=())] dsids, distances = dq.sort_dataids(dids) assert distances[0] < distances[1] assert distances[1] < distances[2] def test_seviri_hrv_has_priority_over_vis008(self): """Check that the HRV channel has priority over VIS008 when querying 0.8µm.""" dids = [DataID(self.default_id_keys_config, name='HRV', wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, calibration="reflectance", modifiers=()), DataID(self.default_id_keys_config, name='HRV', wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, calibration="radiance", modifiers=()), DataID(self.default_id_keys_config, name='HRV', wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, calibration="counts", modifiers=()), DataID(self.default_id_keys_config, name='VIS006', wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), resolution=3000.403165817, calibration="reflectance", modifiers=()), DataID(self.default_id_keys_config, name='VIS006', wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), resolution=3000.403165817, calibration="radiance", modifiers=()), DataID(self.default_id_keys_config, name='VIS006', wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), resolution=3000.403165817, calibration="counts", modifiers=()), DataID(self.default_id_keys_config, name='VIS008', wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), resolution=3000.403165817, calibration="reflectance", modifiers=()), DataID(self.default_id_keys_config, name='VIS008', wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), resolution=3000.403165817, calibration="radiance", modifiers=()), DataID(self.default_id_keys_config, name='VIS008', wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), resolution=3000.403165817, calibration="counts", modifiers=())] dq = DataQuery(wavelength=0.8) res, distances = dq.sort_dataids(dids) assert res[0].name == "HRV" def test_frequency_double_side_band_class_method_convert(): """Test the frequency double side band object: test the class method convert.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand frq_dsb = FrequencyDoubleSideBand(183, 7, 2) res = frq_dsb.convert(185) assert res == 
185 res = frq_dsb.convert({'central': 185, 'side': 7, 'bandwidth': 2}) assert res == FrequencyDoubleSideBand(185, 7, 2) def test_frequency_double_side_band_channel_str(): """Test the frequency double side band object: test the band description.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand frq_dsb1 = FrequencyDoubleSideBand(183, 7, 2) frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, 'MHz') assert str(frq_dsb1) == "183 GHz (7_2 GHz)" assert str(frq_dsb2) == "183000 MHz (7000_2000 MHz)" def test_frequency_double_side_band_channel_equality(): """Test the frequency double side band object: check if two bands are 'equal'.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand frq_dsb = FrequencyDoubleSideBand(183, 7, 2) assert frq_dsb is not None assert 183 != frq_dsb assert 190 == frq_dsb assert 176 == frq_dsb assert 175.5 == frq_dsb assert frq_dsb != FrequencyDoubleSideBand(183, 6.5, 3) frq_dsb = None assert FrequencyDoubleSideBand(183, 7, 2) != frq_dsb assert frq_dsb < FrequencyDoubleSideBand(183, 7, 2) assert FrequencyDoubleSideBand(182, 7, 2) < FrequencyDoubleSideBand(183, 7, 2) assert FrequencyDoubleSideBand(184, 7, 2) > FrequencyDoubleSideBand(183, 7, 2) def test_frequency_double_side_band_channel_distances(): """Test the frequency double side band object: get the distance between two bands.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand frq_dsb = FrequencyDoubleSideBand(183, 7, 2) mydist = frq_dsb.distance(175.5) assert mydist == 0.5 mydist = frq_dsb.distance(190.5) assert mydist == 0.5 np.testing.assert_almost_equal(frq_dsb.distance(175.6), 0.4) np.testing.assert_almost_equal(frq_dsb.distance(190.1), 0.1) mydist = frq_dsb.distance(185) assert mydist == np.inf mydist = frq_dsb.distance((183, 7.0, 2)) assert mydist == 0 mydist = frq_dsb.distance((183, 7.0, 1)) assert mydist == 0 mydist = frq_dsb.distance(FrequencyDoubleSideBand(183, 7.0, 2)) assert mydist == 0 def test_frequency_double_side_band_channel_containment(): """Test the frequency double side band object: check if one band contains another.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand frq_dsb = FrequencyDoubleSideBand(183, 7, 2) assert 175.5 in frq_dsb assert frq_dsb in FrequencyDoubleSideBand(183, 6.5, 3) assert frq_dsb not in FrequencyDoubleSideBand(183, 4, 2) with pytest.raises(NotImplementedError): assert frq_dsb in FrequencyDoubleSideBand(183, 6.5, 3, 'MHz') frq_dsb = None assert (frq_dsb in FrequencyDoubleSideBand(183, 3, 2)) is False assert '183' not in FrequencyDoubleSideBand(183, 3, 2) def test_frequency_range_class_method_convert(): """Test the frequency range object: test the class method convert.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyRange frq_dsb = FrequencyRange(89, 2) res = frq_dsb.convert(89) assert res == 89 res = frq_dsb.convert({'central': 89, 'bandwidth': 2}) assert res == FrequencyRange(89, 2) def test_frequency_range_channel_equality(): """Test the frequency range object: check if two bands are 'equal'.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyRange frqr = FrequencyRange(2, 1) assert frqr is not None assert 1.7 == frqr assert 1.2 != frqr assert frqr == (2, 1) assert frqr == (2, 1, 'GHz') def test_frequency_range_channel_containment(): """Test the frequency range object: channel containment.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyRange frqr = FrequencyRange(2, 1) assert 1.7 in frqr assert 2.8 not in frqr with pytest.raises(NotImplementedError): assert frqr 
in FrequencyRange(89, 2, 'MHz') frqr = None assert (frqr in FrequencyRange(89, 2)) is False assert '89' not in FrequencyRange(89, 2) def test_frequency_range_channel_distances(): """Test the frequency range object: derive distances between bands.""" from satpy.readers.aapp_mhs_amsub_l1c import FrequencyRange frqr = FrequencyRange(190.0, 2) mydist = frqr.distance(FrequencyRange(190, 2)) assert mydist == 0 mydist = frqr.distance(FrequencyRange(189.5, 2)) assert mydist == np.inf mydist = frqr.distance(189.5) assert mydist == 0.5 mydist = frqr.distance(188.0) assert mydist == np.inf def test_wavelength_range(): """Test the wavelength range object.""" from satpy.dataset.dataid import WavelengthRange wr = WavelengthRange(1, 2, 3) assert 1.2 == wr assert .9 != wr assert wr == (1, 2, 3) assert wr == (1, 2, 3, 'µm') # Check containement assert 1.2 in wr assert .9 not in wr assert WavelengthRange(1, 2, 3) in wr assert WavelengthRange(1.1, 2.2, 3.3) not in wr assert WavelengthRange(1.2, 2, 2.8) in wr assert WavelengthRange(10, 20, 30) not in wr assert 'bla' not in wr assert None not in wr wr2 = WavelengthRange(1, 2, 3, 'µm') assert wr2 in wr wr2 = WavelengthRange(1, 2, 3, 'nm') with pytest.raises(NotImplementedError): wr2 in wr # noqa # Check __str__ assert str(wr) == "2 µm (1-3 µm)" assert str(wr2) == "2 nm (1-3 nm)" wr = WavelengthRange(10.5, 11.5, 12.5) np.testing.assert_almost_equal(wr.distance(11.1), 0.4) def test_wavelength_range_cf_roundtrip(): """Test the wavelength range object roundtrip to cf.""" from satpy.dataset.dataid import WavelengthRange wr = WavelengthRange(1, 2, 3) assert WavelengthRange.from_cf(wr.to_cf()) == wr assert WavelengthRange.from_cf([str(item) for item in wr]) == wr satpy-0.34.0/satpy/tests/test_demo.py000066400000000000000000000464421420401153000175750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the satpy.demo module.""" from __future__ import annotations import contextlib import io import os import sys import tarfile import unittest from collections import defaultdict from unittest import mock class _GlobHelper(object): """Create side effect function for mocking gcsfs glob method.""" def __init__(self, num_results): """Initialize side_effect function for mocking gcsfs glob method. Args: num_results (int or list): Number of results for each glob call to return. If a list then number of results per call. The last number is used for any additional calls. 
""" self.current_call = 0 if not isinstance(num_results, (list, tuple)): num_results = [num_results] self.num_results = num_results def __call__(self, pattern): """Mimic glob by being used as the side effect function.""" try: num_results = self.num_results[self.current_call] except IndexError: num_results = self.num_results[-1] self.current_call += 1 return [pattern + '.{:03d}'.format(idx) for idx in range(num_results)] class TestDemo(unittest.TestCase): """Test demo data download functions.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() self.prev_dir = os.getcwd() os.chdir(self.base_dir) def tearDown(self): """Remove the temporary directory created for a test.""" os.chdir(self.prev_dir) try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @mock.patch('satpy.demo._google_cloud_platform.gcsfs') def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_us_midlatitude_cyclone_abi gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] # expected 16 files, got 2 self.assertRaises(AssertionError, get_us_midlatitude_cyclone_abi) # unknown access method self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method='unknown') gcsfs_inst.glob.return_value = ['a.nc'] * 16 filenames = get_us_midlatitude_cyclone_abi() expected = os.path.join('.', 'abi_l1b', '20190314_us_midlatitude_cyclone', 'a.nc') for fn in filenames: self.assertEqual(expected, fn) @mock.patch('satpy.demo._google_cloud_platform.gcsfs') def test_get_hurricane_florence_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_hurricane_florence_abi gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst # only return 5 results total gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 self.assertRaises(AssertionError, get_hurricane_florence_abi) self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method='unknown') gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() self.assertEqual(10 * 16, len(filenames)) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4]) self.assertEqual(10 * 3, len(filenames)) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4], num_frames=5) self.assertEqual(5 * 3, len(filenames)) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(num_frames=5) self.assertEqual(5 * 16, len(filenames)) class TestGCPUtils(unittest.TestCase): """Test Google Cloud Platform utilities.""" @mock.patch('satpy.demo._google_cloud_platform.urlopen') def test_is_gcp_instance(self, uo): """Test is_google_cloud_instance.""" from satpy.demo._google_cloud_platform import URLError, is_google_cloud_instance uo.side_effect = URLError("Test Environment") self.assertFalse(is_google_cloud_instance()) @mock.patch('satpy.demo._google_cloud_platform.gcsfs') def test_get_bucket_files(self, gcsfs_mod): """Test get_bucket_files basic cases.""" from satpy.demo._google_cloud_platform import get_bucket_files gcsfs_mod.GCSFileSystem = 
mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] filenames = get_bucket_files('*.nc', '.') expected = [os.path.join('.', 'a.nc'), os.path.join('.', 'b.nc')] self.assertEqual(expected, filenames) gcsfs_inst.glob.side_effect = _GlobHelper(10) filenames = get_bucket_files(['*.nc', '*.txt'], '.', pattern_slice=slice(2, 5)) self.assertEqual(len(filenames), 3 * 2) gcsfs_inst.glob.side_effect = None # reset mock side effect gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] self.assertRaises(OSError, get_bucket_files, '*.nc', 'does_not_exist') open('a.nc', 'w').close() # touch the file gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ['a.nc'] filenames = get_bucket_files('*.nc', '.') self.assertEqual([os.path.join('.', 'a.nc')], filenames) gcsfs_inst.get.assert_not_called() # force redownload gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ['a.nc'] filenames = get_bucket_files('*.nc', '.', force=True) self.assertEqual([os.path.join('.', 'a.nc')], filenames) gcsfs_inst.get.assert_called_once() # if we don't get any results then we expect an exception gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = [] self.assertRaises(OSError, get_bucket_files, '*.nc', '.') @mock.patch('satpy.demo._google_cloud_platform.gcsfs', None) def test_no_gcsfs(self): """Test that 'gcsfs' is required.""" from satpy.demo._google_cloud_platform import get_bucket_files self.assertRaises(RuntimeError, get_bucket_files, '*.nc', '.') class TestAHIDemoDownload: """Test the AHI demo data download.""" @mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()}) def test_ahi_full_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir from satpy.demo import download_typhoon_surigae_ahi files = download_typhoon_surigae_ahi(base_dir=gettempdir()) assert len(files) == 160 @mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()}) def test_ahi_partial_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir from satpy.demo import download_typhoon_surigae_ahi files = download_typhoon_surigae_ahi(base_dir=gettempdir(), segments=[4, 9], channels=[1, 2, 3]) assert len(files) == 6 def _create_and_populate_dummy_tarfile(fn): """Populate a dummy tarfile with dummy files.""" fn.parent.mkdir(exist_ok=True, parents=True) with tarfile.open(fn, mode="x:gz") as tf: for i in range(3): with open(f"fci-rc{i:d}", "w"): pass tf.addfile(tf.gettarinfo(name=f"fci-rc{i:d}")) def test_fci_download(tmp_path, monkeypatch): """Test download of FCI test data.""" from satpy.demo import download_fci_test_data monkeypatch.chdir(tmp_path) def fake_download_url(url, nm): """Create a dummy tarfile. Create a dummy tarfile. Intended as a drop-in replacement for demo.utils.download_url. 
""" _create_and_populate_dummy_tarfile(nm) with mock.patch("satpy.demo.fci.utils.download_url", new=fake_download_url): files = download_fci_test_data(tmp_path) assert len(files) == 3 assert files == ["fci-rc0", "fci-rc1", "fci-rc2"] for f in files: assert os.path.exists(f) class _FakeRequest: """Fake object to act like a requests return value when downloading a file.""" requests_log: list[str] = [] def __init__(self, url, stream=None): self._filename = os.path.basename(url) self.headers = {} self.requests_log.append(url) del stream # just mimicking requests 'get' def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): return def raise_for_status(self): return def _get_fake_bytesio(self): filelike_obj = io.BytesIO() filelike_obj.write(self._filename.encode("ascii")) filelike_obj.seek(0) return filelike_obj def iter_content(self, chunk_size): """Return generator of 'chunk_size' at a time.""" bytes_io = self._get_fake_bytesio() x = bytes_io.read(chunk_size) while x: yield x x = bytes_io.read(chunk_size) @mock.patch('satpy.demo.utils.requests') class TestVIIRSSDRDemoDownload: """Test VIIRS SDR downloading.""" ALL_BAND_PREFIXES = ("SVI01", "SVI02", "SVI03", "SVI04", "SVI05", "SVM01", "SVM02", "SVM03", "SVM04", "SVM05", "SVM06", "SVM07", "SVM08", "SVM09", "SVM10", "SVM11", "SVM12", "SVM13", "SVM14", "SVM15", "SVM16", "SVDNB") ALL_GEO_PREFIXES = ("GITCO", "GMTCO", "GDNBO") def test_download(self, _requests, tmpdir): """Test downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 _requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) assert len(files) == 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation) self._assert_bands_in_filenames_and_contents(self.ALL_BAND_PREFIXES + self.ALL_GEO_PREFIXES, files, 10) def test_do_not_download_the_files_twice(self, _requests, tmpdir): """Test re-downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() _requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) new_files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) total_num_files = 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation) assert len(new_files) == total_num_files assert get_mock.call_count == total_num_files assert new_files == files def test_download_channels_num_granules_im(self, _requests, tmpdir): """Test downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 _requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) assert len(files) == 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation) self._assert_bands_in_filenames_and_contents(("SVI01", "SVM01", "GITCO", "GMTCO"), files, 10) def test_download_channels_num_granules_im_twice(self, _requests, tmpdir): """Test re-downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() _requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) num_first_batch = 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation) assert len(files) == num_first_batch files = 
get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01"), granules=(2, 3)) assert len(files) == 2 * (1 + 1 + 2) # 2 granules * (1 I band + 1 M band + 2 geolocation) assert get_mock.call_count == num_first_batch def test_download_channels_num_granules_dnb(self, _requests, tmpdir): """Test downloading and re-downloading VIIRS SDR DNB data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 _requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("DNB",), granules=(5, 6, 7, 8, 9)) assert len(files) == 5 * (1 + 1) # 5 granules * (1 DNB + 1 geolocation) self._assert_bands_in_filenames_and_contents(("SVDNB", "GDNBO"), files, 5) def _assert_bands_in_filenames_and_contents(self, band_prefixes, filenames, num_files_per_band): self._assert_bands_in_filenames(band_prefixes, filenames, num_files_per_band) self._assert_file_contents(filenames) @staticmethod def _assert_bands_in_filenames(band_prefixes, filenames, num_files_per_band): for band_name in band_prefixes: files_for_band = [x for x in filenames if band_name in x] assert files_for_band assert len(set(files_for_band)) == num_files_per_band @staticmethod def _assert_file_contents(filenames): for fn in filenames: with open(fn, "rb") as fake_hdf5_file: assert fake_hdf5_file.read().decode("ascii") == os.path.basename(fn) @contextlib.contextmanager def mock_filesystem(): """Create a mock filesystem, patching `open` and `os.path.isfile`.""" class FakeFile: """Fake file based on BytesIO.""" def __init__(self): self.io = io.BytesIO() def __enter__(self): return self.io def __exit__(self, *args, **kwargs): self.io.seek(0) fake_fs = defaultdict(FakeFile) mo = mock.mock_open() def fun(filename, *args, **kwargs): return fake_fs[filename] mo.side_effect = fun with mock.patch("builtins.open", mo): with mock.patch("os.path.isfile") as isfile: isfile.side_effect = (lambda target: target in fake_fs) yield def test_fs(): """Test the mock filesystem.""" with mock_filesystem(): with open("somefile", "w") as fd: fd.write(b"bla") with open("someotherfile", "w") as fd: fd.write(b"bli") with open("somefile", "r") as fd: assert fd.read() == b"bla" with open("someotherfile", "r") as fd: assert fd.read() == b"bli" assert os.path.isfile("somefile") assert not os.path.isfile("missingfile") class TestSEVIRIHRITDemoDownload(unittest.TestCase): """Test case for downloading an hrit tarball.""" def setUp(self): """Set up the test case.""" from satpy.demo.seviri_hrit import generate_subset_of_filenames self.subdir = os.path.join(".", "seviri_hrit", "20180228_1500") self.files = generate_subset_of_filenames(base_dir=self.subdir) self.patcher = mock.patch('satpy.demo.utils.requests.get', autospec=True) self.get_mock = self.patcher.start() _FakeRequest.requests_log = [] def tearDown(self): """Tear down the test case.""" self.patcher.stop() def test_download_gets_files_with_contents(self): """Test downloading SEVIRI HRIT data with content.""" from satpy.demo import download_seviri_hrit_20180228_1500 self.get_mock.side_effect = _FakeRequest with mock_filesystem(): files = download_seviri_hrit_20180228_1500() assert len(files) == 114 assert set(files) == set(self.files) for the_file in files: with open(the_file, mode="r") as fd: assert fd.read().decode("utf8") == os.path.basename(the_file) def test_download_from_zenodo(self): """Test downloading SEVIRI HRIT data from zenodo.""" from satpy.demo import download_seviri_hrit_20180228_1500 self.get_mock.side_effect = 
_FakeRequest with mock_filesystem(): download_seviri_hrit_20180228_1500() assert _FakeRequest.requests_log[0].startswith("https://zenodo.org") def test_download_a_subset_of_files(self): """Test downloading a subset of files.""" from satpy.demo import download_seviri_hrit_20180228_1500 with mock_filesystem(): files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) assert set(files) == set(os.path.join(self.subdir, filename) for filename in [ 'H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__', 'H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__', 'H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__', 'H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__', 'H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__', 'H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__', ]) def test_do_not_download_same_file_twice(self): """Test that files are not downloaded twice.""" from satpy.demo import download_seviri_hrit_20180228_1500 get_mock = mock.MagicMock() self.get_mock.return_value.__enter__ = get_mock with mock_filesystem(): files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) new_files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) assert set(files) == set(new_files) assert get_mock.call_count == 6 def test_download_to_output_directory(self): """Test downloading to an output directory.""" from tempfile import gettempdir from satpy.demo import download_seviri_hrit_20180228_1500 with mock_filesystem(): base_dir = gettempdir() files = download_seviri_hrit_20180228_1500(base_dir=base_dir) assert files[0].startswith(base_dir) satpy-0.34.0/satpy/tests/test_dependency_tree.py000066400000000000000000000246461420401153000220100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for the dependency tree class and dependencies.""" import os import unittest from satpy.dependency_tree import DependencyTree from satpy.tests.utils import make_cid, make_dataid class TestDependencyTree(unittest.TestCase): """Test the dependency tree. 
This is what we are working with:: None (No Data) +DataID(name='comp19') + +DataID(name='ds5', resolution=250, modifiers=('res_change',)) + + +DataID(name='ds5', resolution=250, modifiers=()) + + +__EMPTY_LEAF_SENTINEL__ (No Data) + +DataID(name='comp13') + + +DataID(name='ds5', resolution=250, modifiers=('res_change',)) + + + +DataID(name='ds5', resolution=250, modifiers=()) + + + +__EMPTY_LEAF_SENTINEL__ (No Data) + +DataID(name='ds2', resolution=250, calibration=, modifiers=()) """ def setUp(self): """Set up the test tree.""" self.dependency_tree = DependencyTree(None, None, None) composite_1 = make_cid(name="comp19") dependency_1 = make_dataid(name="ds5", resolution=250, modifiers=("res_change",)) dependency_1_1 = make_dataid(name="ds5", resolution=250, modifiers=tuple()) node_composite_1 = self.dependency_tree.add_leaf(composite_1) node_dependency_1 = self.dependency_tree.add_leaf(dependency_1, node_composite_1) self.dependency_tree.add_leaf(dependency_1_1, node_dependency_1) # ToDo: do we really want then empty node to be at the same level as the unmodified data? node_dependency_1.add_child(self.dependency_tree.empty_node) dependency_2 = make_cid(name="comp13") dependency_2_1 = dependency_1 node_dependency_2 = self.dependency_tree.add_leaf(dependency_2, node_composite_1) self.dependency_tree.add_leaf(dependency_2_1, node_dependency_2) # We don't need to add the unmodified dependency a second time. dependency_3 = make_dataid(name='ds2', resolution=250, calibration="reflectance", modifiers=tuple()) self.dependency_tree.add_leaf(dependency_3, node_composite_1) @staticmethod def _nodes_equal(node_list1, node_list2): names1 = [node.name for node in node_list1] names2 = [node.name for node in node_list2] return sorted(names1) == sorted(names2) def test_copy_preserves_all_nodes(self): """Test that dependency tree copy preserves all nodes.""" new_dependency_tree = self.dependency_tree.copy() assert self.dependency_tree.empty_node is new_dependency_tree.empty_node assert self._nodes_equal(self.dependency_tree.leaves(), new_dependency_tree.leaves()) assert self._nodes_equal(self.dependency_tree.trunk(), new_dependency_tree.trunk()) # make sure that we can get access to sub-nodes c13_id = make_cid(name='comp13') assert self._nodes_equal(self.dependency_tree.trunk(limit_nodes_to=[c13_id]), new_dependency_tree.trunk(limit_nodes_to=[c13_id])) def test_copy_preserves_unique_empty_node(self): """Test that dependency tree copy preserves the uniqueness of the empty node.""" new_dependency_tree = self.dependency_tree.copy() assert self.dependency_tree.empty_node is new_dependency_tree.empty_node self.assertIs(self.dependency_tree._root.children[0].children[0].children[1], self.dependency_tree.empty_node) self.assertIs(new_dependency_tree._root.children[0].children[0].children[1], self.dependency_tree.empty_node) def test_new_dependency_tree_preserves_unique_empty_node(self): """Test that dependency tree instantiation preserves the uniqueness of the empty node.""" new_dependency_tree = DependencyTree(None, None, None) assert self.dependency_tree.empty_node is new_dependency_tree.empty_node class TestMissingDependencies(unittest.TestCase): """Test the MissingDependencies exception.""" def test_new_missing_dependencies(self): """Test new MissingDependencies.""" from satpy.node import MissingDependencies error = MissingDependencies('bla') assert error.missing_dependencies == 'bla' def test_new_missing_dependencies_with_message(self): """Test new MissingDependencies with a message.""" from satpy.node import 
MissingDependencies error = MissingDependencies('bla', "This is a message") assert 'This is a message' in str(error) class TestMultipleResolutionSameChannelDependency(unittest.TestCase): """Test that MODIS situations where the same channel is available at multiple resolution works.""" def test_modis_overview_1000m(self): """Test a modis overview dependency calculation with resolution fixed to 1000m.""" from satpy import DataQuery from satpy._config import PACKAGE_CONFIG_PATH from satpy.composites import GenericCompositor from satpy.dataset import DatasetDict from satpy.modifiers.geometry import SunZenithCorrector from satpy.readers.yaml_reader import FileYAMLReader config_file = os.path.join(PACKAGE_CONFIG_PATH, 'readers', 'modis_l1b.yaml') self.reader_instance = FileYAMLReader.from_config_files(config_file) overview = {'_satpy_id': make_dataid(name='overview'), 'name': 'overview', 'optional_prerequisites': [], 'prerequisites': [DataQuery(name='1', modifiers=('sunz_corrected',)), DataQuery(name='2', modifiers=('sunz_corrected',)), DataQuery(name='31')], 'standard_name': 'overview'} compositors = {'modis': DatasetDict()} compositors['modis']['overview'] = GenericCompositor(**overview) modifiers = {'modis': {'sunz_corrected': (SunZenithCorrector, {'optional_prerequisites': ['solar_zenith_angle'], 'name': 'sunz_corrected', 'prerequisites': []})}} dep_tree = DependencyTree({'modis_l1b': self.reader_instance}, compositors, modifiers) dep_tree.populate_with_keys({'overview'}, DataQuery(resolution=1000)) for key in dep_tree._all_nodes.keys(): assert key.get('resolution', 1000) == 1000 class TestMultipleSensors(unittest.TestCase): """Test cases where multiple sensors are available. This is what we are working with:: None (No Data) +DataID(name='comp19') + +DataID(name='ds5', resolution=250, modifiers=('res_change',)) + + +DataID(name='ds5', resolution=250, modifiers=()) + + +__EMPTY_LEAF_SENTINEL__ (No Data) + +DataID(name='comp13') + + +DataID(name='ds5', resolution=250, modifiers=('res_change',)) + + + +DataID(name='ds5', resolution=250, modifiers=()) + + + +__EMPTY_LEAF_SENTINEL__ (No Data) + +DataID(name='ds2', resolution=250, calibration=, modifiers=()) """ def setUp(self): """Set up the test tree.""" from satpy.composites import CompositeBase from satpy.dataset.data_dict import DatasetDict from satpy.modifiers import ModifierBase class _FakeCompositor(CompositeBase): def __init__(self, ret_val, *args, **kwargs): self.ret_val = ret_val super().__init__(*args, **kwargs) def __call__(self, *args, **kwargs): return self.ret_val class _FakeModifier(ModifierBase): def __init__(self, ret_val, *args, **kwargs): self.ret_val = ret_val super().__init__(*args, **kwargs) def __call__(self, *args, **kwargs): return self.ret_val comp1_sensor1 = _FakeCompositor(1, "comp1") comp1_sensor2 = _FakeCompositor(2, "comp1") # create the dictionary one element at a time to force "incorrect" order # (sensor2 comes before sensor1, but results should be alphabetical order) compositors = {} compositors['sensor2'] = s2_comps = DatasetDict() compositors['sensor1'] = s1_comps = DatasetDict() c1_s2_id = make_cid(name='comp1', resolution=1000) c1_s1_id = make_cid(name='comp1', resolution=500) s2_comps[c1_s2_id] = comp1_sensor2 s1_comps[c1_s1_id] = comp1_sensor1 modifiers = {} modifiers['sensor2'] = s2_mods = {} modifiers['sensor1'] = s1_mods = {} s2_mods['mod1'] = (_FakeModifier, {'ret_val': 2}) s1_mods['mod1'] = (_FakeModifier, {'ret_val': 1}) self.dependency_tree = DependencyTree({}, compositors, modifiers) # manually add 
a leaf so we don't have to mock a reader ds5 = make_dataid(name="ds5", resolution=250, modifiers=tuple()) self.dependency_tree.add_leaf(ds5) def test_compositor_loaded_sensor_order(self): """Test that a compositor is loaded from the first alphabetical sensor.""" self.dependency_tree.populate_with_keys({'comp1'}) comp_nodes = self.dependency_tree.trunk() self.assertEqual(len(comp_nodes), 1) self.assertEqual(comp_nodes[0].name.resolution, 500) def test_modifier_loaded_sensor_order(self): """Test that a modifier is loaded from the first alphabetical sensor.""" from satpy import DataQuery dq = DataQuery(name='ds5', modifiers=('mod1',)) self.dependency_tree.populate_with_keys({dq}) comp_nodes = self.dependency_tree.trunk() self.assertEqual(len(comp_nodes), 1) self.assertEqual(comp_nodes[0].data[0].ret_val, 1) satpy-0.34.0/satpy/tests/test_file_handlers.py000066400000000000000000000160101420401153000214340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """test file handler baseclass.""" import unittest from unittest import mock import numpy as np import pytest from satpy.readers.file_handlers import BaseFileHandler from satpy.tests.utils import FakeFileHandler class TestBaseFileHandler(unittest.TestCase): """Test the BaseFileHandler.""" def setUp(self): """Set up the test.""" self._old_set = BaseFileHandler.__abstractmethods__ BaseFileHandler._abstractmethods__ = set() self.fh = BaseFileHandler( 'filename', {'filename_info': 'bla'}, 'filetype_info') def test_combine_times(self): """Combine times.""" info1 = {'start_time': 1} info2 = {'start_time': 2} res = self.fh.combine_info([info1, info2]) exp = {'start_time': 1} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) exp = {'start_time': 1} self.assertDictEqual(res, exp) info1 = {'end_time': 1} info2 = {'end_time': 2} res = self.fh.combine_info([info1, info2]) exp = {'end_time': 2} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) exp = {'end_time': 2} self.assertDictEqual(res, exp) def test_combine_orbits(self): """Combine orbits.""" info1 = {'start_orbit': 1} info2 = {'start_orbit': 2} res = self.fh.combine_info([info1, info2]) exp = {'start_orbit': 1} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) exp = {'start_orbit': 1} self.assertDictEqual(res, exp) info1 = {'end_orbit': 1} info2 = {'end_orbit': 2} res = self.fh.combine_info([info1, info2]) exp = {'end_orbit': 2} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) exp = {'end_orbit': 2} self.assertDictEqual(res, exp) @mock.patch('satpy.readers.file_handlers.SwathDefinition') def test_combine_area(self, sdef): """Combine area.""" area1 = mock.MagicMock() area1.lons = np.arange(5) area1.lats = np.arange(5) area1.name = 'area1' area2 = mock.MagicMock() area2.lons = np.arange(5) area2.lats = np.arange(5) area2.name = 'area2' info1 = 
{'area': area1} info2 = {'area': area2} self.fh.combine_info([info1, info2]) self.assertTupleEqual(sdef.call_args[1]['lons'].shape, (2, 5)) self.assertTupleEqual(sdef.call_args[1]['lats'].shape, (2, 5)) self.assertEqual(sdef.return_value.name, 'area1_area2') def test_combine_orbital_parameters(self): """Combine orbital parameters.""" info1 = {'orbital_parameters': {'projection_longitude': 1, 'projection_latitude': 1, 'projection_altitude': 1, 'satellite_nominal_longitude': 1, 'satellite_nominal_latitude': 1, 'satellite_actual_longitude': 1, 'satellite_actual_latitude': 1, 'satellite_actual_altitude': 1, 'nadir_longitude': 1, 'nadir_latitude': 1, 'only_in_1': False}} info2 = {'orbital_parameters': {'projection_longitude': 2, 'projection_latitude': 2, 'projection_altitude': 2, 'satellite_nominal_longitude': 2, 'satellite_nominal_latitude': 2, 'satellite_actual_longitude': 2, 'satellite_actual_latitude': 2, 'satellite_actual_altitude': 2, 'nadir_longitude': 2, 'nadir_latitude': 2, 'only_in_2': True}} exp = {'orbital_parameters': {'projection_longitude': 1.5, 'projection_latitude': 1.5, 'projection_altitude': 1.5, 'satellite_nominal_longitude': 1.5, 'satellite_nominal_latitude': 1.5, 'satellite_actual_longitude': 1.5, 'satellite_actual_latitude': 1.5, 'satellite_actual_altitude': 1.5, 'nadir_longitude': 1.5, 'nadir_latitude': 1.5, 'only_in_1': False, 'only_in_2': True}} res = self.fh.combine_info([info1, info2]) self.assertDictEqual(res, exp) # Identity self.assertEqual(self.fh.combine_info([info1]), info1) # Empty self.fh.combine_info([{}]) def test_file_is_kept_intact(self): """Test that the file object passed (string, path, or other) is kept intact.""" open_file = mock.MagicMock() bfh = BaseFileHandler(open_file, {'filename_info': 'bla'}, 'filetype_info') assert bfh.filename == open_file from pathlib import Path filename = Path('/bla/bla.nc') bfh = BaseFileHandler(filename, {'filename_info': 'bla'}, 'filetype_info') assert isinstance(bfh.filename, Path) def tearDown(self): """Tear down the test.""" BaseFileHandler.__abstractmethods__ = self._old_set @pytest.mark.parametrize( ("file_type", "ds_file_type", "exp_result"), [ ("fake1", "fake1", True), ("fake1", ["fake1"], True), ("fake1", ["fake1", "fake2"], True), ("fake1", ["fake2"], None), ("fake1", "fake2", None), ("fake1", "fake1_with_suffix", None), ] ) def test_file_type_match(file_type, ds_file_type, exp_result): """Test that file type matching uses exactly equality.""" fh = FakeFileHandler("some_file.txt", {}, {"file_type": file_type}) assert fh.file_type_matches(ds_file_type) is exp_result satpy-0.34.0/satpy/tests/test_modifiers.py000066400000000000000000000602311420401153000206220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for modifiers in modifiers/__init__.py.""" import contextlib import unittest from datetime import datetime, timedelta from glob import glob from typing import Optional, Union from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition, StackedAreaDefinition from pytest_lazyfixture import lazy_fixture import satpy def _sunz_area_def(): """Get fake area for testing sunz generation.""" area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 2, (-2000, -2000, 2000, 2000)) return area def _sunz_bigger_area_def(): """Get area that is twice the size of 'sunz_area_def'.""" bigger_area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 4, 4, (-2000, -2000, 2000, 2000)) return bigger_area def _sunz_stacked_area_def(): """Get fake stacked area for testing sunz generation.""" area1 = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 1, (-2000, 0, 2000, 2000)) area2 = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 1, (-2000, -2000, 2000, 0)) return StackedAreaDefinition(area1, area2) def _shared_sunz_attrs(area_def): attrs = {'area': area_def, 'start_time': datetime(2018, 1, 1, 18), 'modifiers': tuple(), 'name': 'test_vis'} return attrs def _get_ds1(attrs): ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) return ds1 @pytest.fixture(scope="session") def sunz_ds1(): """Generate fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_area_def()) return _get_ds1(attrs) @pytest.fixture(scope="session") def sunz_ds1_stacked(): """Generate fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_stacked_area_def()) return _get_ds1(attrs) @pytest.fixture(scope="session") def sunz_ds2(): """Generate larger fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_bigger_area_def()) ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 0.5, 1, 1.5], 'x': [0, 0.5, 1, 1.5]}) return ds2 @pytest.fixture(scope="session") def sunz_sza(): """Generate fake solar zenith angle data array for testing.""" sza = xr.DataArray( np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]], chunks=2))), attrs={'area': _sunz_area_def()}, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}, ) return sza class TestSunZenithCorrector: """Test case for the zenith corrector.""" def test_basic_default_not_provided(self, sunz_ds1): """Test default limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) res = comp((sunz_ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) assert 'y' in res.coords assert 'x' in res.coords ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) res = comp((ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) assert 'y' not in res.coords assert 'x' not in res.coords def test_basic_lims_not_provided(self, sunz_ds1): """Test custom limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) res = comp((sunz_ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) 
@pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def test_basic_default_provided(self, data_arr, sunz_sza): """Test default limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) res = comp((data_arr, sunz_sza), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def test_basic_lims_provided(self, data_arr, sunz_sza): """Test custom limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) res = comp((data_arr, sunz_sza), test_attr='test') np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) def test_imcompatible_areas(self, sunz_ds2, sunz_sza): """Test sunz correction on incompatible areas.""" from satpy.composites import IncompatibleAreas from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) with pytest.raises(IncompatibleAreas): comp((sunz_ds2, sunz_sza), test_attr='test') class TestNIRReflectance(unittest.TestCase): """Test NIR reflectance compositor.""" def setUp(self): """Set up the test case for the NIRReflectance compositor.""" self.get_lonlats = mock.MagicMock() self.lons, self.lats = 1, 2 self.get_lonlats.return_value = (self.lons, self.lats) area = mock.MagicMock(get_lonlats=self.get_lonlats) self.start_time = 1 self.metadata = {'platform_name': 'Meteosat-11', 'sensor': 'seviri', 'name': 'IR_039', 'area': area, 'start_time': self.start_time} nir_arr = np.random.random((2, 2)) self.nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) self.nir.attrs.update(self.metadata) ir_arr = 100 * np.random.random((2, 2)) self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) self.ir_.attrs['area'] = area self.sunz_arr = 100 * np.random.random((2, 2)) self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=['y', 'x']) self.sunz.attrs['standard_name'] = 'solar_zenith_angle' self.sunz.attrs['area'] = area self.da_sunz = da.from_array(self.sunz_arr) refl_arr = np.random.random((2, 2)) self.refl = da.from_array(refl_arr) self.refl_with_co2 = da.from_array(np.random.random((2, 2))) self.refl_from_tbs = mock.MagicMock() self.refl_from_tbs.side_effect = self.fake_refl_from_tbs def fake_refl_from_tbs(self, sun_zenith, da_nir, da_tb11, tb_ir_co2=None): """Fake refl_from_tbs.""" del sun_zenith, da_nir, da_tb11 if tb_ir_co2 is not None: return self.refl_with_co2 return self.refl @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') @mock.patch('satpy.modifiers.spectral.Calculator') def test_provide_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided only sunz.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance comp = NIRReflectance(name='test') info = {'modifiers': None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert self.metadata.items() <= res.attrs.items() assert res.attrs['units'] == '%' assert res.attrs['sun_zenith_threshold'] is not None assert np.allclose(res.data, 
self.refl * 100).compute() @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') @mock.patch('satpy.modifiers.spectral.Calculator') def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor with minimal parameters.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance comp = NIRReflectance(name='test') info = {'modifiers': None} res = comp([self.nir, self.ir_], optional_datasets=[], **info) self.get_lonlats.assert_called() sza.assert_called_with(self.start_time, self.lons, self.lats) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None) assert np.allclose(res.data, self.refl * 100).compute() @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') @mock.patch('satpy.modifiers.spectral.Calculator') def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided extra co2 info.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz comp = NIRReflectance(name='test') info = {'modifiers': None} co2_arr = np.random.random((2, 2)) co2 = xr.DataArray(da.from_array(co2_arr), dims=['y', 'x']) co2.attrs['wavelength'] = [12.0, 13.0, 14.0] co2.attrs['units'] = 'K' res = comp([self.nir, self.ir_], optional_datasets=[co2], **info) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=co2.data) assert np.allclose(res.data, self.refl_with_co2 * 100).compute() @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') @mock.patch('satpy.modifiers.spectral.Calculator') def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz comp = NIRReflectance(name='test', sunz_threshold=84.0) info = {'modifiers': None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) self.assertEqual(res.attrs['sun_zenith_threshold'], 84.0) calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039', sunz_threshold=84.0, masking_limit=NIRReflectance.MASKING_LIMIT) @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') @mock.patch('satpy.modifiers.spectral.Calculator') def test_sunz_threshold_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance comp = NIRReflectance(name='test') info = {'modifiers': None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert comp.sun_zenith_threshold is not None @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') @mock.patch('satpy.modifiers.spectral.Calculator') def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): """Test NIR reflectance 
compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz comp = NIRReflectance(name='test', masking_limit=None) info = {'modifiers': None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) self.assertIsNone(res.attrs['sun_zenith_masking_limit']) calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039', sunz_threshold=NIRReflectance.TERMINATOR_LIMIT, masking_limit=None) @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') @mock.patch('satpy.modifiers.spectral.Calculator') def test_masking_limit_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance comp = NIRReflectance(name='test') info = {'modifiers': None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert comp.masking_limit is not None class TestNIREmissivePartFromReflectance(unittest.TestCase): """Test the NIR Emissive part from reflectance compositor.""" @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') @mock.patch('satpy.modifiers.spectral.Calculator') def test_compositor(self, calculator, apply_modifier_info, sza): """Test the NIR emissive part from reflectance compositor.""" from satpy.modifiers.spectral import NIRReflectance refl_arr = np.random.random((2, 2)) refl = da.from_array(refl_arr) refl_from_tbs = mock.MagicMock() refl_from_tbs.return_value = refl calculator.return_value = mock.MagicMock(reflectance_from_tbs=refl_from_tbs) emissive_arr = np.random.random((2, 2)) emissive = da.from_array(emissive_arr) emissive_part = mock.MagicMock() emissive_part.return_value = emissive calculator.return_value = mock.MagicMock(emissive_part_3x=emissive_part) from satpy.modifiers.spectral import NIREmissivePartFromReflectance comp = NIREmissivePartFromReflectance(name='test', sunz_threshold=86.0) info = {'modifiers': None} platform = 'NOAA-20' sensor = 'viirs' chan_name = 'M12' get_lonlats = mock.MagicMock() lons, lats = 1, 2 get_lonlats.return_value = (lons, lats) area = mock.MagicMock(get_lonlats=get_lonlats) nir_arr = np.random.random((2, 2)) nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) nir.attrs['platform_name'] = platform nir.attrs['sensor'] = sensor nir.attrs['name'] = chan_name nir.attrs['area'] = area ir_arr = np.random.random((2, 2)) ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) ir_.attrs['area'] = area sunz_arr = 100 * np.random.random((2, 2)) sunz = xr.DataArray(da.from_array(sunz_arr), dims=['y', 'x']) sunz.attrs['standard_name'] = 'solar_zenith_angle' sunz.attrs['area'] = area sunz2 = da.from_array(sunz_arr) sza.return_value = sunz2 res = comp([nir, ir_], optional_datasets=[sunz], **info) self.assertEqual(res.attrs['sun_zenith_threshold'], 86.0) self.assertEqual(res.attrs['units'], 'K') self.assertEqual(res.attrs['platform_name'], platform) self.assertEqual(res.attrs['sensor'], sensor) self.assertEqual(res.attrs['name'], chan_name) calculator.assert_called_with('NOAA-20', 'viirs', 'M12', sunz_threshold=86.0, masking_limit=NIRReflectance.MASKING_LIMIT) class TestPSPAtmosphericalCorrection(unittest.TestCase): """Test the pyspectral-based atmospheric 
correction modifier.""" def test_call(self): """Test atmospherical correction.""" from pyresample.geometry import SwathDefinition from satpy.modifiers import PSPAtmosphericalCorrection # Patch methods lons = np.zeros((5, 5)) lons[1, 1] = np.inf lons = da.from_array(lons, chunks=5) lats = np.zeros((5, 5)) lats[1, 1] = np.inf lats = da.from_array(lats, chunks=5) area = SwathDefinition(lons, lats) stime = datetime(2020, 1, 1, 12, 0, 0) orb_params = { "satellite_actual_altitude": 12345678, "nadir_longitude": 0.0, "nadir_latitude": 0.0, } band = xr.DataArray(da.zeros((5, 5)), attrs={'area': area, 'start_time': stime, 'name': 'name', 'platform_name': 'platform', 'sensor': 'sensor', 'orbital_parameters': orb_params}, dims=('y', 'x')) # Perform atmospherical correction psp = PSPAtmosphericalCorrection(name='dummy') res = psp(projectables=[band]) res.compute() def _angle_cache_area_def(): area = AreaDefinition( "test", "", "", {"proj": "merc"}, 5, 5, (-2500, -2500, 2500, 2500), ) return area def _angle_cache_stacked_area_def(): area1 = AreaDefinition( "test", "", "", {"proj": "merc"}, 5, 2, (2500, 500, 7500, 2500), ) area2 = AreaDefinition( "test", "", "", {"proj": "merc"}, 5, 3, (2500, -2500, 7500, 500), ) return StackedAreaDefinition(area1, area2) def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDefinition]] = None, chunks: Optional[Union[int, tuple]] = 2) -> xr.DataArray: if area_def is None: area_def = _angle_cache_area_def() orb_params = { "satellite_nominal_altitude": 12345678, "satellite_nominal_longitude": 10.0, "satellite_nominal_latitude": 0.0, } stime = datetime(2020, 1, 1, 12, 0, 0) data = da.zeros((5, 5), chunks=chunks) vis = xr.DataArray(data, attrs={ 'area': area_def, 'start_time': stime, 'orbital_parameters': orb_params, }) return vis def _get_stacked_angle_test_data(): return _get_angle_test_data(area_def=_angle_cache_stacked_area_def(), chunks=(5, (2, 2, 1))) def _similar_sat_pos_datetime(orig_data, lon_offset=0.04): # change data slightly new_data = orig_data.copy() old_lon = new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] = old_lon + lon_offset new_data.attrs["start_time"] = new_data.attrs["start_time"] + timedelta(hours=36) return new_data def _diff_sat_pos_datetime(orig_data): return _similar_sat_pos_datetime(orig_data, lon_offset=0.05) def _glob_reversed(pat): """Behave like glob but force results to be in the wrong order.""" return sorted(glob(pat), reverse=True) @contextlib.contextmanager def _mock_glob_if(mock_glob): if mock_glob: with mock.patch("satpy.modifiers.angles.glob", _glob_reversed): yield else: yield def _assert_allclose_if(expect_equal, arr1, arr2): if not expect_equal: pytest.raises(AssertionError, np.testing.assert_allclose, arr1, arr2) else: np.testing.assert_allclose(arr1, arr2) class TestAngleGeneration: """Test the angle generation utility functions.""" @pytest.mark.parametrize("input_func", [_get_angle_test_data, _get_stacked_angle_test_data]) def test_get_angles(self, input_func): """Test sun and satellite angle calculation.""" from satpy.modifiers.angles import get_angles data = input_func() from pyorbital.orbital import get_observer_look with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol: angles = get_angles(data) assert all(isinstance(x, xr.DataArray) for x in angles) da.compute(angles) # get_observer_look should have been called once per array chunk assert gol.call_count == data.data.blocks.size 
# Check arguments of get_orbserver_look() call, especially the altitude # unit conversion from meters to kilometers args = gol.call_args[0] assert args[:4] == (10.0, 0.0, 12345.678, data.attrs["start_time"]) @pytest.mark.parametrize("force_bad_glob", [False, True]) @pytest.mark.parametrize( ("input2_func", "exp_equal_sun", "exp_num_zarr"), [ (lambda x: x, True, 4), (_similar_sat_pos_datetime, False, 4), (_diff_sat_pos_datetime, False, 6), ] ) @pytest.mark.parametrize("input_func", [_get_angle_test_data, _get_stacked_angle_test_data]) def test_cache_get_angles(self, input_func, input2_func, exp_equal_sun, exp_num_zarr, force_bad_glob, tmpdir): """Test get_angles when caching is enabled.""" from satpy.modifiers.angles import ( STATIC_EARTH_INERTIAL_DATETIME, _get_sensor_angles_from_sat_pos, _get_valid_lonlats, get_angles, ) # Patch methods data = input_func() additional_cache = exp_num_zarr > 4 # Compute angles from pyorbital.orbital import get_observer_look with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol, \ satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=str(tmpdir)): res = get_angles(data) assert all(isinstance(x, xr.DataArray) for x in res) # call again, should be cached new_data = input2_func(data) with _mock_glob_if(force_bad_glob): res2 = get_angles(new_data) assert all(isinstance(x, xr.DataArray) for x in res2) res, res2 = da.compute(res, res2) for r1, r2 in zip(res[:2], res2[:2]): _assert_allclose_if(not additional_cache, r1, r2) for r1, r2 in zip(res[2:], res2[2:]): _assert_allclose_if(exp_equal_sun, r1, r2) zarr_dirs = glob(str(tmpdir / "*.zarr")) assert len(zarr_dirs) == exp_num_zarr # two for lon/lat, one for sata, one for satz _get_sensor_angles_from_sat_pos.cache_clear() _get_valid_lonlats.cache_clear() zarr_dirs = glob(str(tmpdir / "*.zarr")) assert len(zarr_dirs) == 0 assert gol.call_count == data.data.blocks.size * (int(additional_cache) + 1) args = gol.call_args_list[0][0] assert args[:4] == (10.0, 0.0, 12345.678, STATIC_EARTH_INERTIAL_DATETIME) exp_sat_lon = 10.1 if additional_cache else 10.0 args = gol.call_args_list[-1][0] assert args[:4] == (exp_sat_lon, 0.0, 12345.678, STATIC_EARTH_INERTIAL_DATETIME) satpy-0.34.0/satpy/tests/test_multiscene.py000066400000000000000000000700321420401153000210110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unit tests for multiscene.py.""" import os import shutil import tempfile import unittest from datetime import datetime from unittest import mock import pytest from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange DEFAULT_SHAPE = (5, 10) local_id_keys_config = {'name': { 'required': True, }, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'reflectance', 'brightness_temperature', 'radiance', 'counts' ] }, 'polarization': None, 'level': None, 'modifiers': { 'required': True, 'default': ModifierTuple(), 'type': ModifierTuple, }, } def make_dataid(**items): """Make a data id.""" return DataID(local_id_keys_config, **items) def _fake_get_enhanced_image(img, enhance=None, overlay=None, decorate=None): from trollimage.xrimage import XRImage return XRImage(img) def _create_test_area(proj_str=None, shape=DEFAULT_SHAPE, extents=None): """Create a test area definition.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict if proj_str is None: proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' \ '+lat_0=25 +lat_1=25 +units=m +no_defs' proj_dict = proj4_str_to_dict(proj_str) extents = extents or (-1000., -1500., 1000., 1500.) return AreaDefinition( 'test', 'test', 'test', proj_dict, shape[1], shape[0], extents ) def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None): """Create a test DataArray object.""" import dask.array as da import numpy as np import xarray as xr return xr.DataArray( da.zeros(shape, dtype=np.float32, chunks=shape), dims=('y', 'x'), attrs={'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) def _create_test_scenes(num_scenes=2, shape=DEFAULT_SHAPE, area=None): """Create some test scenes for various test cases.""" from satpy import Scene ds1 = _create_test_dataset('ds1', shape=shape, area=area) ds2 = _create_test_dataset('ds2', shape=shape, area=area) scenes = [] for _ in range(num_scenes): scn = Scene() scn['ds1'] = ds1.copy() scn['ds2'] = ds2.copy() scenes.append(scn) return scenes class TestMultiScene(unittest.TestCase): """Test basic functionality of MultiScene.""" def test_init_empty(self): """Test creating a multiscene with no children.""" from satpy import MultiScene MultiScene() def test_init_children(self): """Test creating a multiscene with children.""" from satpy import MultiScene scenes = _create_test_scenes() MultiScene(scenes) def test_properties(self): """Test basic properties/attributes of the MultiScene.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) ds1_id = make_dataid(name='ds1') ds2_id = make_dataid(name='ds2') ds3_id = make_dataid(name='ds3') ds4_id = make_dataid(name='ds4') # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') mscn = MultiScene(scenes) self.assertSetEqual(mscn.loaded_dataset_ids, {ds1_id, ds2_id, ds3_id}) self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) self.assertTrue(mscn.all_same_area) bigger_area = _create_test_area(shape=(20, 40)) scenes[0]['ds4'] = _create_test_dataset('ds4', shape=(20, 40), area=bigger_area) self.assertSetEqual(mscn.loaded_dataset_ids, {ds1_id, ds2_id, ds3_id, ds4_id}) self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) self.assertFalse(mscn.all_same_area) def test_from_files(self): """Test creating a multiscene from multiple files.""" from satpy import MultiScene input_files_abi = [ "OR_ABI-L1b-RadC-M3C01_G16_s20171171502203_e20171171504576_c20171171505018.nc", 
"OR_ABI-L1b-RadC-M3C01_G16_s20171171507203_e20171171509576_c20171171510018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171512203_e20171171514576_c20171171515017.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171517203_e20171171519577_c20171171520019.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171522203_e20171171524576_c20171171525020.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171527203_e20171171529576_c20171171530017.nc", ] input_files_glm = [ "OR_GLM-L2-GLMC-M3_G16_s20171171500000_e20171171501000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171501000_e20171171502000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171502000_e20171171503000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171503000_e20171171504000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171504000_e20171171505000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171505000_e20171171506000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171506000_e20171171507000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171507000_e20171171508000_c20380190314080.nc", ] with mock.patch('satpy.multiscene.Scene') as scn_mock: mscn = MultiScene.from_files( input_files_abi, reader='abi_l1b', scene_kwargs={"reader_kwargs": {}}) assert len(mscn.scenes) == 6 calls = [mock.call( filenames={'abi_l1b': [in_file_abi]}, reader_kwargs={}) for in_file_abi in input_files_abi] scn_mock.assert_has_calls(calls) scn_mock.reset_mock() with pytest.warns(DeprecationWarning): mscn = MultiScene.from_files( input_files_abi + input_files_glm, reader=('abi_l1b', "glm_l2"), group_keys=["start_time"], ensure_all_readers=True, time_threshold=30) assert len(mscn.scenes) == 2 calls = [mock.call( filenames={'abi_l1b': [in_file_abi], 'glm_l2': [in_file_glm]}) for (in_file_abi, in_file_glm) in zip(input_files_abi[0:2], [input_files_glm[2]] + [input_files_glm[7]])] scn_mock.assert_has_calls(calls) scn_mock.reset_mock() mscn = MultiScene.from_files( input_files_abi + input_files_glm, reader=('abi_l1b', "glm_l2"), group_keys=["start_time"], ensure_all_readers=False, time_threshold=30) assert len(mscn.scenes) == 12 def test_group(self): """Test group.""" from satpy import MultiScene, Scene ds1 = _create_test_dataset(name='ds1') ds2 = _create_test_dataset(name='ds2') ds3 = _create_test_dataset(name='ds3') ds4 = _create_test_dataset(name='ds4') scene1 = Scene() scene1['ds1'] = ds1 scene1['ds2'] = ds2 scene2 = Scene() scene2['ds3'] = ds3 scene2['ds4'] = ds4 multi_scene = MultiScene([scene1, scene2]) groups = {make_dataid(name='odd', wavelength=(1, 2, 3)): ['ds1', 'ds3'], make_dataid(name='even', wavelength=(2, 3, 4)): ['ds2', 'ds4']} multi_scene.group(groups) self.assertSetEqual(multi_scene.shared_dataset_ids, set(groups.keys())) def test_add_group_aliases(self): """Test adding group aliases.""" import types import numpy as np import xarray as xr from satpy import Scene from satpy.multiscene import add_group_aliases # Define test scenes ds_id1 = make_dataid(name='ds1', wavelength=(10.7, 10.8, 10.9)) ds_id2 = make_dataid(name='ds2', wavelength=(1.9, 2.0, 2.1)) ds_id3 = make_dataid(name='ds3', wavelength=(10.8, 10.9, 11.0)) ds_id31 = make_dataid(name='ds31', polarization='H') scene1 = Scene() scene1[ds_id1] = xr.DataArray([1]) scene2 = Scene() scene2[ds_id2] = xr.DataArray([2]) scene3 = Scene() scene3[ds_id3] = xr.DataArray([3]) scene3[ds_id31] = xr.DataArray([4]) scenes = [scene1, scene2, scene3] # Define groups g1 = make_dataid(name='g1', wavelength=(10, 11, 12)) g2 = make_dataid(name='g2', wavelength=(1, 2, 3), polarization='V') groups = {g1: ['ds1', 'ds3'], g2: ['ds2']} # Test 
adding aliases with_aliases = add_group_aliases(iter(scenes), groups) self.assertIsInstance(with_aliases, types.GeneratorType) with_aliases = list(with_aliases) self.assertSetEqual(set(with_aliases[0].keys()), {g1, ds_id1}) self.assertSetEqual(set(with_aliases[1].keys()), {g2, ds_id2}) self.assertSetEqual(set(with_aliases[2].keys()), {g1, ds_id3, ds_id31}) np.testing.assert_array_equal(with_aliases[0]['g1'].values, [1]) np.testing.assert_array_equal(with_aliases[0]['ds1'].values, [1]) np.testing.assert_array_equal(with_aliases[1]['g2'].values, [2]) np.testing.assert_array_equal(with_aliases[1]['ds2'].values, [2]) np.testing.assert_array_equal(with_aliases[2]['g1'].values, [3]) np.testing.assert_array_equal(with_aliases[2]['ds3'].values, [3]) np.testing.assert_array_equal(with_aliases[2]['ds31'].values, [4]) # Make sure that modifying the result doesn't modify the original self.assertNotIn(g1, scene1) # Adding an alias for multiple datasets in one scene should fail gen = add_group_aliases([scene3], {g1: ['ds3', 'ds31']}) self.assertRaises(ValueError, list, gen) class TestMultiSceneSave(unittest.TestCase): """Test saving a MultiScene to various formats.""" def setUp(self): """Create temporary directory to save files to.""" self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_mp4_distributed(self): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=client_mock, datasets=['ds1', 'ds2', 'ds3']) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') # Test no distributed client found mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer, \ 
mock.patch('satpy.multiscene.get_client', mock.Mock(side_effect=ValueError("No client"))): get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_mp4_no_distributed(self): """Save a series of fake scenes to an mp4 video when distributed isn't available.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer, \ mock.patch('satpy.multiscene.get_client', None): get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_datasets_simple(self): """Save a series of fake scenes to an PNG images.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch('satpy.multiscene.Scene.save_datasets') as save_datasets: save_datasets.return_value = [True] # some arbitrary return value # force order of datasets by specifying them mscn.save_datasets(base_dir=self.base_dir, client=False, datasets=['ds1', 'ds2', 
'ds3'], writer='simple_image') # 2 for each scene self.assertEqual(save_datasets.call_count, 2) @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_datasets_distributed_delayed(self): """Test distributed save for writers returning delayed objects, e.g. simple_image.""" from dask.delayed import Delayed from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x future_mock = mock.MagicMock() future_mock.__class__ = Delayed with mock.patch('satpy.multiscene.Scene.save_datasets') as save_datasets: save_datasets.return_value = [future_mock] # some arbitrary return value # force order of datasets by specifying them mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'], writer='simple_image') # 2 for each scene self.assertEqual(save_datasets.call_count, 2) @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_datasets_distributed_source_target(self): """Test distributed save for writers returning sources and targets e.g. geotiff writer.""" import dask.array as da from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x source_mock = mock.MagicMock() source_mock.__class__ = da.Array target_mock = mock.MagicMock() with mock.patch('satpy.multiscene.Scene.save_datasets') as save_datasets: save_datasets.return_value = [(source_mock, target_mock)] # some arbitrary return value # force order of datasets by specifying them with self.assertRaises(NotImplementedError): mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'], writer='geotiff') def test_crop(self): """Test the crop method.""" import numpy as np from pyresample.geometry import AreaDefinition from xarray import DataArray from satpy import MultiScene, Scene scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} x_size = 3712 y_size = 3712 area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( 'test2', 'test2', 'test2', proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = DataArray(np.zeros((y_size, x_size))) scene1["2"] = 
DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), attrs={'area': area_def}) scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), attrs={'area': area_def2}) mscn = MultiScene([scene1]) # by lon/lat bbox new_mscn = mscn.crop(ll_bbox=(-20., -5., 0, 0)) new_scn1 = list(new_mscn.scenes)[0] self.assertIn('1', new_scn1) self.assertIn('2', new_scn1) self.assertIn('3', new_scn1) self.assertTupleEqual(new_scn1['1'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['2'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['3'].shape, (184, 714)) self.assertTupleEqual(new_scn1['4'].shape, (92, 357)) class TestBlendFuncs(unittest.TestCase): """Test individual functions used for blending.""" def setUp(self): """Set up test data.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, 2, 2, [-200, -200, 200, 200]) ds1 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) self.ds1 = ds1 ds2 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) self.ds2 = ds2 ds3 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'time'), attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) self.ds3 = ds3 ds4 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'time'), attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) self.ds4 = ds4 def test_stack(self): """Test the 'stack' function.""" from satpy.multiscene import stack res = stack([self.ds1, self.ds2]) self.assertTupleEqual(self.ds1.shape, res.shape) def test_timeseries(self): """Test the 'timeseries' function.""" import xarray as xr from satpy.multiscene import timeseries res = timeseries([self.ds1, self.ds2]) res2 = timeseries([self.ds3, self.ds4]) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res2, xr.DataArray) self.assertTupleEqual((2, self.ds1.shape[0], self.ds1.shape[1]), res.shape) self.assertTupleEqual((self.ds3.shape[0], self.ds3.shape[1]+self.ds4.shape[1]), res2.shape) @mock.patch('satpy.multiscene.get_enhanced_image') def test_save_mp4(smg, tmp_path): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) smg.side_effect = _fake_get_enhanced_image # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = str(tmp_path / 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3'], client=False) # 2 saves for the first scene + 1 black frame # 3 for the second scene assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in 
get_writer.call_args_list] assert filenames[0] == 'test_save_mp4_ds1_20180101_00_20180102_12.mp4' assert filenames[1] == 'test_save_mp4_ds2_20180101_00_20180102_12.mp4' assert filenames[2] == 'test_save_mp4_ds3_20180102_00_20180102_12.mp4' # make sure that not specifying datasets still saves all of them fn = str(tmp_path / 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=False) # the 'ds3' dataset isn't known to the first scene so it doesn't get saved # 2 for first scene, 2 for second scene assert writer_mock.append_data.call_count == 2 + 2 assert "test_save_mp4_ds1_20180101_00_20180102_12.mp4" in filenames assert "test_save_mp4_ds2_20180101_00_20180102_12.mp4" in filenames assert "test_save_mp4_ds3_20180102_00_20180102_12.mp4" in filenames # test decorating and enhancing fn = str(tmp_path / 'test-{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}-rich.mp4') writer_mock = mock.MagicMock() with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer: get_writer.return_value = writer_mock mscn.save_animation( fn, client=False, enh_args={"decorate": { "decorate": [{ "text": { "txt": "Test {start_time:%Y-%m-%d %H:%M} - " "{end_time:%Y-%m-%d %H:%M}"}}]}}) assert writer_mock.append_data.call_count == 2 + 2 assert ("2018-01-02" in smg.call_args_list[-1][1] ["decorate"]["decorate"][0]["text"]["txt"]) satpy-0.34.0/satpy/tests/test_node.py000066400000000000000000000064421420401153000175720ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
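# --- Illustrative sketch (added for clarity; not part of the original module) ---
# The tests below exercise satpy.node.CompositorNode. This is a minimal, hedged
# example of how such a node is assembled and copied; the MagicMock stand-ins for
# the compositor and its child nodes are assumptions used purely for illustration,
# mirroring the fakes used in the tests themselves.
from unittest.mock import MagicMock

from satpy.node import CompositorNode


def _compositor_node_sketch():
    """Build a CompositorNode, attach children, and make an independent copy."""
    node = CompositorNode(MagicMock())        # wrap a compositor-like object
    node.add_required_nodes([MagicMock()])    # prerequisites that must be present
    node.add_optional_nodes([MagicMock()])    # prerequisites that may be missing
    node_copy = node.copy()                   # node data and child lists are duplicated
    assert node_copy.data is not node.data
    return node, node_copy
# --------------------------------------------------------------------------------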
"""Unit tests for the dependency tree class and dependencies.""" import unittest from unittest.mock import MagicMock from satpy.node import CompositorNode class FakeCompositor: """A fake compositor.""" def __init__(self, id): """Set up the fake compositor.""" self.id = id class TestCompositorNodeCopy(unittest.TestCase): """Test case for copying a node.""" def setUp(self): """Set up the test case.""" self.node = CompositorNode(MagicMock()) self.node.add_required_nodes([MagicMock(), MagicMock()]) self.node.add_optional_nodes([MagicMock()]) self.node_copy = self.node.copy() def test_node_data_is_copied(self): """Test that the data of the node is copied.""" assert self.node_copy.data is not self.node.data def test_node_data_required_nodes_are_copies(self): """Test that the required nodes of the node data are copied.""" for req1, req2 in zip(self.node.required_nodes, self.node_copy.required_nodes): assert req1 is not req2 def test_node_data_optional_nodes_are_copies(self): """Test that the optional nodes of the node data are copied.""" for req1, req2 in zip(self.node.optional_nodes, self.node_copy.optional_nodes): assert req1 is not req2 class TestCompositorNode(unittest.TestCase): """Test case for the compositor node object.""" def setUp(self): """Set up the test case.""" self.name = 'hej' self.fake = FakeCompositor(self.name) self.c_node = CompositorNode(self.fake) def test_compositor_node_init(self): """Test compositor node initialization.""" assert self.c_node.name == self.name assert self.fake in self.c_node.data def test_add_required_nodes(self): """Test adding required nodes.""" self.c_node.add_required_nodes([1, 2, 3]) assert self.c_node.required_nodes == [1, 2, 3] def test_add_required_nodes_twice(self): """Test adding required nodes twice.""" self.c_node.add_required_nodes([1, 2]) self.c_node.add_required_nodes([3]) assert self.c_node.required_nodes == [1, 2, 3] def test_add_optional_nodes(self): """Test adding optional nodes.""" self.c_node.add_optional_nodes([1, 2, 3]) assert self.c_node.optional_nodes == [1, 2, 3] def test_add_optional_nodes_twice(self): """Test adding optional nodes twice.""" self.c_node.add_optional_nodes([1, 2]) self.c_node.add_optional_nodes([3]) assert self.c_node.optional_nodes == [1, 2, 3] satpy-0.34.0/satpy/tests/test_readers.py000066400000000000000000001371751420401153000203020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Test classes and functions in the readers/__init__.py module.""" import os import unittest from contextlib import suppress from unittest import mock import pytest from satpy.dataset.data_dict import get_key from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange # clear the config dir environment variable so it doesn't interfere os.environ.pop("PPP_CONFIG_DIR", None) os.environ.pop("SATPY_CONFIG_PATH", None) local_id_keys_config = {'name': { 'required': True, }, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'reflectance', 'brightness_temperature', 'radiance', 'counts' ] }, 'polarization': None, 'level': None, 'modifiers': { 'required': True, 'default': ModifierTuple(), 'type': ModifierTuple, }, } def make_dataid(**items): """Make a data id.""" return DataID(local_id_keys_config, **items) class TestDatasetDict(unittest.TestCase): """Test DatasetDict and its methods.""" def setUp(self): """Create a test DatasetDict.""" from satpy import DatasetDict self.regular_dict = regular_dict = { make_dataid(name="test", wavelength=(0, 0.5, 1), resolution=1000): "1", make_dataid(name="testh", wavelength=(0, 0.5, 1), resolution=500): "1h", make_dataid(name="test2", wavelength=(1, 1.5, 2), resolution=1000): "2", make_dataid(name="test3", wavelength=(1.2, 1.7, 2.2), resolution=1000): "3", make_dataid(name="test4", calibration="radiance", polarization="V"): "4rad", make_dataid(name="test4", calibration="reflectance", polarization="H"): "4refl", make_dataid(name="test5", modifiers=('mod1', 'mod2')): "5_2mod", make_dataid(name="test5", modifiers=('mod2',)): "5_1mod", make_dataid(name='test6', level=100): '6_100', make_dataid(name='test6', level=200): '6_200', } self.test_dict = DatasetDict(regular_dict) def test_init_noargs(self): """Test DatasetDict init with no arguments.""" from satpy import DatasetDict d = DatasetDict() self.assertIsInstance(d, dict) def test_init_dict(self): """Test DatasetDict init with a regular dict argument.""" from satpy import DatasetDict regular_dict = {make_dataid(name="test", wavelength=(0, 0.5, 1)): "1", } d = DatasetDict(regular_dict) self.assertEqual(d, regular_dict) def test_getitem(self): """Test DatasetDict getitem with different arguments.""" from satpy.tests.utils import make_dsq d = self.test_dict # access by name self.assertEqual(d["test"], "1") # access by exact wavelength self.assertEqual(d[1.5], "2") # access by near wavelength self.assertEqual(d[1.55], "2") # access by near wavelength of another dataset self.assertEqual(d[1.65], "3") # access by name with multiple levels self.assertEqual(d['test6'], '6_100') self.assertEqual(d[make_dsq(wavelength=1.5)], "2") self.assertEqual(d[make_dsq(wavelength=0.5, resolution=1000)], "1") self.assertEqual(d[make_dsq(wavelength=0.5, resolution=500)], "1h") self.assertEqual(d[make_dsq(name='test6', level=100)], '6_100') self.assertEqual(d[make_dsq(name='test6', level=200)], '6_200') # higher resolution is returned self.assertEqual(d[0.5], "1h") self.assertEqual(d['test4'], '4refl') self.assertEqual(d[make_dataid(name='test4', calibration='radiance')], '4rad') self.assertRaises(KeyError, d.getitem, '1h') # test with full tuple self.assertEqual(d[make_dsq(name='test', wavelength=(0, 0.5, 1), resolution=1000)], "1") def test_get_key(self): """Test 'get_key' special functions.""" from satpy.dataset import DataQuery d = self.test_dict res1 = get_key(make_dataid(name='test4'), d, calibration='radiance') res2 = get_key(make_dataid(name='test4'), d, calibration='radiance', 
num_results=0) res3 = get_key(make_dataid(name='test4'), d, calibration='radiance', num_results=3) self.assertEqual(len(res2), 1) self.assertEqual(len(res3), 1) res2 = res2[0] res3 = res3[0] self.assertEqual(res1, res2) self.assertEqual(res1, res3) res1 = get_key('test4', d, query=DataQuery(polarization='V')) self.assertEqual(res1, make_dataid(name='test4', calibration='radiance', polarization='V')) res1 = get_key(0.5, d, query=DataQuery(resolution=500)) self.assertEqual(res1, make_dataid(name='testh', wavelength=(0, 0.5, 1), resolution=500)) res1 = get_key('test6', d, query=DataQuery(level=100)) self.assertEqual(res1, make_dataid(name='test6', level=100)) res1 = get_key('test5', d) res2 = get_key('test5', d, query=DataQuery(modifiers=('mod2',))) res3 = get_key('test5', d, query=DataQuery(modifiers=('mod1', 'mod2',))) self.assertEqual(res1, make_dataid(name='test5', modifiers=('mod2',))) self.assertEqual(res1, res2) self.assertNotEqual(res1, res3) # more than 1 result when default is to ask for 1 result self.assertRaises(KeyError, get_key, 'test4', d, best=False) def test_contains(self): """Test DatasetDict contains method.""" d = self.test_dict self.assertIn('test', d) self.assertFalse(d.contains('test')) self.assertNotIn('test_bad', d) self.assertIn(0.5, d) self.assertFalse(d.contains(0.5)) self.assertIn(1.5, d) self.assertIn(1.55, d) self.assertIn(1.65, d) self.assertIn(make_dataid(name='test4', calibration='radiance'), d) self.assertIn('test4', d) def test_keys(self): """Test keys method of DatasetDict.""" from satpy.tests.utils import DataID d = self.test_dict self.assertEqual(len(d.keys()), len(self.regular_dict.keys())) self.assertTrue(all(isinstance(x, DataID) for x in d.keys())) name_keys = d.keys(names=True) self.assertListEqual(sorted(set(name_keys))[:4], [ 'test', 'test2', 'test3', 'test4']) wl_keys = tuple(d.keys(wavelengths=True)) self.assertIn((0, 0.5, 1), wl_keys) self.assertIn((1, 1.5, 2, 'µm'), wl_keys) self.assertIn((1.2, 1.7, 2.2, 'µm'), wl_keys) self.assertIn(None, wl_keys) def test_setitem(self): """Test setitem method of DatasetDict.""" d = self.test_dict d['new_ds'] = {'metadata': 'new_ds'} self.assertEqual(d['new_ds']['metadata'], 'new_ds') d[0.5] = {'calibration': 'radiance'} self.assertEqual(d[0.5]['resolution'], 500) self.assertEqual(d[0.5]['name'], 'testh') class TestReaderLoader(unittest.TestCase): """Test the `load_readers` function. Assumes that the VIIRS SDR reader exists and works. """ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_sdr import VIIRSSDRFileHandler from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_no_args(self): """Test no args provided. This should check the local directory which should have no files. 
""" from satpy.readers import load_readers ri = load_readers() self.assertDictEqual(ri, {}) def test_filenames_only(self): """Test with filenames specified.""" from satpy.readers import load_readers ri = load_readers(filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_filenames_and_reader(self): """Test with filenames and reader specified.""" from satpy.readers import load_readers ri = load_readers(reader='viirs_sdr', filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" from satpy.readers import load_readers self.assertRaises(ValueError, load_readers, reader='i_dont_exist', filenames=[ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) def test_filenames_as_path(self): """Test with filenames specified as pathlib.Path.""" from pathlib import Path from satpy.readers import load_readers ri = load_readers(filenames=[ Path('SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'), ]) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_filenames_as_dict(self): """Test loading readers where filenames are organized by reader.""" from satpy.readers import load_readers filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], } ri = load_readers(filenames=filenames) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_filenames_as_dict_bad_reader(self): """Test loading with filenames dict but one of the readers is bad.""" from satpy.readers import load_readers filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], '__fake__': ['fake.txt'], } self.assertRaisesRegex(ValueError, r'(?=.*__fake__)(?!.*viirs)(^No reader.+)', load_readers, filenames=filenames) def test_filenames_as_dict_with_reader(self): """Test loading from a filenames dict with a single reader specified. This can happen in the deprecated Scene behavior of passing a reader and a base_dir. 
""" from satpy.readers import load_readers filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], } ri = load_readers(reader='viirs_sdr', filenames=filenames) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_empty_filenames_as_dict(self): """Test passing filenames as a dictionary with an empty list of filenames.""" # only one reader from satpy.readers import load_readers filenames = { 'viirs_sdr': [], } self.assertRaises(ValueError, load_readers, filenames=filenames) # two readers, one is empty filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], 'viirs_l1b': [], } ri = load_readers(filenames) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) @mock.patch('satpy.readers.hrit_base.HRITFileHandler._get_hd') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') def test_missing_requirements(self, *mocks): """Test warnings and exceptions in case of missing requirements.""" from satpy.readers import load_readers # Filenames from a single scan epi_pro_miss = ['H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__'] epi_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__'] pro_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__'] for filenames in [epi_miss, pro_miss, epi_pro_miss]: self.assertRaises(ValueError, load_readers, reader='seviri_l1b_hrit', filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ # 09:00 scan is ok 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__', 'H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__', 'H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__', # 10:00 scan is incomplete 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__', ] try: load_readers(filenames=at_least_one_complete, reader='seviri_l1b_hrit') except ValueError: self.fail('If at least one set of filenames is complete, no ' 'exception should be raised') def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" import datetime from satpy.readers import load_readers filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], } filter_params = {'start_time': datetime.datetime(1970, 1, 1), 'end_time': datetime.datetime(1970, 1, 2), 'area': None} self.assertRaises(ValueError, load_readers, filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" import datetime from satpy.readers import load_readers filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc'], } filter_params = {'start_time': datetime.datetime(1970, 1, 1), 'end_time': datetime.datetime(1970, 1, 2)} self.assertRaises(ValueError, load_readers, filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has 
datasets.""" import datetime from satpy.readers import load_readers filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc'], } filter_params = {'start_time': datetime.datetime(2012, 2, 25), 'end_time': datetime.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't readers = load_readers(filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) self.assertIn('viirs_sdr', readers) # abi_l1b reader was created, but no datasets available self.assertIn('abi_l1b', readers) self.assertEqual(len(list(readers['abi_l1b'].available_dataset_ids)), 0) class TestFindFilesAndReaders(unittest.TestCase): """Test the find_files_and_readers utility function.""" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_sdr import VIIRSSDRFileHandler from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() # def test_sensor(self): # """Test with filenames and sensor specified""" # from satpy.readers import load_readers # ri = load_readers(sensor='viirs', # filenames=[ # 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # ]) # self.assertListEqual(list(ri.keys()), ['viirs_sdr']) # def test_reader_name(self): """Test with default base_dir and reader specified.""" from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='viirs_sdr') self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_other_name(self): """Test with default base_dir and reader specified.""" from satpy.readers import find_files_and_readers fn = 'S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='nwcsaf-pps_nc') self.assertListEqual(list(ri.keys()), ['nwcsaf-pps_nc']) self.assertListEqual(ri['nwcsaf-pps_nc'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_name_matched_start_end_time(self): """Test with start and end time matching the filename.""" from datetime import datetime from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='viirs_sdr', start_time=datetime(2012, 2, 25, 18, 0, 0), end_time=datetime(2012, 2, 25, 19, 0, 0), ) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_name_matched_start_time(self): """Test with start matching the filename. Start time in the middle of the file time should still match the file. 
""" from datetime import datetime from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='viirs_sdr', start_time=datetime(2012, 2, 25, 18, 1, 30)) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_name_matched_end_time(self): """Test with end matching the filename. End time in the middle of the file time should still match the file. """ from datetime import datetime from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='viirs_sdr', end_time=datetime(2012, 2, 25, 18, 1, 30)) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_name_unmatched_start_end_time(self): """Test with start and end time matching the filename.""" from datetime import datetime from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: self.assertRaises(ValueError, find_files_and_readers, reader='viirs_sdr', start_time=datetime(2012, 2, 26, 18, 0, 0), end_time=datetime(2012, 2, 26, 19, 0, 0), ) finally: test_file.close() os.remove(fn) def test_no_parameters(self): """Test with no limiting parameters.""" from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers() self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_bad_sensor(self): """Test bad sensor doesn't find any files.""" from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: self.assertRaises(ValueError, find_files_and_readers, sensor='i_dont_exist') finally: test_file.close() os.remove(fn) def test_sensor(self): """Test that readers for the current sensor are loaded.""" from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works ri = find_files_and_readers(sensor='viirs') self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_sensor_no_files(self): """Test that readers for the current sensor are loaded.""" from satpy.readers import find_files_and_readers # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works self.assertRaises(ValueError, find_files_and_readers, sensor='viirs') self.assertEqual( find_files_and_readers(sensor='viirs', missing_ok=True), {}) def test_reader_load_failed(self): """Test that an exception is raised when a reader 
can't be loaded.""" import yaml from satpy.readers import find_files_and_readers # touch the file so it exists on disk with mock.patch('yaml.load') as load: load.side_effect = yaml.YAMLError("Import problems") self.assertRaises(yaml.YAMLError, find_files_and_readers, reader='viirs_sdr') def test_pending_old_reader_name_mapping(self): """Test that requesting pending old reader names raises a warning.""" from satpy.readers import PENDING_OLD_READER_NAMES, get_valid_reader_names if not PENDING_OLD_READER_NAMES: return unittest.skip("Skipping pending deprecated reader tests because " "no pending deprecated readers.") test_reader = sorted(PENDING_OLD_READER_NAMES.keys())[0] with self.assertWarns(FutureWarning): valid_reader_names = get_valid_reader_names([test_reader]) self.assertEqual(valid_reader_names[0], PENDING_OLD_READER_NAMES[test_reader]) def test_old_reader_name_mapping(self): """Test that requesting old reader names raises a warning.""" from satpy.readers import OLD_READER_NAMES, get_valid_reader_names if not OLD_READER_NAMES: return unittest.skip("Skipping deprecated reader tests because " "no deprecated readers.") test_reader = sorted(OLD_READER_NAMES.keys())[0] with self.assertRaises(ValueError): get_valid_reader_names([test_reader]) class TestYAMLFiles(unittest.TestCase): """Test and analyze the reader configuration files.""" def test_filename_matches_reader_name(self): """Test that every reader filename matches the name in the YAML.""" import yaml class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): return tag_suffix + ' ' + node.value IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.readers import read_reader_config for reader_config in glob_config('readers/*.yaml'): reader_fn = os.path.basename(reader_config) reader_fn_name = os.path.splitext(reader_fn)[0] reader_info = read_reader_config([reader_config], loader=IgnoreLoader) self.assertEqual(reader_fn_name, reader_info['name'], "Reader YAML filename doesn't match reader " "name in the YAML file.") def test_available_readers(self): """Test the 'available_readers' function.""" from satpy import available_readers reader_names = available_readers() self.assertGreater(len(reader_names), 0) self.assertIsInstance(reader_names[0], str) self.assertIn('viirs_sdr', reader_names) # needs h5py self.assertIn('abi_l1b', reader_names) # needs netcdf4 self.assertEqual(reader_names, sorted(reader_names)) reader_infos = available_readers(as_dict=True) self.assertEqual(len(reader_names), len(reader_infos)) self.assertIsInstance(reader_infos[0], dict) for reader_info in reader_infos: self.assertIn('name', reader_info) self.assertEqual(reader_infos, sorted(reader_infos, key=lambda reader_info: reader_info['name'])) class TestGroupFiles(unittest.TestCase): """Test the 'group_files' utility function.""" def setUp(self): """Set up test filenames to use.""" input_files = [ "OR_ABI-L1b-RadC-M3C01_G16_s20171171502203_e20171171504576_c20171171505018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171507203_e20171171509576_c20171171510018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171512203_e20171171514576_c20171171515017.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171517203_e20171171519577_c20171171520019.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171522203_e20171171524576_c20171171525020.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171527203_e20171171529576_c20171171530017.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171502203_e20171171504576_c20171171505008.nc", 
"OR_ABI-L1b-RadC-M3C02_G16_s20171171507203_e20171171509576_c20171171510012.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171512203_e20171171514576_c20171171515007.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171517203_e20171171519576_c20171171520010.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171522203_e20171171524576_c20171171525008.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171527203_e20171171529576_c20171171530008.nc", ] self.g16_files = input_files self.g17_files = [x.replace('G16', 'G17') for x in input_files] self.noaa20_files = [ "GITCO_j01_d20180511_t2027292_e2028538_b02476_c20190530192858056873_noac_ops.h5", "GITCO_j01_d20180511_t2028550_e2030195_b02476_c20190530192932937427_noac_ops.h5", "GITCO_j01_d20180511_t2030208_e2031435_b02476_c20190530192932937427_noac_ops.h5", "GITCO_j01_d20180511_t2031447_e2033092_b02476_c20190530192932937427_noac_ops.h5", "GITCO_j01_d20180511_t2033105_e2034350_b02476_c20190530192932937427_noac_ops.h5", "SVI03_j01_d20180511_t2027292_e2028538_b02476_c20190530190950789763_noac_ops.h5", "SVI03_j01_d20180511_t2028550_e2030195_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2030208_e2031435_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2031447_e2033092_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2033105_e2034350_b02476_c20190530192911205765_noac_ops.h5", "SVI04_j01_d20180511_t2027292_e2028538_b02476_c20190530190951848958_noac_ops.h5", "SVI04_j01_d20180511_t2028550_e2030195_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2030208_e2031435_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2031447_e2033092_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2033105_e2034350_b02476_c20190530192903985164_noac_ops.h5" ] self.npp_files = [ "GITCO_npp_d20180511_t1939067_e1940309_b33872_c20190612031740518143_noac_ops.h5", "GITCO_npp_d20180511_t1940321_e1941563_b33872_c20190612031740518143_noac_ops.h5", "GITCO_npp_d20180511_t1941575_e1943217_b33872_c20190612031740518143_noac_ops.h5", "SVI03_npp_d20180511_t1939067_e1940309_b33872_c20190612032009230105_noac_ops.h5", "SVI03_npp_d20180511_t1940321_e1941563_b33872_c20190612032009230105_noac_ops.h5", "SVI03_npp_d20180511_t1941575_e1943217_b33872_c20190612032009230105_noac_ops.h5", ] self.unknown_files = [ "ʌsɔ˙pıʃɐʌuı", "no such"] def test_no_reader(self): """Test that reader does not need to be provided.""" from satpy.readers import group_files # without files it's going to be an empty result assert group_files([]) == [] groups = group_files(self.g16_files) self.assertEqual(6, len(groups)) def test_unknown_files(self): """Test that error is raised on unknown files.""" from satpy.readers import group_files with pytest.raises(ValueError): group_files(self.unknown_files, "abi_l1b") def test_bad_reader(self): """Test that reader not existing causes an error.""" import yaml from satpy.readers import group_files # touch the file so it exists on disk with mock.patch('yaml.load') as load: load.side_effect = yaml.YAMLError("Import problems") self.assertRaises(yaml.YAMLError, group_files, [], reader='abi_l1b') def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader='abi_l1b') self.assertEqual(6, len(groups)) self.assertEqual(2, len(groups[0]['abi_l1b'])) def test_default_behavior_set(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files files = set(self.g16_files) num_files = len(files) groups = 
group_files(files, reader='abi_l1b') # we didn't modify it self.assertEqual(len(files), num_files) self.assertEqual(6, len(groups)) self.assertEqual(2, len(groups[0]['abi_l1b'])) def test_non_datetime_group_key(self): """Test what happens when the start_time isn't used for grouping.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader='abi_l1b', group_keys=('platform_shortname',)) self.assertEqual(1, len(groups)) self.assertEqual(12, len(groups[0]['abi_l1b'])) def test_large_time_threshold(self): """Test what happens when the time threshold holds multiple files.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader='abi_l1b', time_threshold=60*8) self.assertEqual(3, len(groups)) self.assertEqual(4, len(groups[0]['abi_l1b'])) def test_two_instruments_files(self): """Test the behavior when two instruments files are provided. This is undesired from a user point of view since we don't want G16 and G17 files in the same Scene. Readers (like abi_l1b) are or can be configured to have specific group keys for handling these situations. Due to that this test forces the fallback group keys of ('start_time',). """ from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', group_keys=('start_time',)) self.assertEqual(6, len(groups)) self.assertEqual(4, len(groups[0]['abi_l1b'])) def test_two_instruments_files_split(self): """Test the default behavior when two instruments files are provided and split. Tell the sorting to include the platform identifier as another field to use for grouping. """ from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', group_keys=('start_time', 'platform_shortname')) self.assertEqual(12, len(groups)) self.assertEqual(2, len(groups[0]['abi_l1b'])) # default for abi_l1b should also behave like this groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b') self.assertEqual(12, len(groups)) self.assertEqual(2, len(groups[0]['abi_l1b'])) def test_viirs_orbits(self): """Test a reader that doesn't use 'start_time' for default grouping.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr') self.assertEqual(2, len(groups)) # the noaa-20 files will be first because the orbit number is smaller # 5 granules * 3 file types self.assertEqual(5 * 3, len(groups[0]['viirs_sdr'])) # 3 granules * 2 file types self.assertEqual(6, len(groups[1]['viirs_sdr'])) def test_viirs_override_keys(self): """Test overriding a group keys to add 'start_time'.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', group_keys=('start_time', 'orbit', 'platform_shortname')) self.assertEqual(8, len(groups)) self.assertEqual(2, len(groups[0]['viirs_sdr'])) # NPP self.assertEqual(2, len(groups[1]['viirs_sdr'])) # NPP self.assertEqual(2, len(groups[2]['viirs_sdr'])) # NPP self.assertEqual(3, len(groups[3]['viirs_sdr'])) # N20 self.assertEqual(3, len(groups[4]['viirs_sdr'])) # N20 self.assertEqual(3, len(groups[5]['viirs_sdr'])) # N20 self.assertEqual(3, len(groups[6]['viirs_sdr'])) # N20 self.assertEqual(3, len(groups[7]['viirs_sdr'])) # N20 # Ask for a larger time span with our groups groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', time_threshold=60 * 60 * 2, group_keys=('start_time', 'orbit', 'platform_shortname')) self.assertEqual(2, len(groups)) # NPP is first because it has an 
earlier time # 3 granules * 2 file types self.assertEqual(6, len(groups[0]['viirs_sdr'])) # 5 granules * 3 file types self.assertEqual(5 * 3, len(groups[1]['viirs_sdr'])) def test_multi_readers(self): """Test passing multiple readers.""" from satpy.readers import group_files groups = group_files( self.g16_files + self.noaa20_files, reader=("abi_l1b", "viirs_sdr")) assert len(groups) == 11 # test that they're grouped together when time threshold is huge and # only time is used to group groups = group_files( self.g16_files + self.noaa20_files, reader=("abi_l1b", "viirs_sdr"), group_keys=("start_time",), time_threshold=10**9) assert len(groups) == 1 # test that a warning is raised when a string is passed (meaning no # group keys found in common) with pytest.warns(UserWarning): groups = group_files( self.g16_files + self.noaa20_files, reader=("abi_l1b", "viirs_sdr"), group_keys=("start_time"), time_threshold=10**9) _filenames_abi_glm = [ "OR_ABI-L1b-RadF-M6C14_G16_s19000010000000_e19000010005000_c20403662359590.nc", "OR_ABI-L1b-RadF-M6C14_G16_s19000010010000_e19000010015000_c20403662359590.nc", "OR_ABI-L1b-RadF-M6C14_G16_s19000010020000_e19000010025000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010000000_e19000010001000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010001000_e19000010002000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010002000_e19000010003000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010003000_e19000010004000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010004000_e19000010005000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010005000_e19000010006000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010006000_e19000010007000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010007000_e19000010008000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010008000_e19000010009000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010009000_e19000010010000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010010000_e19000010011000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010011000_e19000010012000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010012000_e19000010013000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010013000_e19000010014000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010014000_e19000010015000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010015000_e19000010016000_c20403662359590.nc"] def test_multi_readers_empty_groups_raises_filenotfounderror(self): """Test behaviour on empty groups passing multiple readers. Make sure it raises an exception, for there will be groups containing GLM but not ABI. """ from satpy.readers import group_files with pytest.raises( FileNotFoundError, match="when grouping files, group at index 1 " "had no files for readers: abi_l1b"): group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], group_keys=("start_time",), time_threshold=35, missing="raise") def test_multi_readers_empty_groups_missing_skip(self): """Verify empty groups are skipped. Verify that all groups lacking ABI are skipped, resulting in only three groups that are all non-empty for both instruments. 
""" from satpy.readers import group_files groups = group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], group_keys=("start_time",), time_threshold=35, missing="skip") assert len(groups) == 2 for g in groups: assert g["abi_l1b"] assert g["glm_l2"] def test_multi_readers_empty_groups_passed(self): """Verify that all groups are there, resulting in some that are empty.""" from satpy.readers import group_files groups = group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], group_keys=("start_time",), time_threshold=35, missing="pass") assert len(groups) == 17 assert not groups[1]["abi_l1b"] # should be empty assert groups[1]["glm_l2"] # should not be empty def test_multi_readers_invalid_parameter(self): """Verify that invalid missing parameter raises ValueError.""" from satpy.readers import group_files with pytest.raises(ValueError): group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], group_keys=("start_time",), time_threshold=35, missing="hopkin green frog") def _generate_random_string(): import uuid return str(uuid.uuid1()) def _assert_is_open_file_and_close(opened): try: assert hasattr(opened, 'tell') finally: opened.close() def _posixify_path(filename): drive, driveless_name = os.path.splitdrive(filename) return driveless_name.replace('\\', '/') class TestFSFile(unittest.TestCase): """Test the FSFile class.""" def setUp(self): """Set up the instance.""" import tempfile import zipfile from pathlib import Path import fsspec self.random_string = _generate_random_string() self.local_filename = os.path.join(tempfile.gettempdir(), self.random_string) Path(self.local_filename).touch() self.local_file = fsspec.open(self.local_filename) self.random_string2 = _generate_random_string() self.local_filename2 = os.path.join(tempfile.gettempdir(), self.random_string2) Path(self.local_filename2).touch() self.zip_name = os.path.join(tempfile.gettempdir(), self.random_string2 + ".zip") zip_file = zipfile.ZipFile(self.zip_name, 'w', zipfile.ZIP_DEFLATED) zip_file.write(self.local_filename2) zip_file.close() os.remove(self.local_filename2) def tearDown(self): """Destroy the instance.""" os.remove(self.local_filename) with suppress(PermissionError): os.remove(self.zip_name) def test_regular_filename_is_returned_with_str(self): """Test that str give the filename.""" from satpy.readers import FSFile assert str(FSFile(self.random_string)) == self.random_string def test_fsfile_with_regular_filename_abides_pathlike(self): """Test that FSFile abides PathLike for regular filenames.""" from satpy.readers import FSFile assert os.fspath(FSFile(self.random_string)) == self.random_string def test_fsfile_with_regular_filename_and_fs_spec_abides_pathlike(self): """Test that FSFile abides PathLike for filename+fs instances.""" from satpy.readers import FSFile assert os.fspath(FSFile(self.random_string, fs=None)) == self.random_string def test_fsfile_with_pathlike(self): """Test FSFile with path-like object.""" from pathlib import Path from satpy.readers import FSFile f = FSFile(Path(self.local_filename)) assert str(f) == os.fspath(f) == self.local_filename def test_fsfile_with_fs_open_file_abides_pathlike(self): """Test that FSFile abides PathLike for fsspec OpenFile instances.""" from satpy.readers import FSFile assert os.fspath(FSFile(self.local_file)).endswith(self.random_string) def test_repr_includes_filename(self): """Test that repr includes the filename.""" from satpy.readers import FSFile assert self.random_string in repr(FSFile(self.local_file)) def 
test_open_regular_file(self): """Test opening a regular file.""" from satpy.readers import FSFile _assert_is_open_file_and_close(FSFile(self.local_filename).open()) def test_open_local_fs_file(self): """Test opening a localfs file.""" from satpy.readers import FSFile _assert_is_open_file_and_close(FSFile(self.local_file).open()) def test_open_zip_fs_regular_filename(self): """Test opening a zipfs with a regular filename provided.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile zip_fs = ZipFileSystem(self.zip_name) file = FSFile(_posixify_path(self.local_filename2), zip_fs) _assert_is_open_file_and_close(file.open()) def test_open_zip_fs_openfile(self): """Test opening a zipfs openfile.""" import fsspec from satpy.readers import FSFile open_file = fsspec.open("zip:/" + _posixify_path(self.local_filename2) + "::file://" + self.zip_name) file = FSFile(open_file) _assert_is_open_file_and_close(file.open()) def test_sorting_fsfiles(self): """Test sorting FSFiles.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile zip_fs = ZipFileSystem(self.zip_name) file1 = FSFile(self.local_filename2, zip_fs) file2 = FSFile(self.local_filename) extra_file = os.path.normpath('/somedir/bla') sorted_filenames = [os.fspath(file) for file in sorted([file1, file2, extra_file])] expected_filenames = sorted([extra_file, os.fspath(file1), os.fspath(file2)]) assert sorted_filenames == expected_filenames def test_equality(self): """Test that FSFile compares equal when it should.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile zip_fs = ZipFileSystem(self.zip_name) assert FSFile(self.local_filename) == FSFile(self.local_filename) assert (FSFile(self.local_filename, zip_fs) == FSFile(self.local_filename, zip_fs)) assert (FSFile(self.local_filename, zip_fs) != FSFile(self.local_filename)) assert FSFile(self.local_filename) != FSFile(self.local_filename2) def test_hash(self): """Test that FSFile hashing behaves sanely.""" from fsspec.implementations.cached import CachingFileSystem from fsspec.implementations.local import LocalFileSystem from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile lfs = LocalFileSystem() zfs = ZipFileSystem(self.zip_name) cfs = CachingFileSystem(fs=lfs) # make sure each name/fs-combi has its own hash assert len({hash(FSFile(fn, fs)) for fn in {self.local_filename, self.local_filename2} for fs in [None, lfs, zfs, cfs]}) == 2*4 satpy-0.34.0/satpy/tests/test_regressions.py000066400000000000000000000176411420401153000212130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
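# --- Illustrative sketch (added for clarity; not part of the original module) ---
# The regression tests below drive a full Scene workflow with the abi_l1b reader
# against fake in-memory datasets. As a hedged reference, this is roughly the
# user-level pattern they reproduce; the abi_files argument is a hypothetical list
# of ABI L1b NetCDF paths and is not needed by the tests (the reader is mocked).
from satpy import Scene


def _abi_native_resample_sketch(abi_files):
    """Load an ABI channel as radiance and natively resample to the coarsest grid."""
    scn = Scene(abi_files, reader='abi_l1b')
    scn.load(['C04'], calibration='radiance')
    return scn.resample(scn.coarsest_area(), resampler='native')
# --------------------------------------------------------------------------------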
"""Test fixed bugs.""" from unittest.mock import patch import dask.array as da import numpy as np from xarray import DataArray, Dataset from satpy.tests.utils import make_dataid abi_file_list = ['/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc', '/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc', '/data/OR_ABI-L1b-RadF-M3C03_G16_s20180722030423_e20180722041190_c20180722041237-119000_0.nc', '/data/OR_ABI-L1b-RadF-M3C04_G16_s20180722030423_e20180722041189_c20180722041221.nc', '/data/OR_ABI-L1b-RadF-M3C05_G16_s20180722030423_e20180722041190_c20180722041237-119101_0.nc', '/data/OR_ABI-L1b-RadF-M3C06_G16_s20180722030423_e20180722041195_c20180722041227.nc', '/data/OR_ABI-L1b-RadF-M3C07_G16_s20180722030423_e20180722041201_c20180722041238.nc', '/data/OR_ABI-L1b-RadF-M3C08_G16_s20180722030423_e20180722041190_c20180722041238.nc', '/data/OR_ABI-L1b-RadF-M3C09_G16_s20180722030423_e20180722041195_c20180722041256.nc', '/data/OR_ABI-L1b-RadF-M3C10_G16_s20180722030423_e20180722041201_c20180722041250.nc', '/data/OR_ABI-L1b-RadF-M3C11_G16_s20180722030423_e20180722041189_c20180722041254.nc', '/data/OR_ABI-L1b-RadF-M3C12_G16_s20180722030423_e20180722041195_c20180722041256.nc', '/data/OR_ABI-L1b-RadF-M3C13_G16_s20180722030423_e20180722041201_c20180722041259.nc', '/data/OR_ABI-L1b-RadF-M3C14_G16_s20180722030423_e20180722041190_c20180722041258.nc', '/data/OR_ABI-L1b-RadF-M3C15_G16_s20180722030423_e20180722041195_c20180722041259.nc', '/data/OR_ABI-L1b-RadF-M3C16_G16_s20180722030423_e20180722041202_c20180722041259.nc'] def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): """Create a fake xarray dataset for abi data. This is an incomplete copy of existing file structures. """ dataset = Dataset(attrs={ 'time_coverage_start': '2018-03-13T20:30:42.3Z', 'time_coverage_end': '2018-03-13T20:41:18.9Z', }) projection = DataArray( [-214748364], attrs={ 'long_name': 'GOES-R ABI fixed grid projection', 'grid_mapping_name': 'geostationary', 'perspective_point_height': 35786023.0, 'semi_major_axis': 6378137.0, 'semi_minor_axis': 6356752.31414, 'inverse_flattening': 298.2572221, 'latitude_of_projection_origin': 0.0, 'longitude_of_projection_origin': -75.0, 'sweep_angle_axis': 'x' }) dataset['goes_imager_projection'] = projection if 'C01' in filename or 'C03' in filename or 'C05' in filename: stop = 10847 step = 2 scale = 2.8e-05 offset = 0.151858 elif 'C02' in filename: stop = 21693 step = 4 scale = 1.4e-05 offset = 0.151865 else: stop = 5424 step = 1 scale = 5.6e-05 offset = 0.151844 y = DataArray( da.arange(0, stop, step), attrs={ 'scale_factor': -scale, 'add_offset': offset, 'units': 'rad', 'axis': 'Y', 'long_name': 'GOES fixed grid projection y-coordinate', 'standard_name': 'projection_y_coordinate' }, dims=['y']) dataset['y'] = y x = DataArray( da.arange(0, stop, step), attrs={ 'scale_factor': scale, 'add_offset': -offset, 'units': 'rad', 'axis': 'X', 'long_name': 'GOES fixed grid projection x-coordinate', 'standard_name': 'projection_x_coordinate' }, dims=['x']) dataset['x'] = x rad = DataArray( da.random.randint(0, 1025, size=[len(y), len(x)], dtype=np.int16, chunks=chunks), attrs={ '_FillValue': np.array(1023), 'long_name': 'ABI L1b Radiances', 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', '_Unsigned': 'true', 'sensor_band_bit_depth': 10, 'valid_range': np.array([0, 1022], dtype=np.int16), 'scale_factor': 0.8121064, 'add_offset': -25.936647, 'units': 'W m-2 sr-1 um-1', 'resolution': 'y: 0.000028 rad x: 
0.000028 rad', 'grid_mapping': 'goes_imager_projection', 'cell_methods': 't: point area: point' }, dims=['y', 'x'] ) dataset['Rad'] = rad sublat = DataArray(0.0, attrs={ 'long_name': 'nominal satellite subpoint latitude (platform latitude)', 'standard_name': 'latitude', '_FillValue': -999.0, 'units': 'degrees_north'}) dataset['nominal_satellite_subpoint_lat'] = sublat sublon = DataArray(-75.0, attrs={ 'long_name': 'nominal satellite subpoint longitude (platform longitude)', 'standard_name': 'longitude', '_FillValue': -999.0, 'units': 'degrees_east'}) dataset['nominal_satellite_subpoint_lon'] = sublon satheight = DataArray(35786.023, attrs={ 'long_name': 'nominal satellite height above GRS 80 ellipsoid (platform altitude)', 'standard_name': 'height_above_reference_ellipsoid', '_FillValue': -999.0, 'units': 'km'}) dataset['nominal_satellite_height'] = satheight yaw_flip_flag = DataArray(0, attrs={ 'long_name': 'Flag indicating the spacecraft is operating in yaw flip configuration', '_Unsigned': 'true', '_FillValue': np.array(-1), 'valid_range': np.array([0, 1], dtype=np.int8), 'units': '1', 'flag_values': '0 1', 'flag_meanings': 'false true'}) dataset['yaw_flip_flag'] = yaw_flip_flag return dataset @patch('xarray.open_dataset') def test_1258(fake_open_dataset): """Save true_color from abi with radiance doesn't need two resamplings.""" from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset scene = Scene(abi_file_list, reader='abi_l1b') scene.load(['true_color_nocorr', 'C04'], calibration='radiance') resampled_scene = scene.resample(scene.coarsest_area(), resampler='native') assert len(resampled_scene.keys()) == 2 @patch('xarray.open_dataset') def test_1088(fake_open_dataset): """Check that copied arrays gets resampled.""" from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset scene = Scene(abi_file_list, reader='abi_l1b') scene.load(['C04'], calibration='radiance') my_id = make_dataid(name='my_name', wavelength=(10, 11, 12)) scene[my_id] = scene['C04'].copy() resampled = scene.resample('eurol') assert resampled[my_id].shape == (2048, 2560) @patch('xarray.open_dataset') def test_no_enums(fake_open_dataset): """Check that no enums are inserted in the resulting attrs.""" from enum import Enum from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset scene = Scene(abi_file_list, reader='abi_l1b') scene.load(['C04'], calibration='radiance') for value in scene['C04'].attrs.values(): assert not isinstance(value, Enum) satpy-0.34.0/satpy/tests/test_resample.py000066400000000000000000001305431420401153000204550ustar00rootroot00000000000000#!/usr/bin/python # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unittests for resamplers.""" import os import shutil import tempfile import unittest from unittest import mock try: from pyresample.ewa import LegacyDaskEWAResampler except ImportError: LegacyDaskEWAResampler = None from pyproj import CRS def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=None, input_dims=('y', 'x')): """Get common data objects used in testing. Returns: tuple: * input_data_on_area: DataArray with dimensions as if it is a gridded dataset. * input_area_def: AreaDefinition of the above DataArray * input_data_on_swath: DataArray with dimensions as if it is a swath. * input_swath: SwathDefinition of the above DataArray * target_area_def: AreaDefinition to be used as a target for resampling """ import dask.array as da from pyresample.geometry import AreaDefinition, SwathDefinition from pyresample.utils import proj4_str_to_dict from xarray import DataArray ds1 = DataArray(da.zeros(input_shape, chunks=85), dims=input_dims, attrs={'name': 'test_data_name', 'test': 'test'}) if input_dims and 'y' in input_dims: ds1 = ds1.assign_coords(y=da.arange(input_shape[-2], chunks=85)) if input_dims and 'x' in input_dims: ds1 = ds1.assign_coords(x=da.arange(input_shape[-1], chunks=85)) if input_dims and 'bands' in input_dims: ds1 = ds1.assign_coords(bands=list('RGBA'[:ds1.sizes['bands']])) input_proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 ' '+b=6356752.31414 +sweep=x +units=m +no_defs') source = AreaDefinition( 'test_target', 'test_target', 'test_target', proj4_str_to_dict(input_proj_str), input_shape[1], # width input_shape[0], # height (-1000., -1500., 1000., 1500.)) ds1.attrs['area'] = source crs = CRS.from_string(input_proj_str) ds1 = ds1.assign_coords(crs=crs) ds2 = ds1.copy() input_area_shape = tuple(ds1.sizes[dim] for dim in ds1.dims if dim in ['y', 'x']) geo_dims = ('y', 'x') if input_dims else None lons = da.random.random(input_area_shape, chunks=50) lats = da.random.random(input_area_shape, chunks=50) swath_def = SwathDefinition( DataArray(lons, dims=geo_dims), DataArray(lats, dims=geo_dims)) ds2.attrs['area'] = swath_def crs = CRS.from_string('+proj=latlong +datum=WGS84 +ellps=WGS84') ds2 = ds2.assign_coords(crs=crs) # set up target definition output_proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs') output_proj_str = output_proj or output_proj_str target = AreaDefinition( 'test_target', 'test_target', 'test_target', proj4_str_to_dict(output_proj_str), output_shape[1], # width output_shape[0], # height (-1000., -1500., 1000., 1500.), ) return ds1, source, ds2, swath_def, target class TestHLResample(unittest.TestCase): """Test the higher level resampling functions.""" def test_type_preserve(self): """Check that the type of resampled datasets is preserved.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import SwathDefinition from satpy.resample import resample_dataset source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x']), xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x'])) dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x']), xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x'])) expected_gap = np.array([[1, 2], [3, 255]]) data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=['y', 'x']) data.attrs['_FillValue'] = 255 data.attrs['area'] = source_area res = resample_dataset(data, dest_area) self.assertEqual(res.dtype, data.dtype) self.assertTrue(np.all(res.values == expected_gap)) expected_filled = np.array([[1, 2], [3, 3]]) res = resample_dataset(data, dest_area, radius_of_influence=1000000) self.assertEqual(res.dtype, data.dtype) self.assertTrue(np.all(res.values == expected_filled)) class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" @mock.patch('satpy.resample.KDTreeResampler._check_numpy_cache') @mock.patch('satpy.resample.xr.Dataset') @mock.patch('satpy.resample.zarr.open') @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename') @mock.patch('pyresample.kd_tree.XArrayResamplerNN') def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset, cnc): """Test the kd resampler.""" import dask.array as da from satpy.resample import KDTreeResampler data, source_area, swath_data, source_swath, target_area = get_test_data() mock_dset = mock.MagicMock() xr_dset.return_value = mock_dset resampler = KDTreeResampler(source_swath, target_area) resampler.precompute( mask=da.arange(5, chunks=5).astype(bool), cache_dir='.') xr_resampler.assert_called_once() resampler.resampler.get_neighbour_info.assert_called() # swath definitions should not be cached self.assertFalse(len(mock_dset.to_zarr.mock_calls), 0) resampler.resampler.reset_mock() cnc.assert_called_once() resampler = KDTreeResampler(source_area, target_area) resampler.precompute() resampler.resampler.get_neighbour_info.assert_called_with(mask=None) try: the_dir = tempfile.mkdtemp() resampler = KDTreeResampler(source_area, target_area) create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr') zarr_open.side_effect = ValueError() resampler.precompute(cache_dir=the_dir) # assert data was saved to the on-disk cache self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1) # assert that zarr_open was called to try to zarr_open something from disk self.assertEqual(len(zarr_open.mock_calls), 1) # we should have cached things in-memory self.assertEqual(len(resampler._index_caches), 1) nbcalls = len(resampler.resampler.get_neighbour_info.mock_calls) # test reusing the resampler zarr_open.side_effect = None # The kdtree shouldn't be available after saving cache to disk assert resampler.resampler.delayed_kdtree is None class FakeZarr(dict): def 
close(self):
                    pass

                def astype(self, dtype):
                    pass

            zarr_open.return_value = FakeZarr(valid_input_index=1, valid_output_index=2,
                                              index_array=3, distance_array=4)
            resampler.precompute(cache_dir=the_dir)
            # we already have things cached in-memory, no need to save again
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # we already have things cached in-memory, don't need to load
            self.assertEqual(len(zarr_open.mock_calls), 1)
            # we should have cached things in-memory
            self.assertEqual(len(resampler._index_caches), 1)
            self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls)

            # test loading saved resampler
            resampler = KDTreeResampler(source_area, target_area)
            resampler.precompute(cache_dir=the_dir)
            self.assertEqual(len(zarr_open.mock_calls), 4)
            self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls)
            # we should have cached things in-memory now
            self.assertEqual(len(resampler._index_caches), 1)
        finally:
            shutil.rmtree(the_dir)

        fill_value = 8
        resampler.compute(data, fill_value=fill_value)
        resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value)

    @mock.patch('satpy.resample.np.load')
    @mock.patch('satpy.resample.xr.Dataset')
    def test_check_numpy_cache(self, xr_Dataset, np_load):
        """Test that cache stored in .npz is converted to zarr."""
        from satpy.resample import KDTreeResampler
        data, source_area, swath_data, source_swath, target_area = get_test_data()
        resampler = KDTreeResampler(source_area, target_area)
        zarr_out = mock.MagicMock()
        xr_Dataset.return_value = zarr_out
        try:
            the_dir = tempfile.mkdtemp()
            kwargs = {}
            np_path = resampler._create_cache_filename(the_dir,
                                                       prefix='resample_lut-',
                                                       fmt='.npz',
                                                       mask=None,
                                                       **kwargs)
            zarr_path = resampler._create_cache_filename(the_dir,
                                                         prefix='nn_lut-',
                                                         fmt='.zarr',
                                                         mask=None,
                                                         **kwargs)
            resampler._check_numpy_cache(the_dir)
            np_load.assert_not_called()
            zarr_out.to_zarr.assert_not_called()
            with open(np_path, 'w') as fid:
                fid.write("42")
            resampler._check_numpy_cache(the_dir)
            np_load.assert_called_once_with(np_path, 'r')
            zarr_out.to_zarr.assert_called_once_with(zarr_path)
        finally:
            shutil.rmtree(the_dir)


@unittest.skipIf(LegacyDaskEWAResampler is not None,
                 "Deprecated EWA resampler is now in pyresample. 
" "No need to test in Satpy.") class TestEWAResampler(unittest.TestCase): """Test EWA resampler class.""" @mock.patch('satpy.resample.fornav') @mock.patch('satpy.resample.ll2cr') @mock.patch('satpy.resample.SwathDefinition.get_lonlats') def test_2d_ewa(self, get_lonlats, ll2cr, fornav): """Test EWA with a 2D dataset.""" import numpy as np import xarray as xr from satpy.resample import resample_dataset ll2cr.return_value = (100, np.zeros((10, 10), dtype=np.float32), np.zeros((10, 10), dtype=np.float32)) fornav.return_value = (100 * 200, np.zeros((200, 100), dtype=np.float32)) _, _, swath_data, source_swath, target_area = get_test_data() get_lonlats.return_value = (source_swath.lons, source_swath.lats) swath_data.data = swath_data.data.astype(np.float32) num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) new_data = resample_dataset(swath_data, target_area, resampler='ewa') self.assertTupleEqual(new_data.shape, (200, 100)) self.assertEqual(new_data.dtype, np.float32) self.assertEqual(new_data.attrs['test'], 'test') self.assertIs(new_data.attrs['area'], target_area) # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count ll2cr_calls = ll2cr.call_count # resample a different dataset and make sure cache is used data = xr.DataArray( swath_data.data, dims=('y', 'x'), attrs={'area': source_swath, 'test': 'test2', 'name': 'test2'}) new_data = resample_dataset(data, target_area, resampler='ewa') new_data.compute() # ll2cr will be called once more because of the computation self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) # but we should already have taken the lonlats from the SwathDefinition self.assertEqual(get_lonlats.call_count, lonlat_calls) self.assertIn('y', new_data.coords) self.assertIn('x', new_data.coords) self.assertIn('crs', new_data.coords) self.assertIsInstance(new_data.coords['crs'].item(), CRS) self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') self.assertEqual(target_area.crs, new_data.coords['crs'].item()) @mock.patch('satpy.resample.fornav') @mock.patch('satpy.resample.ll2cr') @mock.patch('satpy.resample.SwathDefinition.get_lonlats') def test_3d_ewa(self, get_lonlats, ll2cr, fornav): """Test EWA with a 3D dataset.""" import numpy as np import xarray as xr from satpy.resample import resample_dataset _, _, swath_data, source_swath, target_area = get_test_data( input_shape=(3, 200, 100), input_dims=('bands', 'y', 'x')) swath_data.data = swath_data.data.astype(np.float32) ll2cr.return_value = (100, np.zeros((10, 10), dtype=np.float32), np.zeros((10, 10), dtype=np.float32)) fornav.return_value = ([100 * 200] * 3, [np.zeros((200, 100), dtype=np.float32)] * 3) get_lonlats.return_value = (source_swath.lons, source_swath.lats) num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) new_data = resample_dataset(swath_data, target_area, resampler='ewa') self.assertTupleEqual(new_data.shape, (3, 200, 100)) self.assertEqual(new_data.dtype, np.float32) self.assertEqual(new_data.attrs['test'], 'test') self.assertIs(new_data.attrs['area'], target_area) # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count ll2cr_calls = ll2cr.call_count # resample a different dataset and make sure cache is used swath_data = xr.DataArray( swath_data.data, dims=('bands', 'y', 'x'), 
coords={'bands': ['R', 'G', 'B']}, attrs={'area': source_swath, 'test': 'test'}) new_data = resample_dataset(swath_data, target_area, resampler='ewa') new_data.compute() # ll2cr will be called once more because of the computation self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) # but we should already have taken the lonlats from the SwathDefinition self.assertEqual(get_lonlats.call_count, lonlat_calls) self.assertIn('y', new_data.coords) self.assertIn('x', new_data.coords) self.assertIn('bands', new_data.coords) self.assertIn('crs', new_data.coords) self.assertIsInstance(new_data.coords['crs'].item(), CRS) self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') np.testing.assert_equal(new_data.coords['bands'].values, ['R', 'G', 'B']) self.assertEqual(target_area.crs, new_data.coords['crs'].item()) class TestNativeResampler(unittest.TestCase): """Tests for the 'native' resampling method.""" def test_expand_reduce(self): """Test class method 'expand_reduce' basics.""" import dask.array as da import numpy as np from satpy.resample import NativeResampler d_arr = da.zeros((6, 20), chunks=4) new_data = NativeResampler._expand_reduce(d_arr, {0: 2., 1: 2.}) self.assertEqual(new_data.shape, (12, 40)) new_data = NativeResampler._expand_reduce(d_arr, {0: .5, 1: .5}) self.assertEqual(new_data.shape, (3, 10)) self.assertRaises(ValueError, NativeResampler._expand_reduce, d_arr, {0: 1. / 3, 1: 1.}) new_data = NativeResampler._expand_reduce(d_arr, {0: 1., 1: 1.}) self.assertEqual(new_data.shape, (6, 20)) self.assertIs(new_data, d_arr) self.assertRaises(ValueError, NativeResampler._expand_reduce, d_arr, {0: 0.333323423, 1: 1.}) self.assertRaises(ValueError, NativeResampler._expand_reduce, d_arr, {0: 1.333323423, 1: 1.}) n_arr = np.zeros((6, 20)) new_data = NativeResampler._expand_reduce(n_arr, {0: 2., 1: 1.0}) self.assertTrue(np.all(new_data.compute()[::2, :] == n_arr)) def test_expand_dims(self): """Test expanding native resampling with 2D data.""" import numpy as np from satpy.resample import NativeResampler ds1, source_area, _, _, target_area = get_test_data() # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) self.assertEqual(new_data.shape, (200, 100)) new_data2 = resampler.resample(ds1.compute()) self.assertTrue(np.all(new_data == new_data2)) self.assertIn('y', new_data.coords) self.assertIn('x', new_data.coords) self.assertIn('crs', new_data.coords) self.assertIsInstance(new_data.coords['crs'].item(), CRS) self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') self.assertEqual(target_area.crs, new_data.coords['crs'].item()) def test_expand_dims_3d(self): """Test expanding native resampling with 3D data.""" import numpy as np from satpy.resample import NativeResampler ds1, source_area, _, _, target_area = get_test_data( input_shape=(3, 100, 50), input_dims=('bands', 'y', 'x')) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) self.assertEqual(new_data.shape, (3, 200, 100)) new_data2 = resampler.resample(ds1.compute()) self.assertTrue(np.all(new_data == new_data2)) self.assertIn('y', new_data.coords) 
self.assertIn('x', new_data.coords) self.assertIn('bands', new_data.coords) np.testing.assert_equal(new_data.coords['bands'].values, ['R', 'G', 'B']) self.assertIn('crs', new_data.coords) self.assertIsInstance(new_data.coords['crs'].item(), CRS) self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') self.assertEqual(target_area.crs, new_data.coords['crs'].item()) def test_expand_without_dims(self): """Test expanding native resampling with no dimensions specified.""" import numpy as np from satpy.resample import NativeResampler ds1, source_area, _, _, target_area = get_test_data(input_dims=None) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) self.assertEqual(new_data.shape, (200, 100)) new_data2 = resampler.resample(ds1.compute()) self.assertTrue(np.all(new_data == new_data2)) self.assertIn('crs', new_data.coords) self.assertIsInstance(new_data.coords['crs'].item(), CRS) self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) self.assertEqual(target_area.crs, new_data.coords['crs'].item()) def test_expand_without_dims_4D(self): """Test expanding native resampling with 4D data with no dimensions specified.""" from satpy.resample import NativeResampler ds1, source_area, _, _, target_area = get_test_data( input_shape=(2, 3, 100, 50), input_dims=None) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) self.assertRaises(ValueError, resampler.resample, ds1) class TestBilinearResampler(unittest.TestCase): """Test the bilinear resampler.""" @mock.patch('satpy.resample._move_existing_caches') @mock.patch('satpy.resample.BilinearResampler._create_cache_filename') @mock.patch('pyresample.bilinear.XArrayBilinearResampler') def test_bil_resampling(self, xr_resampler, create_filename, move_existing_caches): """Test the bilinear resampler.""" import dask.array as da import xarray as xr from satpy.resample import BilinearResampler data, source_area, swath_data, source_swath, target_area = get_test_data() # Test that bilinear resampling info calculation is called resampler = BilinearResampler(source_swath, target_area) resampler.precompute( mask=da.arange(5, chunks=5).astype(bool)) resampler.resampler.load_resampling_info.assert_not_called() resampler.resampler.get_bil_info.assert_called_once() resampler.resampler.reset_mock() # Test that get_sample_from_bil_info is called properly fill_value = 8 resampler.resampler.get_sample_from_bil_info.return_value = \ xr.DataArray(da.zeros(target_area.shape), dims=('y', 'x')) new_data = resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_bil_info.assert_called_with( data, fill_value=fill_value, output_shape=target_area.shape) self.assertIn('y', new_data.coords) self.assertIn('x', new_data.coords) self.assertIn('crs', new_data.coords) self.assertIsInstance(new_data.coords['crs'].item(), CRS) self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') self.assertEqual(target_area.crs, new_data.coords['crs'].item()) # Test that the resampling info is tried to read from the disk resampler = BilinearResampler(source_swath, target_area) 
resampler.precompute(cache_dir='.') resampler.resampler.load_resampling_info.assert_called() # Test caching the resampling info try: the_dir = tempfile.mkdtemp() resampler = BilinearResampler(source_area, target_area) create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr') xr_resampler.return_value.load_resampling_info.side_effect = IOError resampler.precompute(cache_dir=the_dir) resampler.resampler.save_resampling_info.assert_called() # assert data was saved to the on-disk cache resampler.resampler.save_resampling_info.assert_called_once() nbcalls = resampler.resampler.get_bil_info.call_count resampler.resampler.load_resampling_info.side_effect = None resampler.precompute(cache_dir=the_dir) # we already have things cached in-memory, no need to save again resampler.resampler.save_resampling_info.assert_called_once() # we already have things cached in-memory, don't need to load self.assertEqual(resampler.resampler.get_bil_info.call_count, nbcalls) # test loading saved resampler resampler = BilinearResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) self.assertEqual(resampler.resampler.load_resampling_info.call_count, 3) self.assertEqual(resampler.resampler.get_bil_info.call_count, nbcalls) resampler = BilinearResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) resampler.save_bil_info(cache_dir=the_dir) zarr_file = os.path.join(the_dir, 'test_cache.zarr') # Save again faking the cache file already exists with mock.patch('os.path.exists') as exists: exists.return_value = True resampler.save_bil_info(cache_dir=the_dir) move_existing_caches.assert_called_once_with(the_dir, zarr_file) finally: shutil.rmtree(the_dir) def test_move_existing_caches(self): """Test that existing caches are moved to a subdirectory.""" try: the_dir = tempfile.mkdtemp() # Test that existing cache file is moved away zarr_file = os.path.join(the_dir, 'test.zarr') with open(zarr_file, 'w') as fid: fid.write('42') from satpy.resample import _move_existing_caches _move_existing_caches(the_dir, zarr_file) self.assertFalse(os.path.exists(zarr_file)) self.assertTrue(os.path.exists( os.path.join(the_dir, 'moved_by_satpy', 'test.zarr'))) # Run again to see that the existing dir doesn't matter with open(zarr_file, 'w') as fid: fid.write('42') _move_existing_caches(the_dir, zarr_file) finally: shutil.rmtree(the_dir) class TestCoordinateHelpers(unittest.TestCase): """Test various utility functions for working with coordinates.""" def test_area_def_coordinates(self): """Test coordinates being added with an AreaDefinition.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.resample import add_crs_xy_coords area_def = AreaDefinition( 'test', 'test', 'test', {'proj': 'lcc', 'lat_1': 25, 'lat_0': 25}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), attrs={'area': area_def}, dims=('y', 'x'), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) self.assertIn('y', new_data_arr.coords) self.assertIn('x', new_data_arr.coords) self.assertIn('units', new_data_arr.coords['y'].attrs) self.assertEqual( new_data_arr.coords['y'].attrs['units'], 'meter') self.assertIn('units', new_data_arr.coords['x'].attrs) self.assertEqual( new_data_arr.coords['x'].attrs['units'], 'meter') self.assertIn('crs', new_data_arr.coords) self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) # already has coords 
data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), attrs={'area': area_def}, dims=('y', 'x'), coords={'y': np.arange(2, 202), 'x': np.arange(100)} ) new_data_arr = add_crs_xy_coords(data_arr, area_def) self.assertIn('y', new_data_arr.coords) self.assertNotIn('units', new_data_arr.coords['y'].attrs) self.assertIn('x', new_data_arr.coords) self.assertNotIn('units', new_data_arr.coords['x'].attrs) np.testing.assert_equal(new_data_arr.coords['y'], np.arange(2, 202)) self.assertIn('crs', new_data_arr.coords) self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) # lat/lon area area_def = AreaDefinition( 'test', 'test', 'test', {'proj': 'latlong'}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), attrs={'area': area_def}, dims=('y', 'x'), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) self.assertIn('y', new_data_arr.coords) self.assertIn('x', new_data_arr.coords) self.assertIn('units', new_data_arr.coords['y'].attrs) self.assertEqual( new_data_arr.coords['y'].attrs['units'], 'degrees_north') self.assertIn('units', new_data_arr.coords['x'].attrs) self.assertEqual( new_data_arr.coords['x'].attrs['units'], 'degrees_east') self.assertIn('crs', new_data_arr.coords) self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) def test_swath_def_coordinates(self): """Test coordinates being added with an SwathDefinition.""" import dask.array as da import xarray as xr from pyresample.geometry import SwathDefinition from satpy.resample import add_crs_xy_coords lons_data = da.random.random((200, 100), chunks=50) lats_data = da.random.random((200, 100), chunks=50) lons = xr.DataArray(lons_data, attrs={'units': 'degrees_east'}, dims=('y', 'x')) lats = xr.DataArray(lats_data, attrs={'units': 'degrees_north'}, dims=('y', 'x')) area_def = SwathDefinition(lons, lats) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), attrs={'area': area_def}, dims=('y', 'x'), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) # See https://github.com/pydata/xarray/issues/3068 # self.assertIn('longitude', new_data_arr.coords) # self.assertIn('units', new_data_arr.coords['longitude'].attrs) # self.assertEqual( # new_data_arr.coords['longitude'].attrs['units'], 'degrees_east') # self.assertIsInstance(new_data_arr.coords['longitude'].data, da.Array) # self.assertIn('latitude', new_data_arr.coords) # self.assertIn('units', new_data_arr.coords['latitude'].attrs) # self.assertEqual( # new_data_arr.coords['latitude'].attrs['units'], 'degrees_north') # self.assertIsInstance(new_data_arr.coords['latitude'].data, da.Array) self.assertIn('crs', new_data_arr.coords) crs = new_data_arr.coords['crs'].item() self.assertIsInstance(crs, CRS) assert crs.is_geographic self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) class TestBucketAvg(unittest.TestCase): """Test the bucket resampler.""" def setUp(self): """Create fake area definitions and resampler to be tested.""" from satpy.resample import BucketAvg get_lonlats = mock.MagicMock() get_lonlats.return_value = (1, 2) get_proj_vectors = mock.MagicMock() get_proj_vectors.return_value = ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]) self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats, crs=None, get_proj_vectors=get_proj_vectors) self.bucket = BucketAvg(self.source_geo_def, self.target_geo_def) def test_init(self): """Test 
bucket resampler initialization.""" self.assertIsNone(self.bucket.resampler) self.assertTrue(self.bucket.source_geo_def == self.source_geo_def) self.assertTrue(self.bucket.target_geo_def == self.target_geo_def) @mock.patch('pyresample.bucket.BucketResampler') def test_precompute(self, bucket): """Test bucket resampler precomputation.""" bucket.return_value = True self.bucket.precompute() self.assertTrue(self.bucket.resampler) bucket.assert_called_once_with(self.target_geo_def, 1, 2) def _compute_mocked_bucket_avg(self, data, return_data=None, **kwargs): """Compute the mocked bucket average.""" self.bucket.resampler = mock.MagicMock() if return_data is not None: self.bucket.resampler.get_average.return_value = return_data else: self.bucket.resampler.get_average.return_value = data res = self.bucket.compute(data, **kwargs) return res def test_compute(self): """Test bucket resampler computation.""" import dask.array as da # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_avg(data, fill_value=2) self.assertEqual(res.shape, (1, 5)) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_avg(data, fill_value=2) self.assertEqual(res.shape, (1, 5, 5)) # 3D data data = da.ones((3, 5, 5)) self.bucket.resampler.get_average.return_value = data[0, :, :] res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) self.assertEqual(res.shape, (3, 5, 5)) @mock.patch('satpy.resample.PR_USE_SKIPNA', True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" import dask.array as da data = da.ones((5,)) self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=True) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, skipna=True) self._compute_mocked_bucket_avg(data, fill_value=2, skipna=False) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, skipna=False) self._compute_mocked_bucket_avg(data, fill_value=2) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, skipna=True) @mock.patch('satpy.resample.PR_USE_SKIPNA', False) def test_compute_and_not_use_skipna_handling(self): """Test bucket resampler computation and not use skipna handling.""" import dask.array as da data = da.ones((5,)) self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=True) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, mask_all_nan=True) self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=False) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, mask_all_nan=False) self._compute_mocked_bucket_avg(data, fill_value=2) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, mask_all_nan=False) self._compute_mocked_bucket_avg(data, fill_value=2, skipna=True) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, mask_all_nan=False) @mock.patch('pyresample.bucket.BucketResampler') def test_resample(self, pyresample_bucket): """Test bucket resamplers resample method.""" import dask.array as da import xarray as xr self.bucket.resampler = mock.MagicMock() self.bucket.precompute = mock.MagicMock() self.bucket.compute = mock.MagicMock() # 1D input data data = xr.DataArray(da.ones((5,)), dims=('foo'), attrs={'bar': 'baz'}) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) self.bucket.precompute.assert_called_once() self.bucket.compute.assert_called_once() self.assertEqual(res.shape, (5, 5)) 
self.assertEqual(res.dims, ('y', 'x')) self.assertTrue('bar' in res.attrs) self.assertEqual(res.attrs['bar'], 'baz') # 2D input data data = xr.DataArray(da.ones((5, 5)), dims=('foo', 'bar')) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (5, 5)) self.assertEqual(res.dims, ('y', 'x')) # 3D input data with 'bands' dim data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'foo', 'bar'), coords={'bands': ['L']}) self.bucket.compute.return_value = da.ones((1, 5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (1, 5, 5)) self.assertEqual(res.dims, ('bands', 'y', 'x')) self.assertEqual(res.coords['bands'], ['L']) # 3D input data with misc dim names data = xr.DataArray(da.ones((3, 5, 5)), dims=('foo', 'bar', 'baz')) self.bucket.compute.return_value = da.ones((3, 5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (3, 5, 5)) self.assertEqual(res.dims, ('foo', 'bar', 'baz')) class TestBucketSum(unittest.TestCase): """Test the sum bucket resampler.""" def setUp(self): """Create fake area definitions and resampler to be tested.""" from satpy.resample import BucketSum get_lonlats = mock.MagicMock() get_lonlats.return_value = (1, 2) self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.bucket = BucketSum(self.source_geo_def, self.target_geo_def) def _compute_mocked_bucket_sum(self, data, return_data=None, **kwargs): """Compute the mocked bucket sum.""" self.bucket.resampler = mock.MagicMock() if return_data is not None: self.bucket.resampler.get_sum.return_value = return_data else: self.bucket.resampler.get_sum.return_value = data res = self.bucket.compute(data, **kwargs) return res def test_compute(self): """Test sum bucket resampler computation.""" import dask.array as da # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_sum(data) self.assertEqual(res.shape, (1, 5)) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_sum(data) self.assertEqual(res.shape, (1, 5, 5)) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) self.assertEqual(res.shape, (3, 5, 5)) @mock.patch('satpy.resample.PR_USE_SKIPNA', True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" import dask.array as da data = da.ones((5,)) self._compute_mocked_bucket_sum(data, mask_all_nan=True) self.bucket.resampler.get_sum.assert_called_once_with( data, skipna=True) self._compute_mocked_bucket_sum(data, skipna=False) self.bucket.resampler.get_sum.assert_called_once_with( data, skipna=False) self._compute_mocked_bucket_sum(data) self.bucket.resampler.get_sum.assert_called_once_with( data, skipna=True) @mock.patch('satpy.resample.PR_USE_SKIPNA', False) def test_compute_and_not_use_skipna_handling(self): """Test bucket resampler computation and not use skipna handling.""" import dask.array as da data = da.ones((5,)) self._compute_mocked_bucket_sum(data, mask_all_nan=True) self.bucket.resampler.get_sum.assert_called_once_with( data, mask_all_nan=True) self._compute_mocked_bucket_sum(data, mask_all_nan=False) self.bucket.resampler.get_sum.assert_called_once_with( data, mask_all_nan=False) self._compute_mocked_bucket_sum(data) self.bucket.resampler.get_sum.assert_called_once_with( data, mask_all_nan=False) self._compute_mocked_bucket_sum(data, fill_value=2, skipna=True) self.bucket.resampler.get_sum.assert_called_once_with( data, 
fill_value=2, mask_all_nan=False) class TestBucketCount(unittest.TestCase): """Test the count bucket resampler.""" def setUp(self): """Create fake area definitions and resampler to be tested.""" from satpy.resample import BucketCount get_lonlats = mock.MagicMock() get_lonlats.return_value = (1, 2) self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.bucket = BucketCount(self.source_geo_def, self.target_geo_def) def _compute_mocked_bucket_count(self, data, return_data=None, **kwargs): """Compute the mocked bucket count.""" self.bucket.resampler = mock.MagicMock() if return_data is not None: self.bucket.resampler.get_count.return_value = return_data else: self.bucket.resampler.get_count.return_value = data res = self.bucket.compute(data, **kwargs) return res def test_compute(self): """Test count bucket resampler computation.""" import dask.array as da # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() self.assertEqual(res.shape, (1, 5)) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() self.assertEqual(res.shape, (1, 5, 5)) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_count(data, return_data=data[0, :, :]) self.assertEqual(res.shape, (3, 5, 5)) class TestBucketFraction(unittest.TestCase): """Test the fraction bucket resampler.""" def setUp(self): """Create fake area definitions and resampler to be tested.""" from satpy.resample import BucketFraction get_lonlats = mock.MagicMock() get_lonlats.return_value = (1, 2) get_proj_vectors = mock.MagicMock() get_proj_vectors.return_value = ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]) self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats, crs=None, get_proj_vectors=get_proj_vectors) self.bucket = BucketFraction(self.source_geo_def, self.target_geo_def) def test_compute(self): """Test fraction bucket resampler computation.""" import dask.array as da import numpy as np self.bucket.resampler = mock.MagicMock() data = da.ones((3, 3)) # No kwargs given _ = self.bucket.compute(data) self.bucket.resampler.get_fractions.assert_called_with( data, categories=None, fill_value=np.nan) # Custom kwargs _ = self.bucket.compute(data, categories=[1, 2], fill_value=0) self.bucket.resampler.get_fractions.assert_called_with( data, categories=[1, 2], fill_value=0) # Too many dimensions data = da.ones((3, 5, 5)) with self.assertRaises(ValueError): _ = self.bucket.compute(data) @mock.patch('pyresample.bucket.BucketResampler') def test_resample(self, pyresample_bucket): """Test fraction bucket resamplers resample method.""" import dask.array as da import numpy as np import xarray as xr self.bucket.resampler = mock.MagicMock() self.bucket.precompute = mock.MagicMock() self.bucket.compute = mock.MagicMock() # Fractions return a dict data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'y', 'x')) arr = da.ones((5, 5)) self.bucket.compute.return_value = {0: arr, 1: arr, 2: arr} res = self.bucket.resample(data) self.assertTrue('categories' in res.coords) self.assertTrue('categories' in res.dims) self.assertTrue(np.all(res.coords['categories'] == np.array([0, 1, 2]))) satpy-0.34.0/satpy/tests/test_scene.py000066400000000000000000002456351420401153000177530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2019 
Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for scene.py.""" import os import random import string import unittest from datetime import datetime from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr import satpy from satpy import Scene from satpy.tests.utils import ( FAKE_FILEHANDLER_END, FAKE_FILEHANDLER_START, default_id_keys_config, make_cid, make_dataid, make_dsq, spy_decorator, ) TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), 'etc') def _check_comp19_deps_are_loaded(scene): # comp19 required resampling to produce so we should have its 3 deps # 1. comp13 # 2. ds5 # 3. ds2 loaded_ids = list(scene.keys()) assert len(loaded_ids) == 3 for name in ('comp13', 'ds5', 'ds2'): assert any(x['name'] == name for x in loaded_ids) class TestScene: """Test the scene class.""" def setup_method(self): """Set config_path to point to test 'etc' directory.""" self.old_config_path = satpy.config.get('config_path') satpy.config.set(config_path=[TEST_ETC_DIR]) def teardown_method(self): """Restore previous 'config_path' setting.""" satpy.config.set(config_path=self.old_config_path) def test_init(self): """Test scene initialization.""" with mock.patch('satpy.scene.Scene._create_reader_instances') as cri: cri.return_value = {} Scene(filenames=['bla'], reader='blo') cri.assert_called_once_with(filenames=['bla'], reader='blo', reader_kwargs=None) def test_init_str_filename(self): """Test initializing with a single string as filenames.""" pytest.raises(ValueError, Scene, reader='blo', filenames='test.nc') def test_start_end_times(self): """Test start and end times for a scene.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END def test_init_preserve_reader_kwargs(self): """Test that the initialization preserves the kwargs.""" cri = spy_decorator(Scene._create_reader_instances) with mock.patch('satpy.scene.Scene._create_reader_instances', cri): reader_kwargs = {'calibration_type': 'gsics'} scene = Scene(filenames=['fake1_1.txt'], reader='fake1', filter_parameters={'area': 'euron1'}, reader_kwargs=reader_kwargs) assert reader_kwargs is not cri.mock.call_args[1]['reader_kwargs'] assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END @pytest.mark.parametrize( ("reader", "filenames", "exp_sensors"), [ ("fake1", ["fake1_1.txt"], {"fake_sensor"}), (None, {"fake1": ["fake1_1.txt"], "fake2_1ds": ["fake2_1ds_1.txt"]}, {"fake_sensor", "fake_sensor2"}), ] ) def test_sensor_names_readers(self, reader, filenames, exp_sensors): """Test that Scene sensor_names handles different cases properly.""" scene = Scene(reader=reader, filenames=filenames) assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END assert scene.sensor_names == exp_sensors @pytest.mark.parametrize( ("include_reader", "added_sensor", 
"exp_sensors"), [ (False, "my_sensor", {"my_sensor"}), (True, "my_sensor", {"my_sensor", "fake_sensor"}), (False, {"my_sensor"}, {"my_sensor"}), (True, {"my_sensor"}, {"my_sensor", "fake_sensor"}), (False, {"my_sensor1", "my_sensor2"}, {"my_sensor1", "my_sensor2"}), (True, {"my_sensor1", "my_sensor2"}, {"my_sensor1", "my_sensor2", "fake_sensor"}), ] ) def test_sensor_names_added_datasets(self, include_reader, added_sensor, exp_sensors): """Test that Scene sensor_names handles contained sensors properly.""" if include_reader: scene = Scene(reader="fake1", filenames=["fake1_1.txt"]) else: scene = Scene() scene["my_ds"] = xr.DataArray([], attrs={"sensor": added_sensor}) assert scene.sensor_names == exp_sensors def test_init_alone(self): """Test simple initialization.""" scn = Scene() assert not scn._readers, 'Empty scene should not load any readers' def test_init_no_files(self): """Test that providing an empty list of filenames fails.""" pytest.raises(ValueError, Scene, reader='viirs_sdr', filenames=[]) def test_create_reader_instances_with_filenames(self): """Test creating a reader providing filenames.""" filenames = ["bla", "foo", "bar"] reader_name = None with mock.patch('satpy.scene.load_readers') as findermock: Scene(filenames=filenames) findermock.assert_called_once_with( filenames=filenames, reader=reader_name, reader_kwargs=None, ) def test_init_with_empty_filenames(self): """Test initialization with empty filename list.""" filenames = [] Scene(filenames=filenames) def test_init_with_fsfile(self): """Test initialisation with FSFile objects.""" from satpy.readers import FSFile # We should not mock _create_reader_instances here, because in # https://github.com/pytroll/satpy/issues/1605 satpy fails with # TypeError within that method if passed an FSFile instance. # Instead rely on the ValueError that satpy raises if no readers # are found. # Choose random filename that doesn't exist. Not using tempfile here, # because tempfile creates files and we don't want that here. 
fsf = FSFile("".join(random.choices(string.printable, k=50))) with pytest.raises(ValueError, match="No supported files found"): Scene(filenames=[fsf], reader=[]) # TODO: Rewrite this test for the 'find_files_and_readers' function # def test_create_reader_instances_with_sensor(self): # import satpy.scene # sensors = ["bla", "foo", "bar"] # filenames = None # reader_name = None # with mock.patch('satpy.scene.Scene._compute_metadata_from_readers'): # with mock.patch('satpy.scene.load_readers') as findermock: # scene = satpy.scene.Scene(sensor=sensors) # findermock.assert_called_once_with( # ppp_config_dir=mock.ANY, # reader=reader_name, # filenames=filenames, # reader_kwargs=None, # ) # def test_create_reader_instances_with_sensor_and_filenames(self): # import satpy.scene # sensors = ["bla", "foo", "bar"] # filenames = ["1", "2", "3"] # reader_name = None # with mock.patch('satpy.scene.Scene._compute_metadata_from_readers'): # with mock.patch('satpy.scene.load_readers') as findermock: # scene = satpy.scene.Scene(sensor=sensors, filenames=filenames) # findermock.assert_called_once_with( # ppp_config_dir=mock.ANY, # reader=reader_name, # sensor=sensors, # filenames=filenames, # reader_kwargs=None, # ) def test_create_reader_instances_with_reader(self): """Test createring a reader instance providing the reader name.""" reader = "foo" filenames = ["1", "2", "3"] with mock.patch('satpy.scene.load_readers') as findermock: findermock.return_value = {} Scene(reader=reader, filenames=filenames) findermock.assert_called_once_with(reader=reader, filenames=filenames, reader_kwargs=None, ) def test_create_reader_instances_with_reader_kwargs(self): """Test creating a reader instance with reader kwargs.""" from satpy.readers.yaml_reader import FileYAMLReader reader_kwargs = {'calibration_type': 'gsics'} filter_parameters = {'area': 'euron1'} reader_kwargs2 = {'calibration_type': 'gsics', 'filter_parameters': filter_parameters} rinit = spy_decorator(FileYAMLReader.create_filehandlers) with mock.patch('satpy.readers.yaml_reader.FileYAMLReader.create_filehandlers', rinit): scene = Scene(filenames=['fake1_1.txt'], reader='fake1', filter_parameters={'area': 'euron1'}, reader_kwargs=reader_kwargs) del scene assert reader_kwargs == rinit.mock.call_args[1]['fh_kwargs'] rinit.mock.reset_mock() scene = Scene(filenames=['fake1_1.txt'], reader='fake1', reader_kwargs=reader_kwargs2) assert reader_kwargs == rinit.mock.call_args[1]['fh_kwargs'] del scene def test_create_multiple_reader_different_kwargs(self): """Test passing different kwargs to different readers.""" from satpy.readers import load_reader with satpy.config.set(config_path=[TEST_ETC_DIR]), \ mock.patch.object(satpy.readers, 'load_reader', wraps=load_reader) as lr: Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"], "fake2_1ds": ["fake2_1ds_1.txt"]}, reader_kwargs={ "fake1_1ds": {"mouth": "omegna"}, "fake2_1ds": {"mouth": "varallo"} }) lr.assert_has_calls([ mock.call([os.path.join(TEST_ETC_DIR, 'readers', 'fake1_1ds.yaml')], mouth="omegna"), mock.call([os.path.join(TEST_ETC_DIR, 'readers', 'fake2_1ds.yaml')], mouth="varallo")]) def test_iter(self): """Test iteration over the scene.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5)) scene["2"] = xr.DataArray(np.arange(5)) scene["3"] = xr.DataArray(np.arange(5)) for x in scene: assert isinstance(x, xr.DataArray) def test_iter_by_area_swath(self): """Test iterating by area on a swath.""" from pyresample.geometry import SwathDefinition scene = Scene() sd = SwathDefinition(lons=np.arange(5), 
lats=np.arange(5)) scene["1"] = xr.DataArray(np.arange(5), attrs={'area': sd}) scene["2"] = xr.DataArray(np.arange(5), attrs={'area': sd}) scene["3"] = xr.DataArray(np.arange(5)) for area_obj, ds_list in scene.iter_by_area(): ds_list_names = set(ds['name'] for ds in ds_list) if area_obj is sd: assert ds_list_names == {'1', '2'} else: assert area_obj is None assert ds_list_names == {'3'} def test_bad_setitem(self): """Test setting an item wrongly.""" scene = Scene() pytest.raises(ValueError, scene.__setitem__, '1', np.arange(5)) def test_setitem(self): """Test setting an item.""" from satpy.tests.utils import make_dataid scene = Scene() scene["1"] = ds1 = xr.DataArray(np.arange(5)) expected_id = make_cid(**ds1.attrs) assert set(scene._datasets.keys()) == {expected_id} assert set(scene._wishlist) == {expected_id} did = make_dataid(name='oranges') scene[did] = ds1 assert 'oranges' in scene nparray = np.arange(5*5).reshape(5, 5) with pytest.raises(ValueError): scene['apples'] = nparray assert 'apples' not in scene did = make_dataid(name='apples') scene[did] = nparray assert 'apples' in scene def test_getitem(self): """Test __getitem__ with names only.""" scene = Scene() scene["1"] = ds1 = xr.DataArray(np.arange(5)) scene["2"] = ds2 = xr.DataArray(np.arange(5)) scene["3"] = ds3 = xr.DataArray(np.arange(5)) assert scene['1'] is ds1 assert scene['2'] is ds2 assert scene['3'] is ds3 pytest.raises(KeyError, scene.__getitem__, '4') assert scene.get('3') is ds3 assert scene.get('4') is None def test_getitem_modifiers(self): """Test __getitem__ with names and modifiers.""" # Return least modified item scene = Scene() scene['1'] = ds1_m0 = xr.DataArray(np.arange(5)) scene[make_dataid(name='1', modifiers=('mod1',)) ] = xr.DataArray(np.arange(5)) assert scene['1'] is ds1_m0 assert len(list(scene.keys())) == 2 scene = Scene() scene['1'] = ds1_m0 = xr.DataArray(np.arange(5)) scene[make_dataid(name='1', modifiers=('mod1',)) ] = xr.DataArray(np.arange(5)) scene[make_dataid(name='1', modifiers=('mod1', 'mod2')) ] = xr.DataArray(np.arange(5)) assert scene['1'] is ds1_m0 assert len(list(scene.keys())) == 3 scene = Scene() scene[make_dataid(name='1', modifiers=('mod1', 'mod2')) ] = ds1_m2 = xr.DataArray(np.arange(5)) scene[make_dataid(name='1', modifiers=('mod1',)) ] = ds1_m1 = xr.DataArray(np.arange(5)) assert scene['1'] is ds1_m1 assert scene[make_dataid(name='1', modifiers=('mod1', 'mod2'))] is ds1_m2 pytest.raises(KeyError, scene.__getitem__, make_dataid(name='1', modifiers=tuple())) assert len(list(scene.keys())) == 2 def test_getitem_slices(self): """Test __getitem__ with slices.""" from pyresample.geometry import AreaDefinition, SwathDefinition from pyresample.utils import proj4_str_to_dict scene1 = Scene() scene2 = Scene() proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. 
+lat_0=25 +lat_1=25 ' '+units=m +no_defs') area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) swath_def = SwathDefinition(lons=np.zeros((5, 10)), lats=np.zeros((5, 10))) scene1["1"] = scene2["1"] = xr.DataArray(np.zeros((5, 10))) scene1["2"] = scene2["2"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x')) scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), attrs={'area': area_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), attrs={'name': 'anc_var', 'area': area_def})] attrs = {'ancillary_variables': anc_vars, 'area': area_def} scene1["3a"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), attrs=attrs) scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), attrs={'area': swath_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), attrs={'name': 'anc_var', 'area': swath_def})] attrs = {'ancillary_variables': anc_vars, 'area': swath_def} scene2["4a"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), attrs=attrs) new_scn1 = scene1[2:5, 2:8] new_scn2 = scene2[2:5, 2:8] for new_scn in [new_scn1, new_scn2]: # datasets without an area don't get sliced assert new_scn['1'].shape == (5, 10) assert new_scn['2'].shape == (5, 10) assert new_scn1['3'].shape == (3, 6) assert 'area' in new_scn1['3'].attrs assert new_scn1['3'].attrs['area'].shape == (3, 6) assert new_scn1['3a'].shape == (3, 6) a_var = new_scn1['3a'].attrs['ancillary_variables'][0] assert a_var.shape == (3, 6) assert new_scn2['4'].shape == (3, 6) assert 'area' in new_scn2['4'].attrs assert new_scn2['4'].attrs['area'].shape == (3, 6) assert new_scn2['4a'].shape == (3, 6) a_var = new_scn2['4a'].attrs['ancillary_variables'][0] assert a_var.shape == (3, 6) def test_crop(self): """Test the crop method.""" from pyresample.geometry import AreaDefinition scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} x_size = 3712 y_size = 3712 area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( 'test2', 'test2', 'test2', proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((y_size, x_size))) scene1["2"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), attrs={'area': area_def}) scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), attrs={'area': area_def2}) # by area crop_area = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, (area_extent[0] + 10000., area_extent[1] + 500000., area_extent[2] - 10000., area_extent[3] - 500000.) 
) new_scn1 = scene1.crop(crop_area) assert '1' in new_scn1 assert '2' in new_scn1 assert '3' in new_scn1 assert new_scn1['1'].shape == (y_size, x_size) assert new_scn1['2'].shape == (y_size, x_size) assert new_scn1['3'].shape == (3380, 3708) assert new_scn1['4'].shape == (1690, 1854) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) assert '1' in new_scn1 assert '2' in new_scn1 assert '3' in new_scn1 assert new_scn1['1'].shape == (y_size, x_size) assert new_scn1['2'].shape == (y_size, x_size) assert new_scn1['3'].shape == (184, 714) assert new_scn1['4'].shape == (92, 357) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(-200000., -100000., 0, 0)) assert '1' in new_scn1 assert '2' in new_scn1 assert '3' in new_scn1 assert new_scn1['1'].shape == (y_size, x_size) assert new_scn1['2'].shape == (y_size, x_size) assert new_scn1['3'].shape == (36, 70) assert new_scn1['4'].shape == (18, 35) def test_crop_epsg_crs(self): """Test the crop method when source area uses an EPSG code.""" from pyresample.geometry import AreaDefinition scene1 = Scene() area_extent = (699960.0, 5390220.0, 809760.0, 5500020.0) x_size = 3712 y_size = 3712 area_def = AreaDefinition( 'test', 'test', 'test', "EPSG:32630", x_size, y_size, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), attrs={'area': area_def}) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(719695.7781587119, 5427887.407618969, 725068.1609052602, 5433708.364368956)) assert '1' in new_scn1 assert new_scn1['1'].shape == (198, 182) def test_crop_rgb(self): """Test the crop method on multi-dimensional data.""" from pyresample.geometry import AreaDefinition scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} x_size = 3712 y_size = 3712 area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( 'test2', 'test2', 'test2', proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((3, y_size, x_size)), dims=('bands', 'y', 'x'), attrs={'area': area_def}) scene1["2"] = xr.DataArray(np.zeros((y_size // 2, 3, x_size // 2)), dims=('y', 'bands', 'x'), attrs={'area': area_def2}) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) assert '1' in new_scn1 assert '2' in new_scn1 assert 'bands' in new_scn1['1'].dims assert 'bands' in new_scn1['2'].dims assert new_scn1['1'].shape == (3, 184, 714) assert new_scn1['2'].shape == (92, 3, 357) def test_contains(self): """Test contains.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), attrs={'wavelength': (0.1, 0.2, 0.3), '_satpy_id_keys': default_id_keys_config}) assert '1' in scene assert 0.15 in scene assert '2' not in scene assert 0.31 not in scene scene = Scene() scene['blueberry'] = xr.DataArray(np.arange(5)) scene['blackberry'] = xr.DataArray(np.arange(5)) scene['strawberry'] = xr.DataArray(np.arange(5)) scene['raspberry'] = xr.DataArray(np.arange(5)) # deepcode ignore replace~keys~list~compare: This is on purpose assert make_cid(name='blueberry') in scene.keys() assert make_cid(name='blueberry') in scene assert 'blueberry' in scene assert 'blueberry' not in scene.keys() def test_delitem(self): """Test deleting an item.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), attrs={'wavelength': (0.1, 0.2, 0.3), '_satpy_id_keys': default_id_keys_config}) scene["2"] = xr.DataArray(np.arange(5), 
attrs={'wavelength': (0.4, 0.5, 0.6), '_satpy_id_keys': default_id_keys_config}) scene["3"] = xr.DataArray(np.arange(5), attrs={'wavelength': (0.7, 0.8, 0.9), '_satpy_id_keys': default_id_keys_config}) del scene['1'] del scene['3'] del scene[0.45] assert not scene._wishlist assert not list(scene._datasets.keys()) pytest.raises(KeyError, scene.__delitem__, 0.2) def test_all_datasets_no_readers(self): """Test all datasets with no reader.""" scene = Scene() pytest.raises(KeyError, scene.all_dataset_ids, reader_name='fake') id_list = scene.all_dataset_ids() assert id_list == [] # no sensors are loaded so we shouldn't get any comps either id_list = scene.all_dataset_ids(composites=True) assert id_list == [] def test_all_dataset_names_no_readers(self): """Test all dataset names with no reader.""" scene = Scene() pytest.raises(KeyError, scene.all_dataset_names, reader_name='fake') name_list = scene.all_dataset_names() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either name_list = scene.all_dataset_names(composites=True) assert name_list == [] def test_available_dataset_no_readers(self): """Test the available datasets without a reader.""" scene = Scene() pytest.raises( KeyError, scene.available_dataset_ids, reader_name='fake') name_list = scene.available_dataset_ids() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either name_list = scene.available_dataset_ids(composites=True) assert name_list == [] def test_available_dataset_names_no_readers(self): """Test the available dataset names without a reader.""" scene = Scene() pytest.raises( KeyError, scene.available_dataset_names, reader_name='fake') name_list = scene.available_dataset_names() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either name_list = scene.available_dataset_names(composites=True) assert name_list == [] class TestFinestCoarsestArea: """Test the Scene logic for finding the finest and coarsest area.""" def setup_method(self): """Set common variables.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict self.scene = Scene() self.scene["1"] = xr.DataArray(np.arange(10).reshape((2, 5)), attrs={'wavelength': (0.1, 0.2, 0.3)}) self.ds1 = self.scene["1"] self.scene["2"] = xr.DataArray(np.arange(40).reshape((4, 10)), attrs={'wavelength': (0.4, 0.5, 0.6)}) self.ds2 = self.scene["2"] self.scene["3"] = xr.DataArray(np.arange(40).reshape((4, 10)), attrs={'wavelength': (0.7, 0.8, 0.9)}) self.ds3 = self.scene["3"] proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. 
+lat_0=25 +lat_1=25 ' '+units=m +no_defs') self.area_def1 = AreaDefinition( 'test', 'test', 'test', proj_dict, 100, 200, (-1000., -1500., 1000., 1500.), ) self.area_def2 = AreaDefinition( 'test', 'test', 'test', proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) def test_coarsest_finest_area_upright_area(self): """Test 'coarsest_area' and 'finest_area' methods for upright areas.""" self.ds1.attrs['area'] = self.area_def1 self.ds2.attrs['area'] = self.area_def2 self.ds3.attrs['area'] = self.area_def2 assert self.scene.coarsest_area() is self.area_def1 assert self.scene.finest_area() is self.area_def2 assert self.scene.coarsest_area(['2', '3']) is self.area_def2 def test_coarsest_finest_area_flipped_area(self): """Test 'coarsest_area' and 'finest_area' methods for flipped areas with negative pixel sizes.""" area_def1_flipped = self.area_def1.copy(area_extent=tuple([-1*ae for ae in self.area_def1.area_extent])) area_def2_flipped = self.area_def2.copy(area_extent=tuple([-1*ae for ae in self.area_def2.area_extent])) self.ds1.attrs['area'] = area_def1_flipped self.ds2.attrs['area'] = area_def2_flipped self.ds3.attrs['area'] = area_def2_flipped assert self.scene.coarsest_area() is area_def1_flipped assert self.scene.finest_area() is area_def2_flipped assert self.scene.coarsest_area(['2', '3']) is area_def2_flipped class TestSceneAvailableDatasets: """Test the Scene's handling of various dependencies.""" def setup_method(self): """Set config_path to point to test 'etc' directory.""" self.old_config_path = satpy.config.get('config_path') satpy.config.set(config_path=[TEST_ETC_DIR]) def teardown_method(self): """Restore previous 'config_path' setting.""" satpy.config.set(config_path=self.old_config_path) def test_all_datasets_one_reader(self): """Test all datasets for one reader.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') id_list = scene.all_dataset_ids() # 20 data products + 6 lon/lat products num_reader_ds = 21 + 6 assert len(id_list) == num_reader_ds id_list = scene.all_dataset_ids(composites=True) assert len(id_list) == num_reader_ds + 29 def test_all_datasets_multiple_reader(self): """Test all datasets for multiple readers.""" scene = Scene(filenames={'fake1_1ds': ['fake1_1ds_1.txt'], 'fake2_1ds': ['fake2_1ds_1.txt']}) id_list = scene.all_dataset_ids() assert len(id_list) == 2 id_list = scene.all_dataset_ids(composites=True) # ds1 and ds2 => 2 # composites that use these two datasets => 11 assert len(id_list) == 2 + 11 def test_available_datasets_one_reader(self): """Test the available datasets for one reader.""" scene = Scene(filenames=['fake1_1ds_1.txt'], reader='fake1_1ds') id_list = scene.available_dataset_ids() assert len(id_list) == 1 id_list = scene.available_dataset_ids(composites=True) # ds1, comp1, comp14, comp16, static_image, comp26 assert len(id_list) == 6 def test_available_composite_ids_missing_available(self): """Test available_composite_ids when a composites dep is missing.""" scene = Scene(filenames=['fake1_1ds_1.txt'], reader='fake1_1ds') assert 'comp2' not in scene.available_composite_names() def test_available_composites_known_versus_all(self): """Test available_composite_ids when some datasets aren't available.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1', reader_kwargs={"not_available": ["ds2", "ds3"]}) all_comps = scene.all_composite_names() avail_comps = scene.available_composite_names() # there should always be more known composites than available composites assert len(all_comps) > len(avail_comps) for not_avail_comp in ("comp2", 
"comp3"): assert not_avail_comp in all_comps assert not_avail_comp not in avail_comps class TestSceneSerialization: """Test the Scene serialization.""" def setup_method(self): """Set config_path to point to test 'etc' directory.""" self.old_config_path = satpy.config.get('config_path') satpy.config.set(config_path=[TEST_ETC_DIR]) def teardown_method(self): """Restore previous 'config_path' setting.""" satpy.config.set(config_path=self.old_config_path) def test_serialization_with_readers_and_data_arr(self): """Test that dask can serialize a Scene with readers.""" from distributed.protocol import deserialize, serialize scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds1']) cloned_scene = deserialize(*serialize(scene)) assert scene._readers.keys() == cloned_scene._readers.keys() assert scene.all_dataset_ids == scene.all_dataset_ids class TestSceneLoading: """Test the Scene objects `.load` method.""" def setup_method(self): """Set config_path to point to test 'etc' directory.""" self.old_config_path = satpy.config.get('config_path') satpy.config.set(config_path=[TEST_ETC_DIR]) def teardown_method(self): """Restore previous 'config_path' setting.""" satpy.config.set(config_path=self.old_config_path) def test_load_str(self): """Test passing a string to Scene.load.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') pytest.raises(TypeError, scene.load, 'ds1') def test_load_no_exist(self): """Test loading a dataset that doesn't exist.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') pytest.raises(KeyError, scene.load, ['im_a_dataset_that_doesnt_exist']) def test_load_no_exist2(self): """Test loading a dataset that doesn't exist then another load.""" from satpy.readers.yaml_reader import FileYAMLReader load_mock = spy_decorator(FileYAMLReader.load) with mock.patch.object(FileYAMLReader, 'load', load_mock): lmock = load_mock.mock scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds9_fail_load']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 lmock.assert_called_once_with( set([make_dataid(name='ds9_fail_load', wavelength=(1.0, 1.1, 1.2))])) scene.load(['ds1']) loaded_ids = list(scene._datasets.keys()) assert lmock.call_count == 2 # most recent call should have only been ds1 lmock.assert_called_with(set([ make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple())])) assert len(loaded_ids) == 1 def test_load_ds1_no_comps(self): """Test loading one dataset with no loaded compositors.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds1']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) def test_load_ds1_load_twice(self): """Test loading one dataset with no loaded compositors.""" from satpy.readers.yaml_reader import FileYAMLReader scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds1']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) load_mock = spy_decorator(FileYAMLReader.load) with mock.patch.object(FileYAMLReader, 'load', load_mock): lmock = load_mock.mock scene.load(['ds1']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) assert not lmock.called, 
("Reader.load was called again when " "loading something that's already " "loaded") def test_load_ds1_unknown_modifier(self): """Test loading one dataset with no loaded compositors.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') pytest.raises(KeyError, scene.load, [make_dataid(name='ds1', modifiers=('_fake_bad_mod_',))]) def test_load_ds4_cal(self): """Test loading a dataset that has two calibration variations.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds4']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['calibration'] == 'reflectance' def test_load_ds5_best_resolution(self): """Test loading a dataset has multiple resolutions available.""" scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') scene.load(['ds5']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['name'] == 'ds5' assert loaded_ids[0]['resolution'] == 250 def test_load_ds5_multiple_resolution(self): """Test loading a dataset has multiple resolutions available with different resolutions.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds5'], resolution=1000) scene.load(['ds5'], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert loaded_ids[0]['name'] == 'ds5' assert loaded_ids[0]['resolution'] == 500 assert loaded_ids[1]['name'] == 'ds5' assert loaded_ids[1]['resolution'] == 1000 def test_load_ds5_resolution_list(self): """Test loading a dataset has multiple resolutions available with different resolutions.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds5'], resolution=[500, 1000]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['name'] == 'ds5' assert loaded_ids[0]['resolution'] == 500 def test_load_ds5_empty_modifiers(self): """Test loading a dataset has multiple resolutions available with different resolutions.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load([make_dsq(name='ds5', modifiers=tuple())]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['name'] == 'ds5' def test_load_ds5_missing_best_resolution(self): """Test loading a dataset that has multiple resolutions but the best isn't available.""" # only the 500m is available scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds5']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['name'] == 'ds5' assert loaded_ids[0]['resolution'] == 500 def test_load_ds6_wl(self): """Test loading a dataset by wavelength.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load([0.22]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['name'] == 'ds6' def test_load_ds9_fail_load(self): """Test loading a dataset that will fail during load.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ds9_fail_load']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 def test_load_comp1(self): """Test loading a composite with one required prereq.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp1']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp1') def test_load_comp4(self): """Test loading a composite that depends on a composite.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') 
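        # ------------------------------------------------------------------
        # Added illustration (not part of the original test): the tests in
        # this class exercise the different ways ``Scene.load`` can select
        # datasets and composites.  A minimal usage sketch, assuming real
        # input files and a real reader name (the filename and reader below
        # are hypothetical):
        #
        #     from satpy import Scene
        #     from satpy.dataset.dataid import DataQuery
        #
        #     scn = Scene(filenames=['my_granule.nc'], reader='some_reader')
        #     scn.load(['ds5'], resolution=500)   # select by name + resolution
        #     scn.load([0.22])                    # select by wavelength in um
        #     scn.load([DataQuery(name='ds1', modifiers=('mod1',))])
        #
        # ``make_dataid``/``make_dsq`` used throughout these tests are small
        # helpers from satpy.tests.utils that build the same DataID/DataQuery
        # objects directly.
        # ------------------------------------------------------------------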
scene.load(['comp4']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp4') def test_load_multiple_resolutions(self): """Test loading a dataset has multiple resolutions available with different resolutions.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') comp25 = make_cid(name='comp25', resolution=1000) scene[comp25] = xr.DataArray([], attrs={'name': 'comp25', 'resolution': 1000}) scene.load(['comp25'], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert loaded_ids[0]['name'] == 'comp25' assert loaded_ids[0]['resolution'] == 500 assert loaded_ids[1]['name'] == 'comp25' assert loaded_ids[1]['resolution'] == 1000 def test_load_same_subcomposite(self): """Test loading a composite and one of it's subcomposites at the same time.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp24', 'comp25'], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert loaded_ids[0]['name'] == 'comp24' assert loaded_ids[0]['resolution'] == 500 assert loaded_ids[1]['name'] == 'comp25' assert loaded_ids[1]['resolution'] == 500 def test_load_comp5(self): """Test loading a composite that has an optional prerequisite.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp5']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp5') def test_load_comp6(self): """Test loading a composite that has an optional composite prerequisite.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp6']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp6') def test_load_comp8(self): """Test loading a composite that has a non-existent prereq.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') pytest.raises(KeyError, scene.load, ['comp8']) def test_load_comp9(self): """Test loading a composite that has a non-existent optional prereq.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp9']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp9') def test_load_comp10(self): """Test loading a composite that depends on a modified dataset.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp10']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp10') def test_load_comp11(self): """Test loading a composite that depends all wavelengths.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp11']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp11') def test_load_comp12(self): """Test loading a composite that depends all wavelengths that get modified.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp12']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp12') def test_load_comp13(self): """Test loading a composite that depends on a modified dataset where the resolution changes.""" # it is fine that an optional prereq doesn't exist scene = 
Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp13']) loaded_ids = list(scene.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp13') def test_load_comp14(self): """Test loading a composite that updates the DataID during generation.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp14']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['name'] == 'comp14' def test_load_comp15(self): """Test loading a composite whose prerequisites can't be loaded. Note that the prereq exists in the reader, but fails in loading. """ # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp15']) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp16(self): """Test loading a composite whose opt prereq can't be loaded. Note that the prereq exists in the reader, but fails in loading """ # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp16']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['name'] == 'comp16' def test_load_comp17(self): """Test loading a composite that depends on a composite that won't load.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp17']) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp18(self): """Test loading a composite that depends on a incompatible area modified dataset.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') scene.load(['comp18']) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 # ds4 (mod1, mod3) # ds5 (mod1, incomp_areas) # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # for the incomp_areas modifier assert len(loaded_ids) == 4 # the 1 dependencies assert 'ds3' in scene._datasets assert make_dataid(name='ds4', calibration='reflectance', modifiers=('mod1', 'mod3')) in scene._datasets assert make_dataid(name='ds5', resolution=250, modifiers=('mod1',)) in scene._datasets def test_load_comp18_2(self): """Test loading a composite that depends on a incompatible area modified dataset. Specifically a modified dataset where the modifier has optional dependencies. """ # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') scene.load(['comp18_2']) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 # ds4 (mod1, mod3) # ds5 (mod1, incomp_areas_opt) # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # and ds2 for the incomp_areas_opt modifier assert len(loaded_ids) == 5 # the 1 dependencies assert 'ds3' in scene._datasets assert 'ds2' in scene._datasets assert make_dataid(name='ds4', calibration='reflectance', modifiers=('mod1', 'mod3')) in scene._datasets assert make_dataid(name='ds5', resolution=250, modifiers=('mod1',)) in scene._datasets def test_load_comp19(self): """Test loading a composite that shares a dep with a dependency. More importantly test that loading a dependency that depends on the same dependency as this composite (a sibling dependency) and that sibling dependency includes a modifier. This test makes sure that the Node in the dependency tree is the exact same node. 
""" # Check dependency tree nodes # initialize the dep tree without loading the data scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene._update_dependency_tree({'comp19'}, None) this_node = scene._dependency_tree['comp19'] shared_dep_id = make_dataid(name='ds5', modifiers=('res_change',)) shared_dep_expected_node = scene._dependency_tree[shared_dep_id] # get the node for the first dep in the prereqs list of the # comp13 node shared_dep_node = scene._dependency_tree['comp13'].data[1][0] shared_dep_node2 = this_node.data[1][0] assert shared_dep_expected_node is shared_dep_node assert shared_dep_expected_node is shared_dep_node2 scene.load(['comp19']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp19') def test_load_multiple_comps(self): """Test loading multiple composites.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp1', 'comp2', 'comp3', 'comp4', 'comp5', 'comp6', 'comp7', 'comp9', 'comp10']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_multiple_comps_separate(self): """Test loading multiple composites, one at a time.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp10']) scene.load(['comp9']) scene.load(['comp7']) scene.load(['comp6']) scene.load(['comp5']) scene.load(['comp4']) scene.load(['comp3']) scene.load(['comp2']) scene.load(['comp1']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_modified(self): """Test loading a modified dataset.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load([make_dsq(name='ds1', modifiers=('mod1', 'mod2'))]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2') def test_load_multiple_modified(self): """Test loading multiple modified datasets.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load([ make_dataid(name='ds1', modifiers=('mod1', 'mod2')), make_dataid(name='ds2', modifiers=('mod2', 'mod1')), ]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 for i in loaded_ids: if i['name'] == 'ds1': assert i['modifiers'] == ('mod1', 'mod2') else: assert i['name'] == 'ds2' assert i['modifiers'] == ('mod2', 'mod1') def test_load_dataset_after_composite(self): """Test load composite followed by other datasets.""" from satpy.readers.yaml_reader import FileYAMLReader from satpy.tests.utils import FakeCompositor load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) with mock.patch.object(FileYAMLReader, 'load', load_mock),\ mock.patch.object(FakeCompositor, '__call__', comp_mock): lmock = load_mock.mock scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp3']) assert lmock.call_count == 1 scene.load(['ds1']) assert lmock.call_count == 2 scene.load(['ds1']) # we should only load from the file twice assert lmock.call_count == 2 # we should only generate the composite once assert comp_mock.mock.call_count == 1 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 def test_load_dataset_after_composite2(self): """Test load complex composite followed by other datasets.""" from satpy.readers.yaml_reader import FileYAMLReader from satpy.tests.utils import FakeCompositor, FakeModifier load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) mod_mock = spy_decorator(FakeModifier.__call__) with 
mock.patch.object(FileYAMLReader, 'load', load_mock), \ mock.patch.object(FakeCompositor, '__call__', comp_mock), \ mock.patch.object(FakeModifier, '__call__', mod_mock): lmock = load_mock.mock scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp10']) assert lmock.call_count == 1 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 with mock.patch.object(scene, '_generate_composites_nodes_from_loaded_datasets', wraps=scene._generate_composites_nodes_from_loaded_datasets) as m: scene.load(['ds1']) assert lmock.call_count == 2 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 # this is the unmodified ds1 assert make_dataid( name='ds1', resolution=250, calibration='reflectance', modifiers=tuple() ) in loaded_ids # m.assert_called_once_with(set([scene._dependency_tree['ds1']])) m.assert_called_once_with(set()) with mock.patch.object(scene, '_generate_composites_nodes_from_loaded_datasets', wraps=scene._generate_composites_nodes_from_loaded_datasets) as m: scene.load(['ds1']) assert lmock.call_count == 2 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 # this is the unmodified ds1 assert make_dataid( name='ds1', resolution=250, calibration='reflectance', modifiers=tuple() ) in loaded_ids m.assert_called_once_with(set()) # we should only generate the comp10 composite once but comp2 was also generated assert comp_mock.mock.call_count == 1 + 1 # Create the modded ds1 at comp10, then load the umodified version # again assert mod_mock.mock.call_count == 1 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 def test_load_comp20(self): """Test loading composite with optional modifier dependencies.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp20']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp20') def test_load_comp21(self): """Test loading composite with bad optional modifier dependencies.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp21']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp21') def test_load_comp22(self): """Test loading composite with only optional modifier dependencies.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp22']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='comp22') def test_load_green(self): """Test loading ahi_green.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['ahi_green']) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name='ahi_green') def test_no_generate_comp10(self): """Test generating a composite after loading.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp10'], generate=False) assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) assert 'comp10' not in scene._datasets # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 scene._generate_composites_from_loaded_datasets() assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) assert 'comp10' in scene._datasets assert not 
scene.missing_datasets def test_modified_with_wl_dep(self): """Test modifying a dataset with a modifier with modified deps. More importantly test that loading the modifiers dependency at the same time as the original modified dataset that the dependency tree nodes are unique and that DataIDs. """ from satpy.dataset.dataid import WavelengthRange # Check dependency tree nodes # initialize the dep tree without loading the data ds1_mod_id = make_dsq(name='ds1', modifiers=('mod_wl',)) ds3_mod_id = make_dsq(name='ds3', modifiers=('mod_wl',)) scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene._update_dependency_tree({ds1_mod_id, ds3_mod_id}, None) ds1_mod_node = scene._dependency_tree[ds1_mod_id] ds3_mod_node = scene._dependency_tree[ds3_mod_id] ds1_mod_dep_node = ds1_mod_node.data[1][1] ds3_mod_dep_node = ds3_mod_node.data[1][1] # mod_wl depends on the this node: ds6_modded_node = scene._dependency_tree[make_dataid(name='ds6', modifiers=('mod1',))] # this dep should be full qualified with name and wavelength assert ds6_modded_node.name['name'] is not None assert isinstance(ds6_modded_node.name['wavelength'], WavelengthRange) # the node should be shared between everything that uses it assert ds1_mod_dep_node is ds3_mod_dep_node assert ds1_mod_dep_node is ds6_modded_node # it is fine that an optional prereq doesn't exist scene.load([ds1_mod_id, ds3_mod_id]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert ds1_mod_id in scene._datasets assert ds3_mod_id in scene._datasets def test_load_comp11_and_23(self): """Test loading two composites that depend on similar wavelengths.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') # mock the available comps/mods in the compositor loader avail_comps = scene.available_composite_ids() assert make_cid(name='comp11') in avail_comps assert make_cid(name='comp23') in avail_comps # it is fine that an optional prereq doesn't exist scene.load(['comp11', 'comp23']) comp11_node = scene._dependency_tree['comp11'] comp23_node = scene._dependency_tree['comp23'] assert comp11_node.data[1][-1].name['name'] == 'ds10' assert comp23_node.data[1][0].name['name'] == 'ds8' loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert 'comp11' in scene assert 'comp23' in scene def test_load_too_many(self): """Test dependency tree if too many reader keys match.""" scene = Scene(filenames=['fake3_1.txt'], reader='fake3') avail_comps = scene.available_composite_ids() # static image => 1 assert len(avail_comps) == 1 pytest.raises(KeyError, scene.load, [0.21]) def test_available_comps_no_deps(self): """Test Scene available composites when composites don't have a dependency.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') all_comp_ids = scene.available_composite_ids() assert make_cid(name='static_image') in all_comp_ids available_comp_ids = scene.available_composite_ids() assert make_cid(name='static_image') in available_comp_ids class TestSceneResampling: """Test resampling a Scene to another Scene object.""" def setup_method(self): """Set config_path to point to test 'etc' directory.""" self.old_config_path = satpy.config.get('config_path') satpy.config.set(config_path=[TEST_ETC_DIR]) def teardown_method(self): """Restore previous 'config_path' setting.""" satpy.config.set(config_path=self.old_config_path) def _fake_resample_dataset(self, dataset, dest_area, **kwargs): """Return copy of dataset pretending it was resampled.""" return dataset.copy() def _fake_resample_dataset_force_20x20(self, dataset, 
dest_area, **kwargs): """Return copy of dataset pretending it was resampled to (20, 20) shape.""" data = np.zeros((20, 20)) attrs = dataset.attrs.copy() attrs['area'] = dest_area return xr.DataArray( data, dims=('y', 'x'), attrs=attrs, ) @mock.patch('satpy.scene.resample_dataset') @pytest.mark.parametrize('datasets', [ None, ('comp13', 'ds5', 'ds2'), ]) def test_resample_scene_copy(self, rs, datasets): """Test that the Scene is properly copied during resampling. The Scene that is created as a copy of the original Scene should not be able to affect the original Scene object. """ from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') area_def = AreaDefinition('test', 'test', 'test', proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') scene.load(['comp19']) new_scene = scene.resample(area_def, datasets=datasets) new_scene['new_ds'] = new_scene['comp19'].copy() scene.load(['ds1']) comp19_node = scene._dependency_tree['comp19'] ds5_mod_id = make_dataid(name='ds5', modifiers=('res_change',)) ds5_node = scene._dependency_tree[ds5_mod_id] comp13_node = scene._dependency_tree['comp13'] assert comp13_node.data[1][0] is comp19_node.data[1][0] assert comp13_node.data[1][0] is ds5_node pytest.raises(KeyError, scene._dependency_tree.__getitem__, 'new_ds') # comp19 required resampling to produce so we should have its 3 deps # 1. comp13 # 2. ds5 # 3. ds2 # Then we loaded ds1 separately so we should have # 4. ds1 loaded_ids = list(scene.keys()) assert len(loaded_ids) == 4 for name in ('comp13', 'ds5', 'ds2', 'ds1'): assert any(x['name'] == name for x in loaded_ids) loaded_ids = list(new_scene.keys()) assert len(loaded_ids) == 2 assert loaded_ids[0] == make_cid(name='comp19') assert loaded_ids[1] == make_cid(name='new_ds') @mock.patch('satpy.scene.resample_dataset') def test_resample_scene_preserves_requested_dependencies(self, rs): """Test that the Scene is properly copied during resampling. The Scene that is created as a copy of the original Scene should not be able to affect the original Scene object. """ from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. +lat_0=25 +lat_1=25 ' '+units=m +no_defs') area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() scene = Scene(filenames=['fake1_1.txt'], reader='fake1') # Set PYTHONHASHSEED to 0 in the interpreter to test as intended (comp26 comes before comp14) scene.load(['comp26', 'comp14'], generate=False) scene.resample(area_def, unload=True) new_scene_2 = scene.resample(area_def, unload=True) assert 'comp14' not in scene assert 'comp26' not in scene assert 'comp14' in new_scene_2 assert 'comp26' in new_scene_2 assert 'ds1' not in new_scene_2 # unloaded @mock.patch('satpy.scene.resample_dataset') def test_resample_reduce_data_toggle(self, rs): """Test that the Scene can be reduced or not reduced during resampling.""" from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs')
        target_area = AreaDefinition('test', 'test', 'test', proj_str,
                                     4, 4, (-1000., -1500., 1000., 1500.))
        area_def = AreaDefinition('test', 'test', 'test', proj_str,
                                  5, 5, (-1000., -1500., 1000., 1500.))
        area_def.get_area_slices = mock.MagicMock()
        get_area_slices = area_def.get_area_slices
        get_area_slices.return_value = (slice(0, 3, None), slice(0, 3, None))
        area_def_big = AreaDefinition('test', 'test', 'test', proj_str,
                                      10, 10, (-1000., -1500., 1000., 1500.))
        area_def_big.get_area_slices = mock.MagicMock()
        get_area_slices_big = area_def_big.get_area_slices
        get_area_slices_big.return_value = (slice(0, 6, None), slice(0, 6, None))

        # Test that data reduction can be disabled
        scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
        scene.load(['comp19'])
        scene['comp19'].attrs['area'] = area_def
        scene['comp19_big'] = xr.DataArray(
            da.zeros((10, 10)), dims=('y', 'x'),
            attrs=scene['comp19'].attrs.copy())
        scene['comp19_big'].attrs['area'] = area_def_big
        scene['comp19_copy'] = scene['comp19'].copy()
        orig_slice_data = scene._slice_data
        # we force the below order of processing to test that success isn't
        # based on data of the same resolution being processed together
        test_order = [
            make_cid(**scene['comp19'].attrs),
            make_cid(**scene['comp19_big'].attrs),
            make_cid(**scene['comp19_copy'].attrs),
        ]
        with mock.patch('satpy.scene.Scene._slice_data') as slice_data, \
                mock.patch('satpy.dataset.dataset_walker') as ds_walker:
            ds_walker.return_value = test_order
            slice_data.side_effect = orig_slice_data
            scene.resample(target_area, reduce_data=False)
            assert not slice_data.called
            assert not get_area_slices.called
            scene.resample(target_area)
            assert slice_data.called
            assert get_area_slices.called
            scene.resample(target_area, reduce_data=True)
            # 2 times for each dataset
            # once for default (reduce_data=True)
            # once for kwarg forced to `True`
            assert slice_data.call_count == 2 * 3
            assert get_area_slices.called

    def test_resample_ancillary(self):
        """Test that ancillary variables are carried through resampling."""
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                                      '+lon_0=-95. +lat_0=25 +lat_1=25 '
                                      '+units=m +no_defs')
        area_def = AreaDefinition('test', 'test', 'test', proj_dict,
                                  5, 5, (-1000., -1500., 1000., 1500.))
        scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
        scene.load(['comp19', 'comp20'])
        scene['comp19'].attrs['area'] = area_def
        scene['comp19'].attrs['ancillary_variables'] = [scene['comp20']]
        scene['comp20'].attrs['area'] = area_def
        dst_area = AreaDefinition('dst', 'dst', 'dst',
                                  proj_dict,
                                  2, 2,
                                  (-1000., -1500., 0., 0.),
                                  )
        new_scene = scene.resample(dst_area)
        assert new_scene['comp20'] is new_scene['comp19'].attrs['ancillary_variables'][0]

    def test_resample_reduce_data(self):
        """Test that the Scene reducing data does not affect final output."""
        from pyresample.geometry import AreaDefinition
        proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                    '+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs') area_def = AreaDefinition('test', 'test', 'test', proj_str, 20, 20, (-1000., -1500., 1000., 1500.)) scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp19']) scene['comp19'].attrs['area'] = area_def dst_area = AreaDefinition('dst', 'dst', 'dst', proj_str, 20, 20, (-1000., -1500., 0., 0.), ) new_scene1 = scene.resample(dst_area, reduce_data=False) new_scene2 = scene.resample(dst_area) new_scene3 = scene.resample(dst_area, reduce_data=True) assert new_scene1['comp19'].shape == (20, 20, 3) assert new_scene2['comp19'].shape == (20, 20, 3) assert new_scene3['comp19'].shape == (20, 20, 3) @mock.patch('satpy.scene.resample_dataset') def test_no_generate_comp10(self, rs): """Test generating a composite after loading.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. +lat_0=25 +lat_1=25 ' '+units=m +no_defs') area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) # it is fine that an optional prereq doesn't exist scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(['comp10'], generate=False) assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) assert 'comp10' not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn = scene.resample(area_def, generate=False) assert 'comp10' not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn._generate_composites_from_loaded_datasets() assert any(ds_id['name'] == 'comp10' for ds_id in new_scn._wishlist) assert 'comp10' in new_scn assert not new_scn.missing_datasets # try generating them right away new_scn = scene.resample(area_def) assert any(ds_id['name'] == 'comp10' for ds_id in new_scn._wishlist) assert 'comp10' in new_scn assert not new_scn.missing_datasets def test_comp_loading_after_resampling_existing_sensor(self): """Test requesting a composite after resampling.""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') scene.load(["ds1", "ds2"]) new_scn = scene.resample(resampler='native') # Can't load from readers after resampling with pytest.raises(KeyError): new_scn.load(["ds3"]) # But we can load composites because the sensor composites were loaded # when the reader datasets were accessed new_scn.load(["comp2"]) assert "comp2" in new_scn def test_comp_loading_after_resampling_new_sensor(self): """Test requesting a composite after resampling when the sensor composites weren't loaded before.""" # this is our base Scene with sensor "fake_sensor2" scene1 = Scene(filenames=['fake2_3ds_1.txt'], reader='fake2_3ds') scene1.load(["ds2"]) new_scn = scene1.resample(resampler='native') # Can't load from readers after resampling with pytest.raises(KeyError): new_scn.load(["ds3"]) # Can't load the composite from fake_sensor composites yet # 'ds1' is missing with pytest.raises(KeyError): new_scn.load(["comp2"]) # artificial DataArray "created by the user" # mimics a user adding their own data with the same sensor user_da = scene1["ds2"].copy() user_da.attrs["name"] = "ds1" user_da.attrs["sensor"] = {"fake_sensor2"} # Add 'ds1' that doesn't provide the 'fake_sensor' sensor new_scn["ds1"] = user_da with pytest.raises(KeyError): new_scn.load(["comp2"]) assert "comp2" not in new_scn # artificial 
DataArray "created by the user" # mimics a user adding their own data with its own sensor to the Scene user_da = scene1["ds2"].copy() user_da.attrs["name"] = "ds1" user_da.attrs["sensor"] = {"fake_sensor"} # Now 'fake_sensor' composites have been loaded new_scn["ds1"] = user_da new_scn.load(["comp2"]) assert "comp2" in new_scn def test_comp_loading_multisensor_composite_created_user(self): """Test that multisensor composite can be created manually. Test that if the user has created datasets "manually", that multi-sensor composites provided can still be read. """ scene1 = Scene(filenames=["fake1_1.txt"], reader="fake1") scene1.load(["ds1"]) scene2 = Scene(filenames=["fake4_1.txt"], reader="fake4") scene2.load(["ds4_b"]) scene3 = Scene() scene3["ds1"] = scene1["ds1"] scene3["ds4_b"] = scene2["ds4_b"] scene3.load(["comp_multi"]) assert "comp_multi" in scene3 def test_comps_need_resampling_optional_mod_deps(self): """Test that a composite with complex dependencies. This is specifically testing the case where a compositor depends on multiple resolution prerequisites which themselves are composites. These sub-composites depend on data with a modifier that only has optional dependencies. This is a very specific use case and is the simplest way to present the problem (so far). The general issue is that the Scene loading creates the "ds13" dataset which already has one modifier on it. The "comp27" composite requires resampling so its 4 prerequisites + the requested "ds13" (from the reader which includes mod1 modifier) remain. If the DependencyTree is not copied properly in this situation then the new Scene object will have the composite dependencies without resolution in its dep tree, but have the DataIDs with the resolution in the dataset dictionary. This all results in the Scene trying to regenerate composite dependencies that aren't needed which fail. 
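
        Summarised, the scenario exercised below boils down to this sketch of
        the steps the test takes (using the fake 'fake1' reader from the test
        configuration):

            scn = Scene(filenames=['fake1_1.txt'], reader='fake1')
            scn.load(['comp27', 'ds13'])   # comp27 needs resampling, only ds13 loads
            new_scn = scn.resample(resampler='native')
            # both 'comp27' and 'ds13' must now be present in new_scn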
""" scene = Scene(filenames=['fake1_1.txt'], reader='fake1') # should require resampling scene.load(['comp27', 'ds13']) assert 'comp27' not in scene assert 'ds13' in scene new_scene = scene.resample(resampler='native') assert len(list(new_scene.keys())) == 2 assert 'comp27' in new_scene assert 'ds13' in new_scene class TestSceneSaving(unittest.TestCase): """Test the Scene's saving method.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_save_datasets_default(self): """Save a dataset using 'save_datasets'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn['test'] = ds1 scn.save_datasets(base_dir=self.base_dir) assert os.path.isfile(os.path.join(self.base_dir, 'test_20180101_000000.tif')) def test_save_datasets_by_ext(self): """Save a dataset using 'save_datasets' with 'filename'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn['test'] = ds1 from satpy.writers.simple_image import PillowWriter save_image_mock = spy_decorator(PillowWriter.save_image) with mock.patch.object(PillowWriter, 'save_image', save_image_mock): scn.save_datasets(base_dir=self.base_dir, filename='{name}.png') save_image_mock.mock.assert_called_once() assert os.path.isfile(os.path.join(self.base_dir, 'test.png')) def test_save_datasets_bad_writer(self): """Save a dataset using 'save_datasets' and a bad writer.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow()} ) scn = Scene() scn['test'] = ds1 pytest.raises(ValueError, scn.save_datasets, writer='_bad_writer_', base_dir=self.base_dir) def test_save_datasets_missing_wishlist(self): """Calling 'save_datasets' with no valid datasets.""" scn = Scene() scn._wishlist.add(make_cid(name='true_color')) pytest.raises(RuntimeError, scn.save_datasets, writer='geotiff', base_dir=self.base_dir) pytest.raises(KeyError, scn.save_datasets, datasets=['no_exist']) def test_save_dataset_default(self): """Save a dataset using 'save_dataset'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn['test'] = ds1 scn.save_dataset('test', base_dir=self.base_dir) assert os.path.isfile(os.path.join(self.base_dir, 'test_20180101_000000.tif')) class TestSceneConversions(unittest.TestCase): """Test Scene conversion to geoviews, xarray, etc.""" def test_to_xarray_dataset_with_empty_scene(self): """Test converting empty Scene to xarray dataset.""" scn = Scene() xrds = scn.to_xarray_dataset() assert isinstance(xrds, xr.Dataset) assert len(xrds.variables) == 0 assert len(xrds.coords) == 0 def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from pyresample.geometry import AreaDefinition scn = Scene() area = AreaDefinition('test', 'test', 'test', {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1), 'area': area}) gv_obj = scn.to_geoviews() # we assume that if we got 
something back, geoviews can use it assert gv_obj is not None def test_geoviews_basic_with_swath(self): """Test converting a Scene to geoviews with a SwathDefinition.""" from pyresample.geometry import SwathDefinition scn = Scene() lons = xr.DataArray(da.zeros((2, 2))) lats = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(lons, lats) scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1), 'area': area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None class TestSceneAggregation(unittest.TestCase): """Test the scene's aggregate method.""" def test_aggregate(self): """Test the aggregate method.""" x_size = 3712 y_size = 3712 scene1 = self._create_test_data(x_size, y_size) scene2 = scene1.aggregate(func='sum', x=2, y=2) expected_aggregated_shape = (y_size / 2, x_size / 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) @staticmethod def _create_test_data(x_size, y_size): from pyresample.geometry import AreaDefinition scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, area_extent, ) scene1["1"] = xr.DataArray(np.ones((y_size, x_size)), attrs={'_satpy_id_keys': default_id_keys_config}) scene1["2"] = xr.DataArray(np.ones((y_size, x_size)), dims=('y', 'x'), attrs={'_satpy_id_keys': default_id_keys_config}) scene1["3"] = xr.DataArray(np.ones((y_size, x_size)), dims=('y', 'x'), attrs={'area': area_def, '_satpy_id_keys': default_id_keys_config}) scene1["4"] = xr.DataArray(np.ones((y_size, x_size)), dims=('y', 'x'), attrs={'area': area_def, 'standard_name': 'backscatter', '_satpy_id_keys': default_id_keys_config}) return scene1 def _check_aggregation_results(self, expected_aggregated_shape, scene1, scene2, x_size, y_size): assert scene1['1'] is scene2['1'] assert scene1['2'] is scene2['2'] np.testing.assert_allclose(scene2['3'].data, 4) assert scene2['1'].shape == (y_size, x_size) assert scene2['2'].shape == (y_size, x_size) assert scene2['3'].shape == expected_aggregated_shape assert 'standard_name' in scene2['4'].attrs assert scene2['4'].attrs['standard_name'] == 'backscatter' def test_aggregate_with_boundary(self): """Test aggregation with boundary argument.""" x_size = 3711 y_size = 3711 scene1 = self._create_test_data(x_size, y_size) with pytest.raises(ValueError): scene1.aggregate(func='sum', x=2, y=2, boundary='exact') scene2 = scene1.aggregate(func='sum', x=2, y=2, boundary='trim') expected_aggregated_shape = (y_size // 2, x_size // 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) satpy-0.34.0/satpy/tests/test_utils.py000066400000000000000000000351621420401153000200060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Testing of utils.""" from __future__ import annotations import logging import typing import unittest import warnings from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.utils import angle2xyz, get_satpos, lonlat2xyz, proj_units_to_meters, xyz2angle, xyz2lonlat class TestUtils(unittest.TestCase): """Testing utils.""" def test_lonlat2xyz(self): """Test the lonlat2xyz function.""" x__, y__, z__ = lonlat2xyz(0, 0) self.assertAlmostEqual(x__, 1) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 0) x__, y__, z__ = lonlat2xyz(90, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = lonlat2xyz(0, 90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = lonlat2xyz(180, 0) self.assertAlmostEqual(x__, -1) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 0) x__, y__, z__ = lonlat2xyz(-90, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, -1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = lonlat2xyz(0, -90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, -1) x__, y__, z__ = lonlat2xyz(0, 45) self.assertAlmostEqual(x__, np.sqrt(2) / 2) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, np.sqrt(2) / 2) x__, y__, z__ = lonlat2xyz(0, 60) self.assertAlmostEqual(x__, np.sqrt(1) / 2) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, np.sqrt(3) / 2) def test_angle2xyz(self): """Test the lonlat2xyz function.""" x__, y__, z__ = angle2xyz(0, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = angle2xyz(90, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = angle2xyz(0, 90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(180, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = angle2xyz(-90, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = angle2xyz(0, -90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, -1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(90, 90) self.assertAlmostEqual(x__, 1) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(-90, 90) self.assertAlmostEqual(x__, -1) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(180, 90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, -1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(0, -90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, -1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(0, 45) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, np.sqrt(2) / 2) self.assertAlmostEqual(z__, np.sqrt(2) / 2) x__, y__, z__ = angle2xyz(0, 60) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, np.sqrt(3) / 2) self.assertAlmostEqual(z__, np.sqrt(1) / 2) def test_xyz2lonlat(self): """Test xyz2lonlat.""" lon, lat = xyz2lonlat(1, 0, 0) self.assertAlmostEqual(lon, 0) self.assertAlmostEqual(lat, 0) lon, lat = xyz2lonlat(0, 1, 0) self.assertAlmostEqual(lon, 90) self.assertAlmostEqual(lat, 0) 
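        # Added note: the expected values in this test are consistent with the
        # standard unit-sphere conversion that these helpers are assumed to
        # implement (see satpy.utils for the actual code):
        #
        #     x = cos(lat) * cos(lon)
        #     y = cos(lat) * sin(lon)
        #     z = sin(lat)
        #
        #     lon = degrees(arctan2(y, x))
        #     lat = degrees(arctan2(z, hypot(x, y)))   # arcsin(z) when asin=True
        #
        # e.g. np.degrees(np.arctan2(1.0, 0.0)) == 90.0, matching the
        # xyz2lonlat(0, 1, 0) case checked just above.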
lon, lat = xyz2lonlat(0, 0, 1, asin=True) self.assertAlmostEqual(lon, 0) self.assertAlmostEqual(lat, 90) lon, lat = xyz2lonlat(0, 0, 1) self.assertAlmostEqual(lon, 0) self.assertAlmostEqual(lat, 90) lon, lat = xyz2lonlat(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) self.assertAlmostEqual(lon, 45) self.assertAlmostEqual(lat, 0) def test_xyz2angle(self): """Test xyz2angle.""" azi, zen = xyz2angle(1, 0, 0) self.assertAlmostEqual(azi, 90) self.assertAlmostEqual(zen, 90) azi, zen = xyz2angle(0, 1, 0) self.assertAlmostEqual(azi, 0) self.assertAlmostEqual(zen, 90) azi, zen = xyz2angle(0, 0, 1) self.assertAlmostEqual(azi, 0) self.assertAlmostEqual(zen, 0) azi, zen = xyz2angle(0, 0, 1, acos=True) self.assertAlmostEqual(azi, 0) self.assertAlmostEqual(zen, 0) azi, zen = xyz2angle(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) self.assertAlmostEqual(azi, 45) self.assertAlmostEqual(zen, 90) azi, zen = xyz2angle(-1, 0, 0) self.assertAlmostEqual(azi, -90) self.assertAlmostEqual(zen, 90) azi, zen = xyz2angle(0, -1, 0) self.assertAlmostEqual(azi, 180) self.assertAlmostEqual(zen, 90) def test_proj_units_to_meters(self): """Test proj units to meters conversion.""" prj = '+asd=123123123123' res = proj_units_to_meters(prj) self.assertEqual(res, prj) prj = '+a=6378.137' res = proj_units_to_meters(prj) self.assertEqual(res, '+a=6378137.000') prj = '+a=6378.137 +units=km' res = proj_units_to_meters(prj) self.assertEqual(res, '+a=6378137.000') prj = '+a=6378.137 +b=6378.137' res = proj_units_to_meters(prj) self.assertEqual(res, '+a=6378137.000 +b=6378137.000') prj = '+a=6378.137 +b=6378.137 +h=35785.863' res = proj_units_to_meters(prj) self.assertEqual(res, '+a=6378137.000 +b=6378137.000 +h=35785863.000') @mock.patch('satpy.utils.warnings.warn') def test_get_satpos(self, warn_mock): """Test getting the satellite position.""" orb_params = {'nadir_longitude': 1, 'satellite_actual_longitude': 1.1, 'satellite_nominal_longitude': 1.2, 'projection_longitude': 1.3, 'nadir_latitude': 2, 'satellite_actual_latitude': 2.1, 'satellite_nominal_latitude': 2.2, 'projection_latitude': 2.3, 'satellite_actual_altitude': 3, 'satellite_nominal_altitude': 3.1, 'projection_altitude': 3.2} dataset = mock.MagicMock(attrs={'orbital_parameters': orb_params, 'satellite_longitude': -1, 'satellite_latitude': -2, 'satellite_altitude': -3}) # Nadir lon, lat, alt = get_satpos(dataset) self.assertTupleEqual((lon, lat, alt), (1, 2, 3)) # Actual orb_params.pop('nadir_longitude') orb_params.pop('nadir_latitude') lon, lat, alt = get_satpos(dataset) self.assertTupleEqual((lon, lat, alt), (1.1, 2.1, 3)) # Nominal orb_params.pop('satellite_actual_longitude') orb_params.pop('satellite_actual_latitude') orb_params.pop('satellite_actual_altitude') lon, lat, alt = get_satpos(dataset) self.assertTupleEqual((lon, lat, alt), (1.2, 2.2, 3.1)) # Projection orb_params.pop('satellite_nominal_longitude') orb_params.pop('satellite_nominal_latitude') orb_params.pop('satellite_nominal_altitude') lon, lat, alt = get_satpos(dataset) self.assertTupleEqual((lon, lat, alt), (1.3, 2.3, 3.2)) warn_mock.assert_called() # Legacy dataset.attrs.pop('orbital_parameters') lon, lat, alt = get_satpos(dataset) self.assertTupleEqual((lon, lat, alt), (-1, -2, -3)) def test_make_fake_scene(): """Test the make_fake_scene utility. Although the make_fake_scene utility is for internal testing purposes, it has grown sufficiently complex that it needs its own testing. 
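
    A minimal usage sketch, mirroring the calls exercised below (the keyword
    arguments daskify, area and common_attrs are taken from those calls rather
    than from separate documentation):

        sc = make_fake_scene({"six": np.arange(25).reshape(5, 5)})
        # -> a Scene with one dataset named "six" and a fake 5x5 area attached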
""" from satpy.tests.utils import make_fake_scene assert make_fake_scene({}).keys() == [] sc = make_fake_scene({ "six": np.arange(25).reshape(5, 5)}) assert len(sc.keys()) == 1 assert sc.keys().pop()['name'] == "six" assert sc["six"].attrs["area"].shape == (5, 5) sc = make_fake_scene({ "seven": np.arange(3*7).reshape(3, 7), "eight": np.arange(3*8).reshape(3, 8)}, daskify=True, area=False, common_attrs={"repetency": "fourteen hundred per centimetre"}) assert "area" not in sc["seven"].attrs.keys() assert (sc["seven"].attrs["repetency"] == sc["eight"].attrs["repetency"] == "fourteen hundred per centimetre") assert isinstance(sc["seven"].data, da.Array) sc = make_fake_scene({ "nine": xr.DataArray( np.arange(2*9).reshape(2, 9), dims=("y", "x"), attrs={"please": "preserve", "answer": 42})}, common_attrs={"bad words": "semprini bahnhof veerooster winterbanden"}) assert sc["nine"].attrs.keys() >= {"please", "answer", "bad words", "area"} class TestCheckSatpy(unittest.TestCase): """Test the 'check_satpy' function.""" def test_basic_check_satpy(self): """Test 'check_satpy' basic functionality.""" from satpy.utils import check_satpy check_satpy() def test_specific_check_satpy(self): """Test 'check_satpy' with specific features provided.""" from satpy.utils import check_satpy with mock.patch('satpy.utils.print') as print_mock: check_satpy(readers=['viirs_sdr'], extras=('cartopy', '__fake')) checked_fake = False for call in print_mock.mock_calls: if len(call[1]) > 0 and '__fake' in call[1][0]: self.assertNotIn('ok', call[1][1]) checked_fake = True self.assertTrue(checked_fake, "Did not find __fake module " "mentioned in checks") def test_debug_on(caplog): """Test that debug_on is working as expected.""" from satpy.utils import debug, debug_off, debug_on def depwarn(): logger = logging.getLogger("satpy.silly") logger.debug("But now it's just got SILLY.") warnings.warn("Stop that! It's SILLY.", DeprecationWarning) warnings.filterwarnings("ignore", category=DeprecationWarning) debug_on(False) filts_before = warnings.filters.copy() # test that logging on, but deprecation warnings still off with caplog.at_level(logging.DEBUG): depwarn() assert warnings.filters == filts_before assert "But now it's just got SILLY." in caplog.text debug_on(True) # test that logging on and deprecation warnings on with pytest.warns(DeprecationWarning): depwarn() assert warnings.filters != filts_before debug_off() # other tests assume debugging is off # test that filters were reset assert warnings.filters == filts_before with debug(): assert warnings.filters != filts_before assert warnings.filters == filts_before def test_logging_on_and_off(caplog): """Test that switching logging on and off works.""" from satpy.utils import logging_off, logging_on logger = logging.getLogger("satpy.silly") logging_on() with caplog.at_level(logging.WARNING): logger.debug("I'd like to leave the army please, sir.") logger.warning("Stop that! It's SILLY.") assert "Stop that! It's SILLY" in caplog.text assert "I'd like to leave the army please, sir." not in caplog.text logging_off() with caplog.at_level(logging.DEBUG): logger.warning("You've got a nice army base here, Colonel.") assert "You've got a nice army base here, Colonel." 
not in caplog.text @pytest.mark.parametrize( ("shapes", "chunks", "dims", "exp_unified"), [ ( ((3, 5, 5), (5, 5)), (-1, -1), (("bands", "y", "x"), ("y", "x")), True, ), ( ((3, 5, 5), (5, 5)), (-1, 2), (("bands", "y", "x"), ("y", "x")), True, ), ( ((4, 5, 5), (3, 5, 5)), (-1, -1), (("bands", "y", "x"), ("bands", "y", "x")), False, ), ], ) def test_unify_chunks(shapes, chunks, dims, exp_unified): """Test unify_chunks utility function.""" from satpy.utils import unify_chunks inputs = list(_data_arrays_from_params(shapes, chunks, dims)) results = unify_chunks(*inputs) if exp_unified: _verify_unified(results) else: _verify_unchanged_chunks(results, inputs) def _data_arrays_from_params(shapes: list[tuple[int, ...]], chunks: list[tuple[int, ...]], dims: list[tuple[int, ...]] ) -> typing.Generator[xr.DataArray, None, None]: for shape, chunk, dim in zip(shapes, chunks, dims): yield xr.DataArray(da.ones(shape, chunks=chunk), dims=dim) def _verify_unified(data_arrays: list[xr.DataArray]) -> None: dim_chunks: dict[str, int] = {} for data_arr in data_arrays: for dim, chunk_size in zip(data_arr.dims, data_arr.chunks): exp_chunks = dim_chunks.setdefault(dim, chunk_size) assert exp_chunks == chunk_size def _verify_unchanged_chunks(data_arrays: list[xr.DataArray], orig_arrays: list[xr.DataArray]) -> None: for data_arr, orig_arr in zip(data_arrays, orig_arrays): assert data_arr.chunks == orig_arr.chunks satpy-0.34.0/satpy/tests/test_writers.py000066400000000000000000001024011420401153000203340ustar00rootroot00000000000000#!/usr/bin/python # Copyright (c) 2015 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Test generic writer functions.""" from __future__ import annotations import os import shutil import unittest import warnings from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from trollimage.colormap import greys class TestWritersModule(unittest.TestCase): """Test the writers module.""" def test_to_image_1d(self): """Conversion to image.""" # 1D from satpy.writers import to_image p = xr.DataArray(np.arange(25), dims=['y']) self.assertRaises(ValueError, to_image, p) @mock.patch('satpy.writers.XRImage') def test_to_image_2d(self, mock_geoimage): """Conversion to image.""" from satpy.writers import to_image # 2D data = np.arange(25).reshape((5, 5)) p = xr.DataArray(data, attrs=dict(mode="L", fill_value=0, palette=[0, 1, 2, 3, 4, 5]), dims=['y', 'x']) to_image(p) np.testing.assert_array_equal( data, mock_geoimage.call_args[0][0].values) mock_geoimage.reset_mock() @mock.patch('satpy.writers.XRImage') def test_to_image_3d(self, mock_geoimage): """Conversion to image.""" # 3D from satpy.writers import to_image data = np.arange(75).reshape((3, 5, 5)) p = xr.DataArray(data, dims=['bands', 'y', 'x']) p['bands'] = ['R', 'G', 'B'] to_image(p) np.testing.assert_array_equal(data[0], mock_geoimage.call_args[0][0][0]) np.testing.assert_array_equal(data[1], mock_geoimage.call_args[0][0][1]) np.testing.assert_array_equal(data[2], mock_geoimage.call_args[0][0][2]) @mock.patch('satpy.writers.get_enhanced_image') def test_show(self, mock_get_image): """Check showing.""" from satpy.writers import show data = np.arange(25).reshape((5, 5)) p = xr.DataArray(data, dims=['y', 'x']) show(p) self.assertTrue(mock_get_image.return_value.show.called) class TestEnhancer(unittest.TestCase): """Test basic `Enhancer` functionality with builtin configs.""" def test_basic_init_no_args(self): """Test Enhancer init with no arguments passed.""" from satpy.writers import Enhancer e = Enhancer() self.assertIsNotNone(e.enhancement_tree) def test_basic_init_no_enh(self): """Test Enhancer init requesting no enhancements.""" from satpy.writers import Enhancer e = Enhancer(enhancement_config_file=False) self.assertIsNone(e.enhancement_tree) def test_basic_init_provided_enh(self): """Test Enhancer init with string enhancement configs.""" from satpy.writers import Enhancer e = Enhancer(enhancement_config_file=["""enhancements: enh1: standard_name: toa_bidirectional_reflectance operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} """]) self.assertIsNotNone(e.enhancement_tree) def test_init_nonexistent_enh_file(self): """Test Enhancer init with a nonexistent enhancement configuration file.""" from satpy.writers import Enhancer self.assertRaises( ValueError, Enhancer, enhancement_config_file="is_not_a_valid_filename_?.yaml") class _BaseCustomEnhancementConfigTests: TEST_CONFIGS: dict[str, str] = {} @classmethod def setup_class(cls): """Create fake user configurations.""" for fn, content in cls.TEST_CONFIGS.items(): base_dir = os.path.dirname(fn) if base_dir: os.makedirs(base_dir, exist_ok=True) with open(fn, 'w') as f: f.write(content) # create fake test image writer from satpy.writers import ImageWriter class CustomImageWriter(ImageWriter): def __init__(self, **kwargs): super(CustomImageWriter, self).__init__(name='test', config_files=[], **kwargs) self.img = None def save_image(self, img, **kwargs): self.img = img cls.CustomImageWriter = CustomImageWriter @classmethod def teardown_class(cls): """Remove fake user configurations.""" for 
fn, _content in cls.TEST_CONFIGS.items(): base_dir = os.path.dirname(fn) if base_dir not in ['.', ''] and os.path.isdir(base_dir): shutil.rmtree(base_dir) elif os.path.isfile(fn): os.remove(fn) class TestComplexSensorEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use or expect multiple sensors.""" ENH_FN = 'test_sensor1.yaml' ENH_FN2 = 'test_sensor2.yaml' TEST_CONFIGS = { ENH_FN: """ enhancements: test1_sensor1_specific: name: test1 sensor: test_sensor1 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 200} """, ENH_FN2: """ enhancements: default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 100} test1_sensor2_specific: name: test1 sensor: test_sensor2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 50} exact_multisensor_comp: name: my_comp sensor: [test_sensor1, test_sensor2] operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 20} """, } def test_multisensor_choice(self): """Test that a DataArray with two sensors works.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ 'name': 'test1', 'sensor': {'test_sensor2', 'test_sensor1'}, 'mode': 'L' }, dims=['y', 'x']) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) # make sure that both sensor configs were loaded assert (set(e.sensor_enhancement_configs) == {os.path.abspath(self.ENH_FN), os.path.abspath(self.ENH_FN2)}) # test_sensor1 config should have been used because it is # alphabetically first np.testing.assert_allclose(img.data.values[0], ds.data / 200.0) def test_multisensor_exact(self): """Test that a DataArray with two sensors can match exactly.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ 'name': 'my_comp', 'sensor': {'test_sensor2', 'test_sensor1'}, 'mode': 'L' }, dims=['y', 'x']) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) # make sure that both sensor configs were loaded assert (set(e.sensor_enhancement_configs) == {os.path.abspath(self.ENH_FN), os.path.abspath(self.ENH_FN2)}) # test_sensor1 config should have been used because it is # alphabetically first np.testing.assert_allclose(img.data.values[0], ds.data / 20.0) class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests): """Test `Enhancer` functionality when user's custom configurations are present.""" ENH_FN = 'test_sensor.yaml' ENH_ENH_FN = os.path.join('enhancements', ENH_FN) ENH_FN2 = 'test_sensor2.yaml' ENH_ENH_FN2 = os.path.join('enhancements', ENH_FN2) ENH_FN3 = 'test_empty.yaml' TEST_CONFIGS = { ENH_FN: """ enhancements: test1_default: name: test1 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear, cutoffs: [0., 0.]} """, ENH_ENH_FN: """ enhancements: test1_kelvin: name: test1 units: kelvin operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 20} """, ENH_FN2: """ """, ENH_ENH_FN2: """ """, ENH_FN3: """""", } def test_enhance_empty_config(self): """Test Enhancer doesn't fail with empty 
enhancement file.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(sensor='test_empty', mode='L'), dims=['y', 'x']) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) assert (set(e.sensor_enhancement_configs) == {os.path.abspath(self.ENH_FN3)}) def test_enhance_with_sensor_no_entry(self): """Test enhancing an image that has no configuration sections.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(sensor='test_sensor2', mode='L'), dims=['y', 'x']) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) assert (set(e.sensor_enhancement_configs) == {os.path.abspath(self.ENH_FN2), os.path.abspath(self.ENH_ENH_FN2)}) def test_no_enhance(self): """Test turning off enhancements.""" from xarray import DataArray from satpy.writers import get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name='test1', sensor='test_sensor', mode='L'), dims=['y', 'x']) img = get_enhanced_image(ds, enhance=False) np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data) def test_writer_no_enhance(self): """Test turning off enhancements with writer.""" from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name='test1', sensor='test_sensor', mode='L'), dims=['y', 'x']) writer = self.CustomImageWriter(enhance=False) writer.save_datasets((ds,), compute=False) img = writer.img np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data) def test_writer_custom_enhance(self): """Test using custom enhancements with writer.""" from xarray import DataArray from satpy.writers import Enhancer ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name='test1', sensor='test_sensor', mode='L'), dims=['y', 'x']) enhance = Enhancer() writer = self.CustomImageWriter(enhance=enhance) writer.save_datasets((ds,), compute=False) img = writer.img np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.) def test_enhance_with_sensor_entry(self): """Test enhancing an image with a configuration section.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name='test1', sensor='test_sensor', mode='L'), dims=['y', 'x']) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) assert (set(e.sensor_enhancement_configs) == {os.path.abspath(self.ENH_FN), os.path.abspath(self.ENH_ENH_FN)}) np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.) ds = DataArray(da.arange(1, 11., chunks=5).reshape((2, 5)), attrs=dict(name='test1', sensor='test_sensor', mode='L'), dims=['y', 'x']) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) assert (set(e.sensor_enhancement_configs) == {os.path.abspath(self.ENH_FN), os.path.abspath(self.ENH_ENH_FN)}) np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.) 
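    # Worked example for the expected values above and in the next test, taken
    # from the YAML snippets in TEST_CONFIGS: the test data is np.arange(1, 11.),
    # so its maximum is 10.  The generic test1_default section applies a linear
    # stretch (cutoffs 0.), which scales the maximum to 1.0, while the
    # kelvin-specific test1_kelvin section applies a crude stretch with
    # min_stretch=0 and max_stretch=20, i.e. 10 / 20 = 0.5.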
def test_enhance_with_sensor_entry2(self): """Test enhancing an image with a more detailed configuration section.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name='test1', units='kelvin', sensor='test_sensor', mode='L'), dims=['y', 'x']) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) assert (set(e.sensor_enhancement_configs) == {os.path.abspath(self.ENH_FN), os.path.abspath(self.ENH_ENH_FN)}) np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 0.5) class TestReaderEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use reader name.""" ENH_FN = 'test_sensor1.yaml' # NOTE: The sections are ordered in a special way so that if 'reader' key # isn't provided that we'll get the section we didn't want and all tests # will fail. Otherwise the correct sections get chosen just by the order # of how they are added to the decision tree. TEST_CONFIGS = { ENH_FN: """ enhancements: default_reader2: reader: reader2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 75} default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 100} test1_reader2_specific: name: test1 reader: reader2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 50} test1_reader1_specific: name: test1 reader: reader1 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 200} """, } def _get_test_data_array(self): from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ 'name': 'test1', 'sensor': 'test_sensor1', 'mode': 'L', }, dims=['y', 'x']) return ds def _get_enhanced_image(self, data_arr): from satpy.writers import Enhancer, get_enhanced_image e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(data_arr, enhance=e) # make sure that both configs were loaded assert (set(e.sensor_enhancement_configs) == {os.path.abspath(self.ENH_FN)}) return img def test_no_reader(self): """Test that a DataArray with no 'reader' metadata works.""" data_arr = self._get_test_data_array() img = self._get_enhanced_image(data_arr) # no reader available, should use default no specified reader np.testing.assert_allclose(img.data.values[0], data_arr.data / 100.0) def test_no_matching_reader(self): """Test that a DataArray with no matching 'reader' works.""" data_arr = self._get_test_data_array() data_arr.attrs["reader"] = "reader3" img = self._get_enhanced_image(data_arr) # no reader available, should use default no specified reader np.testing.assert_allclose(img.data.values[0], data_arr.data / 100.0) def test_only_reader_matches(self): """Test that a DataArray with only a matching 'reader' works.""" data_arr = self._get_test_data_array() data_arr.attrs["reader"] = "reader2" data_arr.attrs["name"] = "not_configured" img = self._get_enhanced_image(data_arr) # no reader available, should use default no specified reader np.testing.assert_allclose(img.data.values[0], data_arr.data / 75.0) def test_reader_and_name_match(self): """Test that a DataArray with a matching 'reader' and 'name' works.""" data_arr = self._get_test_data_array() data_arr.attrs["reader"] = "reader2" img = 
self._get_enhanced_image(data_arr) # no reader available, should use default no specified reader np.testing.assert_allclose(img.data.values[0], data_arr.data / 50.0) class TestYAMLFiles(unittest.TestCase): """Test and analyze the writer configuration files.""" def test_filename_matches_writer_name(self): """Test that every writer filename matches the name in the YAML.""" import yaml class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): return tag_suffix + ' ' + node.value IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.writers import read_writer_config for writer_config in glob_config('writers/*.yaml'): writer_fn = os.path.basename(writer_config) writer_fn_name = os.path.splitext(writer_fn)[0] writer_info = read_writer_config([writer_config], loader=IgnoreLoader) self.assertEqual(writer_fn_name, writer_info['name'], "Writer YAML filename doesn't match writer " "name in the YAML file.") def test_available_writers(self): """Test the 'available_writers' function.""" from satpy import available_writers writer_names = available_writers() self.assertGreater(len(writer_names), 0) self.assertIsInstance(writer_names[0], str) self.assertIn('geotiff', writer_names) writer_infos = available_writers(as_dict=True) self.assertEqual(len(writer_names), len(writer_infos)) self.assertIsInstance(writer_infos[0], dict) for writer_info in writer_infos: self.assertIn('name', writer_info) class TestComputeWriterResults(unittest.TestCase): """Test compute_writer_results().""" def setUp(self): """Create temporary directory to save files to and a mock scene.""" import tempfile from datetime import datetime from satpy.scene import Scene ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime(2018, 1, 1, 0, 0, 0)} ) self.scn = Scene() self.scn['test'] = ds1 # Temp dir self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_empty(self): """Test empty result list.""" from satpy.writers import compute_writer_results compute_writer_results([]) def test_simple_image(self): """Test writing to PNG file.""" from satpy.writers import compute_writer_results fname = os.path.join(self.base_dir, 'simple_image.png') res = self.scn.save_datasets(filename=fname, datasets=['test'], writer='simple_image', compute=False) compute_writer_results([res]) self.assertTrue(os.path.isfile(fname)) def test_geotiff(self): """Test writing to mitiff file.""" from satpy.writers import compute_writer_results fname = os.path.join(self.base_dir, 'geotiff.tif') res = self.scn.save_datasets(filename=fname, datasets=['test'], writer='geotiff', compute=False) compute_writer_results([res]) self.assertTrue(os.path.isfile(fname)) # FIXME: This reader needs more information than exist at the moment # def test_mitiff(self): # """Test writing to mitiff file""" # fname = os.path.join(self.base_dir, 'mitiff.tif') # res = self.scn.save_datasets(filename=fname, # datasets=['test'], # writer='mitiff') # compute_writer_results([res]) # self.assertTrue(os.path.isfile(fname)) # FIXME: This reader needs more information than exist at the moment # def test_cf(self): # """Test writing to NetCDF4 file""" # fname = os.path.join(self.base_dir, 'cf.nc') # res = self.scn.save_datasets(filename=fname, # datasets=['test'], # writer='cf') # compute_writer_results([res]) # 
self.assertTrue(os.path.isfile(fname)) def test_multiple_geotiff(self): """Test writing to mitiff file.""" from satpy.writers import compute_writer_results fname1 = os.path.join(self.base_dir, 'geotiff1.tif') res1 = self.scn.save_datasets(filename=fname1, datasets=['test'], writer='geotiff', compute=False) fname2 = os.path.join(self.base_dir, 'geotiff2.tif') res2 = self.scn.save_datasets(filename=fname2, datasets=['test'], writer='geotiff', compute=False) compute_writer_results([res1, res2]) self.assertTrue(os.path.isfile(fname1)) self.assertTrue(os.path.isfile(fname2)) def test_multiple_simple(self): """Test writing to geotiff files.""" from satpy.writers import compute_writer_results fname1 = os.path.join(self.base_dir, 'simple_image1.png') res1 = self.scn.save_datasets(filename=fname1, datasets=['test'], writer='simple_image', compute=False) fname2 = os.path.join(self.base_dir, 'simple_image2.png') res2 = self.scn.save_datasets(filename=fname2, datasets=['test'], writer='simple_image', compute=False) compute_writer_results([res1, res2]) self.assertTrue(os.path.isfile(fname1)) self.assertTrue(os.path.isfile(fname2)) def test_mixed(self): """Test writing to multiple mixed-type files.""" from satpy.writers import compute_writer_results fname1 = os.path.join(self.base_dir, 'simple_image3.png') res1 = self.scn.save_datasets(filename=fname1, datasets=['test'], writer='simple_image', compute=False) fname2 = os.path.join(self.base_dir, 'geotiff3.tif') res2 = self.scn.save_datasets(filename=fname2, datasets=['test'], writer='geotiff', compute=False) res3 = [] compute_writer_results([res1, res2, res3]) self.assertTrue(os.path.isfile(fname1)) self.assertTrue(os.path.isfile(fname2)) class TestBaseWriter: """Test the base writer class.""" def setup_method(self): """Set up tests.""" import tempfile from datetime import datetime from satpy.scene import Scene ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={ 'name': 'test', 'start_time': datetime(2018, 1, 1, 0, 0, 0), 'sensor': 'fake_sensor', } ) ds2 = ds1.copy() ds2.attrs['sensor'] = {'fake_sensor1', 'fake_sensor2'} self.scn = Scene() self.scn['test'] = ds1 self.scn['test2'] = ds2 # Temp dir self.base_dir = tempfile.mkdtemp() def teardown_method(self): """Remove the temporary directory created for a test.""" try: shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_save_dataset_static_filename(self): """Test saving a dataset with a static filename specified.""" self.scn.save_datasets(base_dir=self.base_dir, filename='geotiff.tif') assert os.path.isfile(os.path.join(self.base_dir, 'geotiff.tif')) @pytest.mark.parametrize( ('fmt_fn', 'exp_fns'), [ ('geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif', ['geotiff_test_20180101_000000.tif', 'geotiff_test2_20180101_000000.tif']), ('geotiff_{name}_{sensor}.tif', ['geotiff_test_fake_sensor.tif', 'geotiff_test2_fake_sensor1-fake_sensor2.tif']), ] ) def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns): """Test saving a dataset with a format filename specified.""" self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn) for exp_fn in exp_fns: exp_path = os.path.join(self.base_dir, exp_fn) assert os.path.isfile(exp_path) def test_save_dataset_dynamic_filename_with_dir(self): """Test saving a dataset with a format filename that includes a directory.""" fmt_fn = os.path.join('{start_time:%Y%m%d}', 'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif') exp_fn = os.path.join('20180101', 'geotiff_test_20180101_000000.tif') 
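        # The {start_time:%Y%m%d} component of the pattern is a directory, so the
        # writer is expected to create the dated subdirectory 20180101/ under
        # base_dir and place the output file inside it.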
self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn) assert os.path.isfile(os.path.join(self.base_dir, exp_fn)) # change the filename pattern but keep the same directory fmt_fn2 = os.path.join('{start_time:%Y%m%d}', 'geotiff_{name}_{start_time:%Y%m%d_%H}.tif') exp_fn2 = os.path.join('20180101', 'geotiff_test_20180101_00.tif') self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn2) assert os.path.isfile(os.path.join(self.base_dir, exp_fn2)) # the original file should still exist assert os.path.isfile(os.path.join(self.base_dir, exp_fn)) class TestOverlays(unittest.TestCase): """Tests for add_overlay and add_decorate functions.""" def setUp(self): """Create test data and mock pycoast/pydecorate.""" from pyresample.geometry import AreaDefinition from trollimage.xrimage import XRImage proj_dict = {'proj': 'lcc', 'datum': 'WGS84', 'ellps': 'WGS84', 'lon_0': -95., 'lat_0': 25, 'lat_1': 25, 'units': 'm', 'no_defs': True} self.area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) self.orig_rgb_img = XRImage( xr.DataArray(da.arange(75., chunks=10).reshape(3, 5, 5) / 75., dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}, attrs={'name': 'test_ds', 'area': self.area_def}) ) self.orig_l_img = XRImage( xr.DataArray(da.arange(25., chunks=10).reshape(5, 5) / 75., dims=('y', 'x'), attrs={'name': 'test_ds', 'area': self.area_def}) ) self.decorate = { 'decorate': [ {'logo': {'logo_path': '', 'height': 143, 'bg': 'white', 'bg_opacity': 255}}, {'text': { 'txt': 'TEST', 'align': {'top_bottom': 'bottom', 'left_right': 'right'}, 'font': '', 'font_size': 22, 'height': 30, 'bg': 'black', 'bg_opacity': 255, 'line': 'white'}}, {'scale': { 'colormap': greys, 'extend': False, 'width': 1670, 'height': 110, 'tick_marks': 5, 'minor_tick_marks': 1, 'cursor': [0, 0], 'bg':'white', 'title':'TEST TITLE OF SCALE', 'fontsize': 110, 'align': 'cc' }} ] } import_mock = mock.MagicMock() modules = {'pycoast': import_mock.pycoast, 'pydecorate': import_mock.pydecorate} self.module_patcher = mock.patch.dict('sys.modules', modules) self.module_patcher.start() def tearDown(self): """Turn off pycoast/pydecorate mocking.""" self.module_patcher.stop() def test_add_overlay_basic_rgb(self): """Test basic add_overlay usage with RGB data.""" from pycoast import ContourWriterAGG from satpy.writers import _burn_overlay, add_overlay coast_dir = '/path/to/coast/data' with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil: apply_pil.return_value = self.orig_rgb_img new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, fill_value=0) self.assertEqual(self.orig_rgb_img.mode, new_img.mode) new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir) self.assertEqual(self.orig_rgb_img.mode + 'A', new_img.mode) with mock.patch.object(self.orig_rgb_img, "convert") as convert: convert.return_value = self.orig_rgb_img overlays = {'coasts': {'outline': 'red'}} new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, overlays=overlays, fill_value=0) pil_args = None pil_kwargs = {'fill_value': 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, pil_args, pil_kwargs, fun_args, fun_kwargs) ContourWriterAGG.assert_called_with(coast_dir) # test legacy call grid = {'minor_is_tick': True} color = 'red' expected_overlays = {'coasts': {'outline': color, 'width': 0.5, 'level': 1}, 'borders': {'outline': color, 'width': 0.5, 
'level': 1}, 'grid': grid} with warnings.catch_warnings(record=True) as wns: warnings.simplefilter("always") new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, color=color, grid=grid, fill_value=0) assert len(wns) == 1 assert issubclass(wns[0].category, DeprecationWarning) assert "deprecated" in str(wns[0].message) pil_args = None pil_kwargs = {'fill_value': 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, expected_overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, pil_args, pil_kwargs, fun_args, fun_kwargs) ContourWriterAGG.assert_called_with(coast_dir) def test_add_overlay_basic_l(self): """Test basic add_overlay usage with L data.""" from satpy.writers import add_overlay new_img = add_overlay(self.orig_l_img, self.area_def, '', fill_value=0) self.assertEqual('RGB', new_img.mode) new_img = add_overlay(self.orig_l_img, self.area_def, '') self.assertEqual('RGBA', new_img.mode) def test_add_decorate_basic_rgb(self): """Test basic add_decorate usage with RGB data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_rgb_img, **self.decorate) self.assertEqual('RGBA', new_img.mode) def test_add_decorate_basic_l(self): """Test basic add_decorate usage with L data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_l_img, **self.decorate) self.assertEqual('RGBA', new_img.mode) satpy-0.34.0/satpy/tests/test_yaml_reader.py000066400000000000000000001622761420401153000211410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019, 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Testing the yaml_reader module.""" import os import random import unittest from datetime import datetime from tempfile import mkdtemp from unittest.mock import MagicMock, patch import numpy as np import xarray as xr import satpy.readers.yaml_reader as yr from satpy.dataset import DataQuery from satpy.dataset.dataid import ModifierTuple from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand, FrequencyRange from satpy.readers.file_handlers import BaseFileHandler from satpy.tests.utils import make_dataid MHS_YAML_READER_DICT = { 'reader': {'name': 'mhs_l1c_aapp', 'description': 'AAPP l1c Reader for AMSU-B/MHS data', 'sensors': ['mhs'], 'default_channels': [1, 2, 3, 4, 5], 'data_identification_keys': {'name': {'required': True}, 'frequency_double_sideband': {'type': FrequencyDoubleSideBand}, 'frequency_range': {'type': FrequencyRange}, 'resolution': None, 'polarization': {'enum': ['H', 'V']}, 'calibration': {'enum': ['brightness_temperature'], 'transitive': True}, 'modifiers': {'required': True, 'default': [], 'type': ModifierTuple}}, 'config_files': ('satpy/etc/readers/mhs_l1c_aapp.yaml',)}, 'datasets': {'1': {'name': '1', 'frequency_range': {'central': 89.0, 'bandwidth': 2.8, 'unit': 'GHz'}, 'polarization': 'V', 'resolution': 16000, 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, 'coordinates': ['longitude', 'latitude'], 'file_type': 'mhs_aapp_l1c'}, '2': {'name': '2', 'frequency_range': {'central': 157.0, 'bandwidth': 2.8, 'unit': 'GHz'}, 'polarization': 'V', 'resolution': 16000, 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, 'coordinates': ['longitude', 'latitude'], 'file_type': 'mhs_aapp_l1c'}, '3': {'name': '3', 'frequency_double_sideband': {'unit': 'GHz', 'central': 183.31, 'side': 1.0, 'bandwidth': 1.0}, 'polarization': 'V', 'resolution': 16000, 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, 'coordinates': ['longitude', 'latitude'], 'file_type': 'mhs_aapp_l1c'}}, 'file_types': {'mhs_aapp_l1c': {'file_reader': BaseFileHandler, 'file_patterns': [ 'mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']}}} # noqa class FakeFH(BaseFileHandler): """Fake file handler class.""" def __init__(self, start_time, end_time): """Initialize fake file handler.""" super(FakeFH, self).__init__("", {}, {}) self._start_time = start_time self._end_time = end_time self.get_bounding_box = MagicMock() fake_ds = MagicMock() fake_ds.return_value.dims = ['x', 'y'] self.get_dataset = fake_ds self.combine_info = MagicMock() @property def start_time(self): """Return start time.""" return self._start_time @property def end_time(self): """Return end time.""" return self._end_time class TestUtils(unittest.TestCase): """Test the utility functions.""" def test_get_filebase(self): """Check the get_filebase function.""" base_dir = os.path.join(os.path.expanduser('~'), 'data', 'satellite', 'Sentinel-3') base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') base_dir = os.path.join(base_dir, base_data) pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' '2s}_{collection:3s}.SEN3/geo_coordinates.nc') pattern = os.path.join(*pattern.split('/')) filename = os.path.join(base_dir, 
'Oa05_radiance.nc') expected = os.path.join(base_data, 'Oa05_radiance.nc') self.assertEqual(yr._get_filebase(filename, pattern), expected) def test_match_filenames(self): """Check that matching filenames works.""" # just a fake path for testing that doesn't have to exist base_dir = os.path.join(os.path.expanduser('~'), 'data', 'satellite', 'Sentinel-3') base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') base_dir = os.path.join(base_dir, base_data) pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' '2s}_{collection:3s}.SEN3/geo_coordinates.nc') pattern = os.path.join(*pattern.split('/')) filenames = [os.path.join(base_dir, 'Oa05_radiance.nc'), os.path.join(base_dir, 'geo_coordinates.nc')] expected = os.path.join(base_dir, 'geo_coordinates.nc') self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) def test_match_filenames_windows_forward_slash(self): """Check that matching filenames works on Windows with forward slashes. This is common from Qt5 which internally uses forward slashes everywhere. """ # just a fake path for testing that doesn't have to exist base_dir = os.path.join(os.path.expanduser('~'), 'data', 'satellite', 'Sentinel-3') base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') base_dir = os.path.join(base_dir, base_data) pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' '2s}_{collection:3s}.SEN3/geo_coordinates.nc') pattern = os.path.join(*pattern.split('/')) filenames = [os.path.join(base_dir, 'Oa05_radiance.nc').replace(os.sep, '/'), os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/')] expected = os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/') self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) def test_listify_string(self): """Check listify_string.""" self.assertEqual(yr.listify_string(None), []) self.assertEqual(yr.listify_string('some string'), ['some string']) self.assertEqual(yr.listify_string(['some', 'string']), ['some', 'string']) class DummyReader(BaseFileHandler): """Dummy reader instance.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the dummy reader.""" super(DummyReader, self).__init__( filename, filename_info, filetype_info) self._start_time = datetime(2000, 1, 1, 12, 1) self._end_time = datetime(2000, 1, 1, 12, 2) self.metadata = {} @property def start_time(self): """Return start time.""" return self._start_time @property def end_time(self): """Return end time.""" return self._end_time class TestFileFileYAMLReaderMultiplePatterns(unittest.TestCase): """Test units from FileYAMLReader with multiple readers.""" def setUp(self): """Prepare a reader instance with a fake config.""" patterns = ['a{something:3s}.bla', 'a0{something:2s}.bla'] res_dict = {'reader': {'name': 'fake', 'sensors': ['canon']}, 'file_types': {'ftype1': {'name': 'ft1', 'file_patterns': patterns, 'file_reader': DummyReader}}, 'datasets': {'ch1': {'name': 'ch01', 'wavelength': [0.5, 0.6, 0.7], 'calibration': 'reflectance', 'file_type': 'ftype1', 'coordinates': ['lons', 
'lats']}, 'ch2': {'name': 'ch02', 'wavelength': [0.7, 0.75, 0.8], 'calibration': 'counts', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'lons': {'name': 'lons', 'file_type': 'ftype2'}, 'lats': {'name': 'lats', 'file_type': 'ftype2'}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config, filter_parameters={ 'start_time': datetime(2000, 1, 1), 'end_time': datetime(2000, 1, 2)}) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] res = self.reader.select_files_from_pathnames(filelist) for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: self.assertIn(expected, res) self.assertEqual(len(res), 3) def test_fn_items_for_ft(self): """Check filename_items_for_filetype.""" filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] ft_info = self.config['file_types']['ftype1'] fiter = self.reader.filename_items_for_filetype(filelist, ft_info) filenames = dict(fname for fname in fiter) self.assertEqual(len(filenames.keys()), 3) def test_create_filehandlers(self): """Check create_filehandlers.""" filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] self.reader.create_filehandlers(filelist) self.assertEqual(len(self.reader.file_handlers['ftype1']), 3) def test_serializable(self): """Check that a reader is serializable by dask. This ensures users are able to serialize a Scene object that contains readers. """ from distributed.protocol import deserialize, serialize filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] self.reader.create_filehandlers(filelist) cloned_reader = deserialize(*serialize(self.reader)) assert self.reader.file_handlers.keys() == cloned_reader.file_handlers.keys() assert self.reader.all_ids == cloned_reader.all_ids class TestFileYAMLReaderWithCustomIDKey(unittest.TestCase): """Test units from FileYAMLReader with custom id_keys.""" def setUp(self): """Set up the test case.""" self.config = MHS_YAML_READER_DICT self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT, filter_parameters={ 'start_time': datetime(2000, 1, 1), 'end_time': datetime(2000, 1, 2), }) def test_custom_type_with_dict_contents_gets_parsed_correctly(self): """Test custom type with dictionary contents gets parsed correctly.""" ds_ids = list(self.reader.all_dataset_ids) assert ds_ids[0]["frequency_range"] == FrequencyRange(89., 2.8, "GHz") assert ds_ids[2]["frequency_double_sideband"] == FrequencyDoubleSideBand(183.31, 1., 1., "GHz") class TestFileFileYAMLReader(unittest.TestCase): """Test units from FileYAMLReader.""" def setUp(self): """Prepare a reader instance with a fake config.""" patterns = ['a{something:3s}.bla'] res_dict = {'reader': {'name': 'fake', 'sensors': ['canon']}, 'file_types': {'ftype1': {'name': 'ft1', 'file_reader': BaseFileHandler, 'file_patterns': patterns}}, 'datasets': {'ch1': {'name': 'ch01', 'wavelength': [0.5, 0.6, 0.7], 'calibration': 'reflectance', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'ch2': {'name': 'ch02', 'wavelength': [0.7, 0.75, 0.8], 'calibration': 'counts', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'lons': {'name': 'lons', 'file_type': 'ftype2'}, 'lats': {'name': 'lats', 'file_type': 'ftype2'}}} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ 'start_time': datetime(2000, 1, 1), 'end_time': datetime(2000, 1, 2), }) def test_deprecated_passing_config_files(self): """Test that we get an 
exception when config files are passed to init.""" self.assertRaises(ValueError, yr.FileYAMLReader, '/path/to/some/file.yaml') def test_all_data_ids(self): """Check that all dataset ids are returned.""" for dataid in self.reader.all_dataset_ids: name = dataid['name'].replace('0', '') assert self.config['datasets'][name]['name'] == dataid['name'] if 'wavelength' in self.config['datasets'][name]: assert self.config['datasets'][name]['wavelength'] == list(dataid['wavelength'])[:3] if 'calibration' in self.config['datasets'][name]: assert self.config['datasets'][name]['calibration'] == dataid['calibration'] def test_all_dataset_names(self): """Get all dataset names.""" self.assertSetEqual(self.reader.all_dataset_names, set(['ch01', 'ch02', 'lons', 'lats'])) def test_available_dataset_ids(self): """Get ids of the available datasets.""" loadables = self.reader.select_files_from_pathnames(['a001.bla']) self.reader.create_filehandlers(loadables) self.assertSetEqual(set(self.reader.available_dataset_ids), {make_dataid(name='ch02', wavelength=(0.7, 0.75, 0.8), calibration='counts', modifiers=()), make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), calibration='reflectance', modifiers=())}) def test_available_dataset_names(self): """Get names of the available datasets.""" loadables = self.reader.select_files_from_pathnames(['a001.bla']) self.reader.create_filehandlers(loadables) self.assertSetEqual(set(self.reader.available_dataset_names), set(["ch01", "ch02"])) def test_filter_fh_by_time(self): """Check filtering filehandlers by time.""" fh0 = FakeFH(datetime(1999, 12, 30), datetime(1999, 12, 31)) fh1 = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 1, 12, 30)) fh2 = FakeFH(datetime(2000, 1, 1, 10, 0), datetime(2000, 1, 1, 12, 30)) fh3 = FakeFH(datetime(2000, 1, 1, 12, 30), datetime(2000, 1, 2, 12, 30)) fh4 = FakeFH(datetime(2000, 1, 2, 12, 30), datetime(2000, 1, 3, 12, 30)) fh5 = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 3, 12, 30)) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, fh.end_time) # only fh0 and fh4 fall outside the filter window, so only they should be False self.assertEqual(res, idx not in [0, 4]) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, None) self.assertEqual(res, idx not in [0, 1, 4, 5]) @patch('satpy.readers.yaml_reader.get_area_def') @patch('satpy.readers.yaml_reader.AreaDefBoundary') @patch('satpy.readers.yaml_reader.Boundary') def test_file_covers_area(self, bnd, adb, gad): """Test that area coverage is checked properly.""" file_handler = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 3, 12, 30)) self.reader.filter_parameters['area'] = True bnd.return_value.contour_poly.intersection.return_value = True adb.return_value.contour_poly.intersection.return_value = True res = self.reader.check_file_covers_area(file_handler, True) self.assertTrue(res) bnd.return_value.contour_poly.intersection.return_value = False adb.return_value.contour_poly.intersection.return_value = False res = self.reader.check_file_covers_area(file_handler, True) self.assertFalse(res) file_handler.get_bounding_box.side_effect = NotImplementedError() self.reader.filter_parameters['area'] = True res = self.reader.check_file_covers_area(file_handler, True) self.assertTrue(res) def test_start_end_time(self): """Check start and end time behaviours.""" self.reader.file_handlers = {} def get_start_time(): return self.reader.start_time self.assertRaises(RuntimeError, get_start_time) def get_end_time(): return
self.reader.end_time self.assertRaises(RuntimeError, get_end_time) fh0 = FakeFH(datetime(1999, 12, 30, 0, 0), datetime(1999, 12, 31, 0, 0)) fh1 = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 1, 12, 30)) fh2 = FakeFH(datetime(2000, 1, 1, 10, 0), datetime(2000, 1, 1, 12, 30)) fh3 = FakeFH(datetime(2000, 1, 1, 12, 30), datetime(2000, 1, 2, 12, 30)) fh4 = FakeFH(datetime(2000, 1, 2, 12, 30), datetime(2000, 1, 3, 12, 30)) fh5 = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 3, 12, 30)) self.reader.file_handlers = { '0': [fh1, fh2, fh3, fh4, fh5], '1': [fh0, fh1, fh2, fh3, fh4, fh5], '2': [fh2, fh3], } self.assertEqual(self.reader.start_time, datetime(1999, 12, 30, 0, 0)) self.assertEqual(self.reader.end_time, datetime(2000, 1, 3, 12, 30)) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] res = self.reader.select_files_from_pathnames(filelist) for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: self.assertIn(expected, res) self.assertEqual(0, len(self.reader.select_files_from_pathnames([]))) def test_select_from_directory(self): """Check select_files_from_directory.""" filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] dpath = mkdtemp() for fname in filelist: with open(os.path.join(dpath, fname), 'w'): pass res = self.reader.select_files_from_directory(dpath) for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: self.assertIn(os.path.join(dpath, expected), res) for fname in filelist: os.remove(os.path.join(dpath, fname)) self.assertEqual(0, len(self.reader.select_files_from_directory(dpath))) os.rmdir(dpath) from fsspec.implementations.local import LocalFileSystem class Silly(LocalFileSystem): def glob(self, pattern): return ["/grocery/apricot.nc", "/grocery/aubergine.nc"] res = self.reader.select_files_from_directory(dpath, fs=Silly()) self.assertEqual( res, {"/grocery/apricot.nc", "/grocery/aubergine.nc"}) def test_supports_sensor(self): """Check supports_sensor.""" self.assertTrue(self.reader.supports_sensor('canon')) self.assertFalse(self.reader.supports_sensor('nikon')) @patch('satpy.readers.yaml_reader.StackedAreaDefinition') def test_load_area_def(self, sad): """Test loading the area def for the reader.""" dataid = MagicMock() file_handlers = [] items = random.randrange(2, 10) for _i in range(items): file_handlers.append(MagicMock()) final_area = self.reader._load_area_def(dataid, file_handlers) self.assertEqual(final_area, sad.return_value.squeeze.return_value) args, kwargs = sad.call_args self.assertEqual(len(args), items) def test_preferred_filetype(self): """Test finding the preferred filetype.""" self.reader.file_handlers = {'a': 'a', 'b': 'b', 'c': 'c'} self.assertEqual(self.reader._preferred_filetype(['c', 'a']), 'c') self.assertEqual(self.reader._preferred_filetype(['a', 'c']), 'a') self.assertEqual(self.reader._preferred_filetype(['d', 'e']), None) def test_get_coordinates_for_dataset_key(self): """Test getting coordinates for a key.""" ds_q = DataQuery(name='ch01', wavelength=(0.5, 0.6, 0.7, 'µm'), calibration='reflectance', modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_q) self.assertListEqual(res, [make_dataid(name='lons'), make_dataid(name='lats')]) def test_get_coordinates_for_dataset_key_without(self): """Test getting coordinates for a key without coordinates.""" ds_id = make_dataid(name='lons', modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) self.assertListEqual(res, []) def 
test_get_coordinates_for_dataset_keys(self): """Test getting coordinates for keys.""" ds_id1 = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), calibration='reflectance', modifiers=()) ds_id2 = make_dataid(name='ch02', wavelength=(0.7, 0.75, 0.8), calibration='counts', modifiers=()) lons = make_dataid(name='lons', modifiers=()) lats = make_dataid(name='lats', modifiers=()) res = self.reader._get_coordinates_for_dataset_keys([ds_id1, ds_id2, lons]) expected = {ds_id1: [lons, lats], ds_id2: [lons, lats], lons: []} self.assertDictEqual(res, expected) def test_get_file_handlers(self): """Test getting filehandler to load a dataset.""" ds_id1 = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), calibration='reflectance', modifiers=()) self.reader.file_handlers = {'ftype1': 'bla'} self.assertEqual(self.reader._get_file_handlers(ds_id1), 'bla') lons = make_dataid(name='lons', modifiers=()) self.assertEqual(self.reader._get_file_handlers(lons), None) @patch('satpy.readers.yaml_reader.xr') def test_load_entire_dataset(self, xarray): """Check loading an entire dataset.""" file_handlers = [FakeFH(None, None), FakeFH(None, None), FakeFH(None, None), FakeFH(None, None)] proj = self.reader._load_dataset(None, {}, file_handlers) self.assertIs(proj, xarray.concat.return_value) class TestFileYAMLReaderLoading(unittest.TestCase): """Tests for FileYAMLReader.load.""" def setUp(self): """Prepare a reader instance with a fake config.""" patterns = ['a{something:3s}.bla'] res_dict = {'reader': {'name': 'fake', 'sensors': ['canon']}, 'file_types': {'ftype1': {'name': 'ft1', 'file_reader': BaseFileHandler, 'file_patterns': patterns}}, 'datasets': {'ch1': {'name': 'ch01', 'wavelength': [0.5, 0.6, 0.7], 'calibration': 'reflectance', 'file_type': 'ftype1'}, }} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ 'start_time': datetime(2000, 1, 1), 'end_time': datetime(2000, 1, 2), }) fake_fh = FakeFH(None, None) self.lons = xr.DataArray(np.ones((2, 2)) * 2, dims=['y', 'x'], attrs={'standard_name': 'longitude', 'name': 'longitude'}) self.lats = xr.DataArray(np.ones((2, 2)) * 2, dims=['y', 'x'], attrs={'standard_name': 'latitude', 'name': 'latitude'}) self.data = None def _assign_array(dsid, *_args, **_kwargs): if dsid['name'] == 'longitude': return self.lons if dsid['name'] == 'latitude': return self.lats return self.data fake_fh.get_dataset.side_effect = _assign_array self.reader.file_handlers = {'ftype1': [fake_fh]} def test_load_dataset_with_builtin_coords(self): """Test loading a dataset with builtin coordinates.""" self.data = xr.DataArray(np.ones((2, 2)), coords={'longitude': self.lons, 'latitude': self.lats}, dims=['y', 'x']) self._check_area_for_ch01() def test_load_dataset_with_builtin_coords_in_wrong_order(self): """Test loading a dataset with builtin coordinates in the wrong order.""" self.data = xr.DataArray(np.ones((2, 2)), coords={'latitude': self.lats, 'longitude': self.lons}, dims=['y', 'x']) self._check_area_for_ch01() def _check_area_for_ch01(self): res = self.reader.load(['ch01']) assert 'area' in res['ch01'].attrs np.testing.assert_array_equal(res['ch01'].attrs['area'].lons, self.lons) np.testing.assert_array_equal(res['ch01'].attrs['area'].lats, self.lats) assert res['ch01'].attrs.get("reader") == "fake" class TestFileFileYAMLReaderMultipleFileTypes(unittest.TestCase): """Test units from FileYAMLReader with multiple file types.""" def setUp(self): """Prepare a reader instance with a fake config.""" # Example: GOES netCDF data # a) From NOAA CLASS: ftype1, 
including coordinates # b) From EUMETSAT: ftype2, coordinates in extra file (ftype3) # # For test completeness add one channel (ch3) which is only available # in ftype1. patterns1 = ['a.nc'] patterns2 = ['b.nc'] patterns3 = ['geo.nc'] res_dict = {'reader': {'name': 'fake', 'sensors': ['canon']}, 'file_types': {'ftype1': {'name': 'ft1', 'file_patterns': patterns1}, 'ftype2': {'name': 'ft2', 'file_patterns': patterns2}, 'ftype3': {'name': 'ft3', 'file_patterns': patterns3}}, 'datasets': {'ch1': {'name': 'ch01', 'wavelength': [0.5, 0.6, 0.7], 'calibration': 'reflectance', 'file_type': ['ftype1', 'ftype2'], 'coordinates': ['lons', 'lats']}, 'ch2': {'name': 'ch02', 'wavelength': [0.7, 0.75, 0.8], 'calibration': 'counts', 'file_type': ['ftype1', 'ftype2'], 'coordinates': ['lons', 'lats']}, 'ch3': {'name': 'ch03', 'wavelength': [0.8, 0.85, 0.9], 'calibration': 'counts', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'lons': {'name': 'lons', 'file_type': ['ftype1', 'ftype3']}, 'lats': {'name': 'lats', 'file_type': ['ftype1', 'ftype3']}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config) def test_update_ds_ids_from_file_handlers(self): """Test updating existing dataset IDs with information from the file.""" from functools import partial orig_ids = self.reader.all_ids for ftype, resol in zip(('ftype1', 'ftype2'), (1, 2)): # need to copy this because the dataset infos will be modified _orig_ids = {key: val.copy() for key, val in orig_ids.items()} with patch.dict(self.reader.all_ids, _orig_ids, clear=True), \ patch.dict(self.reader.available_ids, {}, clear=True): # Add a file handler with resolution property fh = MagicMock(filetype_info={'file_type': ftype}, resolution=resol) fh.available_datasets = partial(available_datasets, fh) fh.file_type_matches = partial(file_type_matches, fh) self.reader.file_handlers = { ftype: [fh]} # Update existing dataset IDs with resolution property from # the file handler self.reader.update_ds_ids_from_file_handlers() # Make sure the resolution property has been transferred # correctly from the file handler to the dataset ID for ds_id, ds_info in self.reader.all_ids.items(): file_types = ds_info['file_type'] if not isinstance(file_types, list): file_types = [file_types] if ftype in file_types: self.assertEqual(resol, ds_id['resolution']) # Test methods def available_datasets(self, configured_datasets=None): """Fake available_datasets for testing multiple file types.""" res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: new_info = ds_info.copy() new_info['resolution'] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info def file_type_matches(self, ds_ftype): """Fake file_type_matches for testing multiple file types.""" if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']: return True if self.filetype_info['file_type'] in ds_ftype: return True return None class TestGEOFlippableFileYAMLReader(unittest.TestCase): """Test GEOFlippableFileYAMLReader.""" @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") def test_load_dataset_with_area_for_single_areas(self, ldwa): """Test _load_dataset_with_area() for single area definitions.""" from pyresample.geometry import AreaDefinition from satpy.readers.yaml_reader import 
GEOFlippableFileYAMLReader reader = GEOFlippableFileYAMLReader() dsid = MagicMock() coords = MagicMock() # create a dummy upright xarray original_area_extent = (-1500, -1000, 1500, 1000) original_array = np.arange(6).reshape((2, 3)) area_def = AreaDefinition( 'test', 'test', 'test', {'proj': 'geos', 'h': 35785831, 'type': 'crs'}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, coords={'y': np.arange(2), 'x': np.arange(3), 'time': ("y", np.arange(2))}, attrs={'area': area_def}, dims=('y', 'x')) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # check no input, nothing should change res = reader._load_dataset_with_area(dsid, coords) np.testing.assert_equal(res.values, original_array) np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) np.testing.assert_equal(res.coords['y'], np.arange(2)) np.testing.assert_equal(res.coords['x'], np.arange(3)) np.testing.assert_equal(res.coords['time'], np.arange(2)) # check wrong input with self.assertRaises(ValueError): _ = reader._load_dataset_with_area(dsid, coords, 'wronginput') # check native orientation, nothing should change res = reader._load_dataset_with_area(dsid, coords, 'native') np.testing.assert_equal(res.values, original_array) np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) np.testing.assert_equal(res.coords['y'], np.arange(2)) np.testing.assert_equal(res.coords['x'], np.arange(3)) np.testing.assert_equal(res.coords['time'], np.arange(2)) # check upright orientation, nothing should change since area is already upright res = reader._load_dataset_with_area(dsid, coords, 'NE') np.testing.assert_equal(res.values, original_array) np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) np.testing.assert_equal(res.coords['y'], np.arange(2)) np.testing.assert_equal(res.coords['x'], np.arange(3)) np.testing.assert_equal(res.coords['time'], np.arange(2)) # check that left-right image is flipped correctly dummy_ds_xr.attrs['area'] = area_def.copy(area_extent=(1500, -1000, -1500, 1000)) ldwa.return_value = dummy_ds_xr.copy() res = reader._load_dataset_with_area(dsid, coords, 'NE') np.testing.assert_equal(res.values, np.fliplr(original_array)) np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) np.testing.assert_equal(res.coords['y'], np.arange(2)) np.testing.assert_equal(res.coords['x'], np.flip(np.arange(3))) np.testing.assert_equal(res.coords['time'], np.arange(2)) # check that upside down image is flipped correctly dummy_ds_xr.attrs['area'] = area_def.copy(area_extent=(-1500, 1000, 1500, -1000)) ldwa.return_value = dummy_ds_xr.copy() res = reader._load_dataset_with_area(dsid, coords, 'NE') np.testing.assert_equal(res.values, np.flipud(original_array)) np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) np.testing.assert_equal(res.coords['y'], np.flip(np.arange(2))) np.testing.assert_equal(res.coords['x'], np.arange(3)) np.testing.assert_equal(res.coords['time'], np.flip(np.arange(2))) # check different projection than geos, nothing should be changed area_def = AreaDefinition( 'test', 'test', 'test', {'proj': 'lcc', 'lat_1': 25.0, 'type': 'crs'}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, dims=('y', 'x'), attrs={'area': area_def}) ldwa.return_value = dummy_ds_xr res = reader._load_dataset_with_area(dsid, coords, 'NE') np.testing.assert_equal(res.values, original_array) 
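# Only geostationary ('geos') areas are candidates for flipping, so this lcc case
# is expected to come back completely untouched (the values here and the extent in
# the following assertion). For the geos cases above, the flip direction follows
# from the ordering of the area extent relative to the requested 'NE' orientation.
# A minimal, hypothetical sketch of that decision (illustration only, not satpy's
# actual implementation):
#
#     x_min, y_min, x_max, y_max = area.area_extent
#     if x_min > x_max:  # e.g. (1500, -1000, -1500, 1000) -> left-right flip
#         data = np.fliplr(data)
#     if y_min > y_max:  # e.g. (-1500, 1000, 1500, -1000) -> upside-down flip
#         data = np.flipud(data)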
np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") def test_load_dataset_with_area_for_stacked_areas(self, ldwa): """Test _load_dataset_with_area() for stacked area definitions.""" from pyresample.geometry import AreaDefinition, StackedAreaDefinition from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader reader = GEOFlippableFileYAMLReader() dsid = MagicMock() coords = MagicMock() # create a dummy upright xarray original_area_extents = [(-1500, -1000, 1500, 1000), (3000, 5000, 7000, 8000)] original_array = np.arange(12).reshape((4, 3)) area_def0 = AreaDefinition( 'test', 'test', 'test', {'proj': 'geos', 'h': 35785831, 'type': 'crs'}, 3, 2, original_area_extents[0], ) area_def1 = area_def0.copy(area_extent=original_area_extents[1]) dummy_ds_xr = xr.DataArray(original_array, dims=('y', 'x'), coords={'y': np.arange(4), 'x': np.arange(3), 'time': ("y", np.arange(4))}, attrs={'area': StackedAreaDefinition(area_def0, area_def1)}) # check that left-right image is flipped correctly dummy_ds_xr.attrs['area'].defs[0] = area_def0.copy(area_extent=(1500, -1000, -1500, 1000)) dummy_ds_xr.attrs['area'].defs[1] = area_def1.copy(area_extent=(7000, 5000, 3000, 8000)) ldwa.return_value = dummy_ds_xr.copy() res = reader._load_dataset_with_area(dsid, coords, 'NE') np.testing.assert_equal(res.values, np.fliplr(original_array)) np.testing.assert_equal(res.attrs['area'].defs[0].area_extent, original_area_extents[0]) np.testing.assert_equal(res.attrs['area'].defs[1].area_extent, original_area_extents[1]) np.testing.assert_equal(res.coords['y'], np.arange(4)) np.testing.assert_equal(res.coords['x'], np.flip(np.arange(3))) np.testing.assert_equal(res.coords['time'], np.arange(4)) # check that upside down image is flipped correctly dummy_ds_xr.attrs['area'].defs[0] = area_def0.copy(area_extent=(-1500, 1000, 1500, -1000)) dummy_ds_xr.attrs['area'].defs[1] = area_def1.copy(area_extent=(3000, 8000, 7000, 5000)) ldwa.return_value = dummy_ds_xr.copy() res = reader._load_dataset_with_area(dsid, coords, 'NE') np.testing.assert_equal(res.values, np.flipud(original_array)) # note that the order of the stacked areadefs is flipped here, as expected np.testing.assert_equal(res.attrs['area'].defs[1].area_extent, original_area_extents[0]) np.testing.assert_equal(res.attrs['area'].defs[0].area_extent, original_area_extents[1]) np.testing.assert_equal(res.coords['y'], np.flip(np.arange(4))) np.testing.assert_equal(res.coords['x'], np.arange(3)) np.testing.assert_equal(res.coords['time'], np.flip(np.arange(4))) class TestGEOSegmentYAMLReader(unittest.TestCase): """Test GEOSegmentYAMLReader.""" @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "create_filehandlers") def test_get_expected_segments(self, cfh): """Test that expected segments can come from the filename.""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() fake_fh = MagicMock() fake_fh.filename_info = {} fake_fh.filetype_info = {} cfh.return_value = {'ft1': [fake_fh]} # default (1) created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filetype_info['expected_segments'] self.assertEqual(es, 1) # YAML defined for each file type fake_fh.filetype_info['expected_segments'] = 2 created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filetype_info['expected_segments'] self.assertEqual(es, 
2) # defined both in the filename and the YAML metadata # YAML has priority fake_fh.filename_info = {'total_segments': 3} fake_fh.filetype_info = {'expected_segments': 2} created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filetype_info['expected_segments'] self.assertEqual(es, 2) # defined in the filename fake_fh.filename_info = {'total_segments': 3} fake_fh.filetype_info = {} created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filetype_info['expected_segments'] self.assertEqual(es, 3) # check correct FCI chunk number reading into segment fake_fh.filename_info = {'count_in_repeat_cycle': 5} created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filename_info['segment'] self.assertEqual(es, 5) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch('satpy.readers.yaml_reader._get_empty_segment_with_height') @patch('satpy.readers.yaml_reader.FileYAMLReader._load_dataset') @patch('satpy.readers.yaml_reader.xr') @patch('satpy.readers.yaml_reader._find_missing_segments') def test_load_dataset(self, mss, xr, parent_load_dataset, geswh): """Test _load_dataset().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() # Projectable is None mss.return_value = [0, 0, 0, False, None] with self.assertRaises(KeyError): res = reader._load_dataset(None, None, None) # Failure is True mss.return_value = [0, 0, 0, True, 0] with self.assertRaises(KeyError): res = reader._load_dataset(None, None, None) # Setup input, and output of mocked functions counter = 9 expected_segments = 8 seg = MagicMock(dims=['y', 'x']) slice_list = expected_segments * [seg, ] failure = False projectable = MagicMock() mss.return_value = (counter, expected_segments, slice_list, failure, projectable) empty_segment = MagicMock() xr.full_like.return_value = empty_segment concat_slices = MagicMock() xr.concat.return_value = concat_slices dataid = MagicMock() ds_info = MagicMock() file_handlers = MagicMock() # No missing segments res = reader._load_dataset(dataid, ds_info, file_handlers) self.assertTrue(res.attrs is file_handlers[0].combine_info.return_value) self.assertTrue(empty_segment not in slice_list) # One missing segment in the middle slice_list[4] = None counter = 8 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) self.assertTrue(slice_list[4] is empty_segment) # The last segment is missing slice_list = expected_segments * [seg, ] slice_list[-1] = None counter = 8 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) self.assertTrue(slice_list[-1] is empty_segment) # The last two segments are missing slice_list = expected_segments * [seg, ] slice_list[-1] = None counter = 7 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) self.assertTrue(slice_list[-1] is empty_segment) self.assertTrue(slice_list[-2] is empty_segment) # The first segment is missing slice_list = expected_segments * [seg, ] slice_list[0] = None counter = 9 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) self.assertTrue(slice_list[0] is empty_segment) # The first two segments are missing slice_list = expected_segments * [seg, ] slice_list[0] = None slice_list[1] = None counter 
= 9 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) self.assertTrue(slice_list[0] is empty_segment) self.assertTrue(slice_list[1] is empty_segment) # Check that new FCI empty segment is generated if missing in the middle and at the end fake_fh = MagicMock() fake_fh.filename_info = {} fake_fh.filetype_info = {'file_type': 'fci_l1c_fdhsi'} empty_segment.shape = (140, 5568) slice_list[4] = None counter = 7 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, [fake_fh]) assert 2 == geswh.call_count # Disable padding res = reader._load_dataset(dataid, ds_info, file_handlers, pad_data=False) parent_load_dataset.assert_called_once_with(dataid, ds_info, file_handlers) def test_get_empty_segment_with_height(self): """Test _get_empty_segment_with_height().""" from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh dim = 'y' # check expansion of empty segment empty_segment = xr.DataArray(np.ones((139, 5568)), dims=['y', 'x']) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (140, 5568) # check reduction of empty segment empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) new_height = 139 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (139, 5568) # check that empty segment is not modified if it has the right height already empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment is empty_segment @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch('satpy.readers.yaml_reader._load_area_def') @patch('satpy.readers.yaml_reader._stack_area_defs') @patch('satpy.readers.yaml_reader._pad_earlier_segments_area') @patch('satpy.readers.yaml_reader._pad_later_segments_area') def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): """Test _load_area_def().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() dataid = MagicMock() file_handlers = MagicMock() reader._load_area_def(dataid, file_handlers) pesa.assert_called_once() plsa.assert_called_once() sad.assert_called_once() parent_load_area_def.assert_not_called() # Disable padding reader._load_area_def(dataid, file_handlers, pad_data=False) parent_load_area_def.assert_called_once_with(dataid, file_handlers) @patch('satpy.readers.yaml_reader.AreaDefinition') def test_pad_later_segments_area(self, AreaDefinition): """Test _pad_later_segments_area().""" from satpy.readers.yaml_reader import _pad_later_segments_area as plsa seg1_area = MagicMock() seg1_area.crs = 'some_crs' seg1_area.area_extent = [0, 1000, 200, 500] seg1_area.shape = [200, 500] get_area_def = MagicMock() get_area_def.return_value = seg1_area fh_1 = MagicMock() filetype_info = {'expected_segments': 2} filename_info = {'segment': 1} fh_1.filetype_info = filetype_info fh_1.filename_info = filename_info fh_1.get_area_def = get_area_def file_handlers = [fh_1] dataid = 'dataid' res = plsa(file_handlers, dataid) self.assertEqual(len(res), 2) seg2_extent = (0, 1500, 200, 1000) expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, seg2_extent) AreaDefinition.assert_called_once_with(*expected_call) @patch('satpy.readers.yaml_reader.AreaDefinition') def test_pad_later_segments_area_for_FCI_padding(self, 
AreaDefinition): """Test _pad_later_segments_area() in the FCI padding case.""" from satpy.readers.yaml_reader import _pad_later_segments_area as plsa seg1_area = MagicMock() seg1_area.crs = 'some_crs' seg1_area.area_extent = [0, 1000, 200, 500] seg1_area.shape = [556, 11136] get_area_def = MagicMock() get_area_def.return_value = seg1_area fh_1 = MagicMock() filetype_info = {'expected_segments': 2, 'file_type': 'fci_l1c_fdhsi'} filename_info = {'segment': 1} fh_1.filetype_info = filetype_info fh_1.filename_info = filename_info fh_1.get_area_def = get_area_def file_handlers = [fh_1] dataid = 'dataid' res = plsa(file_handlers, dataid) self.assertEqual(len(res), 2) # the previous chunk size is 556, which is exactly double the size of the FCI chunk 2 size (278) # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. # The new area extent lower-left row is therefore 1000+250=1250 seg2_extent = (0, 1250, 200, 1000) expected_call = ('fill', 'fill', 'fill', 'some_crs', 11136, 278, seg2_extent) AreaDefinition.assert_called_once_with(*expected_call) @patch('satpy.readers.yaml_reader.AreaDefinition') def test_pad_earlier_segments_area(self, AreaDefinition): """Test _pad_earlier_segments_area().""" from satpy.readers.yaml_reader import _pad_earlier_segments_area as pesa seg2_area = MagicMock() seg2_area.crs = 'some_crs' seg2_area.area_extent = [0, 1000, 200, 500] seg2_area.shape = [200, 500] get_area_def = MagicMock() get_area_def.return_value = seg2_area fh_2 = MagicMock() filetype_info = {'expected_segments': 2} filename_info = {'segment': 2} fh_2.filetype_info = filetype_info fh_2.filename_info = filename_info fh_2.get_area_def = get_area_def file_handlers = [fh_2] dataid = 'dataid' area_defs = {2: seg2_area} res = pesa(file_handlers, dataid, area_defs) self.assertEqual(len(res), 2) seg1_extent = (0, 500, 200, 0) expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, seg1_extent) AreaDefinition.assert_called_once_with(*expected_call) @patch('satpy.readers.yaml_reader.AreaDefinition') def test_pad_earlier_segments_area_for_FCI_padding(self, AreaDefinition): """Test _pad_earlier_segments_area() for the FCI case.""" from satpy.readers.yaml_reader import _pad_earlier_segments_area as pesa seg2_area = MagicMock() seg2_area.crs = 'some_crs' seg2_area.area_extent = [0, 1000, 200, 500] seg2_area.shape = [278, 5568] get_area_def = MagicMock() get_area_def.return_value = seg2_area fh_2 = MagicMock() filetype_info = {'expected_segments': 2, 'file_type': 'fci_l1c_fdhsi'} filename_info = {'segment': 2} fh_2.filetype_info = filetype_info fh_2.filename_info = filename_info fh_2.get_area_def = get_area_def file_handlers = [fh_2] dataid = 'dataid' area_defs = {2: seg2_area} res = pesa(file_handlers, dataid, area_defs) self.assertEqual(len(res), 2) # the previous chunk size is 278, which is exactly double the size of the FCI chunk 1 size (139) # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. 
# The new area extent lower-left row is therefore 500-250=250 seg1_extent = (0, 500, 200, 250) expected_call = ('fill', 'fill', 'fill', 'some_crs', 5568, 139, seg1_extent) AreaDefinition.assert_called_once_with(*expected_call) def test_find_missing_segments(self): """Test _find_missing_segments().""" from satpy.readers.yaml_reader import _find_missing_segments as fms # Dataset with only one segment filename_info = {'segment': 1} fh_seg1 = MagicMock(filename_info=filename_info) projectable = 'projectable' get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg1.get_dataset = get_dataset file_handlers = [fh_seg1] ds_info = {'file_type': []} dataid = 'dataid' res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res self.assertEqual(counter, 2) self.assertEqual(expected_segments, 1) self.assertTrue(projectable in slice_list) self.assertFalse(failure) self.assertTrue(proj is projectable) # Three expected segments, first and last missing filename_info = {'segment': 2} filetype_info = {'expected_segments': 3, 'file_type': 'foo'} fh_seg2 = MagicMock(filename_info=filename_info, filetype_info=filetype_info) projectable = 'projectable' get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg2.get_dataset = get_dataset file_handlers = [fh_seg2] ds_info = {'file_type': ['foo']} dataid = 'dataid' res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res self.assertEqual(counter, 3) self.assertEqual(expected_segments, 3) self.assertEqual(slice_list, [None, projectable, None]) self.assertFalse(failure) self.assertTrue(proj is projectable) satpy-0.34.0/satpy/tests/utils.py000066400000000000000000000372111420401153000167440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
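# The helpers defined in this module are used throughout the satpy test suite.
# A typical (hypothetical) usage pattern, assuming only the helpers defined below,
# looks like:
#
#     import dask
#     from satpy.tests.utils import CustomScheduler, make_dataid, make_dsq
#
#     did = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7))
#     query = make_dsq(name='ch01', resolution=1000)
#     with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
#         pass  # code under test that may trigger at most one dask compute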
"""Utilities for various satpy tests.""" from contextlib import contextmanager from datetime import datetime from unittest import mock import dask.array as da import numpy as np from pyresample import create_area_def from pyresample.geometry import BaseDefinition, SwathDefinition from xarray import DataArray from satpy import Scene from satpy.composites import GenericCompositor, IncompatibleAreas from satpy.dataset import DataID, DataQuery from satpy.dataset.dataid import default_id_keys_config, minimal_default_keys_config from satpy.modifiers import ModifierBase from satpy.readers.file_handlers import BaseFileHandler FAKE_FILEHANDLER_START = datetime(2020, 1, 1, 0, 0, 0) FAKE_FILEHANDLER_END = datetime(2020, 1, 1, 1, 0, 0) def make_dataid(**items): """Make a DataID with default keys.""" return DataID(default_id_keys_config, **items) def make_cid(**items): """Make a DataID with a minimal set of keys to id composites.""" return DataID(minimal_default_keys_config, **items) def make_dsq(**items): """Make a dataset query.""" return DataQuery(**items) def spy_decorator(method_to_decorate): """Fancy decorator to wrap an object while still calling it. See https://stackoverflow.com/a/41599695/433202 """ tmp_mock = mock.MagicMock() def wrapper(self, *args, **kwargs): tmp_mock(*args, **kwargs) return method_to_decorate(self, *args, **kwargs) wrapper.mock = tmp_mock return wrapper def convert_file_content_to_data_array(file_content, attrs=tuple(), dims=('z', 'y', 'x')): """Help old reader tests that still use numpy arrays. A lot of old reader tests still use numpy arrays and depend on the "var_name/attr/attr_name" convention established before Satpy used xarray and dask. While these conventions are still used and should be supported, readers need to use xarray DataArrays instead. If possible, new tests should be based on pure DataArray objects instead of the "var_name/attr/attr_name" style syntax provided by the utility file handlers. Args: file_content (dict): Dictionary of string file keys to fake file data. attrs (iterable): Series of attributes to copy to DataArray object from file content dictionary. Defaults to no attributes. dims (iterable): Dimension names to use for resulting DataArrays. The second to last dimension is used for 1D arrays, so for dims of ``('z', 'y', 'x')`` this would use ``'y'``. Otherwise, the dimensions are used starting with the last, so 2D arrays are ``('y', 'x')`` Dimensions are used in reverse order so the last dimension specified is used as the only dimension for 1D arrays and the last dimension for other arrays. 
""" for key, val in file_content.items(): da_attrs = {} for a in attrs: if key + '/attr/' + a in file_content: da_attrs[a] = file_content[key + '/attr/' + a] if isinstance(val, np.ndarray): val = da.from_array(val, chunks=4096) if val.ndim == 1: da_dims = dims[-2] elif val.ndim > 1: da_dims = tuple(dims[-val.ndim:]) else: da_dims = None file_content[key] = DataArray(val, dims=da_dims, attrs=da_attrs) def _filter_datasets(all_ds, names_or_ids): """Help filtering DataIDs by name or DataQuery.""" # DataID will match a str to the name # need to separate them out str_filter = [ds_name for ds_name in names_or_ids if isinstance(ds_name, str)] id_filter = [ds_id for ds_id in names_or_ids if not isinstance(ds_id, str)] for ds_id in all_ds: if ds_id in id_filter or ds_id['name'] in str_filter: yield ds_id def _swath_def_of_data_arrays(rows, cols): return SwathDefinition( DataArray(da.zeros((rows, cols)), dims=('y', 'x')), DataArray(da.zeros((rows, cols)), dims=('y', 'x')), ) class FakeModifier(ModifierBase): """Act as a modifier that performs different modifications.""" def _handle_res_change(self, datasets, info): # assume this is used on the 500m version of ds5 info['resolution'] = 250 rep_data_arr = datasets[0] y_size = rep_data_arr.sizes['y'] x_size = rep_data_arr.sizes['x'] data = da.zeros((y_size * 2, x_size * 2)) if isinstance(rep_data_arr.attrs['area'], SwathDefinition): area = _swath_def_of_data_arrays(y_size * 2, x_size * 2) info['area'] = area else: raise NotImplementedError("'res_change' modifier can't handle " "AreaDefinition changes yet.") return data def __call__(self, datasets, optional_datasets=None, **kwargs): """Modify provided data depending on the modifier name and input data.""" if self.attrs['optional_prerequisites']: for opt_dep in self.attrs['optional_prerequisites']: opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get('name', '') if 'NOPE' in opt_dep_name or 'fail' in opt_dep_name: continue assert (optional_datasets is not None and len(optional_datasets)) resolution = datasets[0].attrs.get('resolution') mod_name = self.attrs['modifiers'][-1] data = datasets[0].data i = datasets[0].attrs.copy() if mod_name == 'res_change' and resolution is not None: data = self._handle_res_change(datasets, i) elif 'incomp_areas' in mod_name: raise IncompatibleAreas( "Test modifier 'incomp_areas' always raises IncompatibleAreas") self.apply_modifier_info(datasets[0].attrs, i) return DataArray(data, dims=datasets[0].dims, # coords=datasets[0].coords, attrs=i) class FakeCompositor(GenericCompositor): """Act as a compositor that produces fake RGB data.""" def __call__(self, projectables, nonprojectables=None, **kwargs): """Produce test compositor data depending on modifiers and input data provided.""" projectables = self.match_data_arrays(projectables) if nonprojectables: self.match_data_arrays(nonprojectables) info = self.attrs.copy() if self.attrs['name'] in ('comp14', 'comp26'): # used as a test when composites update the dataset id with # information from prereqs info['resolution'] = 555 if self.attrs['name'] in ('comp24', 'comp25'): # other composites that copy the resolution from inputs info['resolution'] = projectables[0].attrs.get('resolution') if len(projectables) != len(self.attrs['prerequisites']): raise ValueError("Not enough prerequisite datasets passed") info.update(kwargs) info['area'] = projectables[0].attrs['area'] dim_sizes = projectables[0].sizes return DataArray(data=da.zeros((dim_sizes['y'], dim_sizes['x'], 3)), attrs=info, dims=['y', 'x', 'bands'], 
coords={'bands': ['R', 'G', 'B']}) class FakeFileHandler(BaseFileHandler): """Fake file handler to be used by test readers.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize file handler and accept all keyword arguments.""" self.kwargs = kwargs super().__init__(filename, filename_info, filetype_info) @property def start_time(self): """Get static start time datetime object.""" return FAKE_FILEHANDLER_START @property def end_time(self): """Get static end time datetime object.""" return FAKE_FILEHANDLER_END @property def sensor_names(self): """Get sensor name from filetype configuration.""" sensor = self.filetype_info.get('sensor', 'fake_sensor') return {sensor} def get_dataset(self, data_id: DataID, ds_info: dict): """Get fake DataArray for testing.""" if data_id['name'] == 'ds9_fail_load': raise KeyError("Can't load '{}' because it is supposed to " "fail.".format(data_id['name'])) attrs = data_id.to_dict() attrs.update(ds_info) attrs['sensor'] = self.filetype_info.get('sensor', 'fake_sensor') attrs['platform_name'] = 'fake_platform' attrs['start_time'] = self.start_time attrs['end_time'] = self.end_time res = attrs.get('resolution', 250) rows = cols = { 250: 20, 500: 10, 1000: 5, }.get(res, 5) return DataArray(data=da.zeros((rows, cols)), attrs=attrs, dims=['y', 'x']) def available_datasets(self, configured_datasets=None): """Report YAML datasets available unless 'not_available' is specified during creation.""" not_available_names = self.kwargs.get("not_available", []) for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue ft_matches = self.file_type_matches(ds_info['file_type']) if not ft_matches: yield None, ds_info continue # mimic what happens when a reader "knows" about one variable # but the files loaded don't have that variable is_avail = ds_info["name"] not in not_available_names yield is_avail, ds_info class CustomScheduler(object): """Scheduler raising an exception if data are computed too many times.""" def __init__(self, max_computes=1): """Set starting and maximum compute counts.""" self.max_computes = max_computes self.total_computes = 0 def __call__(self, dsk, keys, **kwargs): """Compute dask task and keep track of number of times we do so.""" import dask self.total_computes += 1 if self.total_computes > self.max_computes: raise RuntimeError("Too many dask computations were scheduled: " "{}".format(self.total_computes)) return dask.get(dsk, keys, **kwargs) @contextmanager def assert_maximum_dask_computes(max_computes=1): """Context manager to make sure dask computations are not executed more than ``max_computes`` times.""" import dask with dask.config.set(scheduler=CustomScheduler(max_computes=max_computes)) as new_config: yield new_config def make_fake_scene(content_dict, daskify=False, area=True, common_attrs=None): """Create a fake Scene. Create a fake Scene object from fake data. Data are provided in the ``content_dict`` argument. In ``content_dict``, keys should be strings or DataID, and values may be either numpy.ndarray or xarray.DataArray, in either case with exactly two dimensions. The function will convert each of the numpy.ndarray objects into an xarray.DataArray and assign those as datasets to a Scene object. A fake AreaDefinition will be assigned for each array, unless disabled by passing ``area=False``. 
When areas are automatically generated, arrays with the same shape will get the same area. This function is exclusively intended for testing purposes. If regular ndarrays are passed and the keyword argument daskify is True, DataArrays will be created as dask arrays. If False (default), regular DataArrays will be created. When the user passes xarray.DataArray objects then this flag has no effect. Args: content_dict (Mapping): Mapping where keys correspond to objects accepted by ``Scene.__setitem__``, i.e. strings or DataID, and values may be either ``numpy.ndarray`` or ``xarray.DataArray``. daskify (bool): optional, to use dask when converting ``numpy.ndarray`` to ``xarray.DataArray``. No effect when the values in ``content_dict`` are already ``xarray.DataArray``. area (bool or BaseDefinition): Can be ``True``, ``False``, or an instance of ``pyresample.geometry.BaseDefinition`` such as ``AreaDefinition`` or ``SwathDefinition``. If ``True``, which is the default, automatically generate areas. If ``False``, values will not have assigned areas. If an instance of ``pyresample.geometry.BaseDefinition``, those instances will be used for all generated fake datasets. Warning: Passing an area as a string (``area="germ"``) is not supported. common_attrs (Mapping): optional, additional attributes that will be added to every dataset in the scene. Returns: Scene object with datasets corresponding to content_dict. """ if common_attrs is None: common_attrs = {} sc = Scene() for (did, arr) in content_dict.items(): extra_attrs = common_attrs.copy() if isinstance(area, BaseDefinition): extra_attrs["area"] = area elif area: extra_attrs["area"] = create_area_def( "test-area", {"proj": "eqc", "lat_ts": 0, "lat_0": 0, "lon_0": 0, "x_0": 0, "y_0": 0, "ellps": "sphere", "units": "m", "no_defs": None, "type": "crs"}, units="m", shape=arr.shape, resolution=1000, center=(0, 0)) if isinstance(arr, DataArray): sc[did] = arr.copy() # don't change attributes of input sc[did].attrs.update(extra_attrs) else: if daskify: arr = da.from_array(arr) sc[did] = DataArray( arr, dims=("y", "x"), attrs=extra_attrs) return sc def assert_attrs_equal(attrs, attrs_exp, tolerance=0): """Test that attributes are equal. Walks dictionary recursively. Numerical attributes are compared with the given relative tolerance. """ keys_diff = set(attrs).difference(set(attrs_exp)) assert not keys_diff, "Different set of keys: {}".format(keys_diff) for key in attrs_exp: err_msg = "Attribute {} does not match expectation".format(key) if isinstance(attrs[key], dict): assert_attrs_equal(attrs[key], attrs_exp[key], tolerance) else: try: np.testing.assert_allclose( attrs[key], attrs_exp[key], rtol=tolerance, err_msg=err_msg ) except TypeError: assert attrs[key] == attrs_exp[key], err_msg satpy-0.34.0/satpy/tests/writer_tests/000077500000000000000000000000001420401153000177645ustar00rootroot00000000000000satpy-0.34.0/satpy/tests/writer_tests/__init__.py000066400000000000000000000013761420401153000221040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The writer tests package.""" satpy-0.34.0/satpy/tests/writer_tests/test_awips_tiled.py000066400000000000000000000561661420401153000237140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the AWIPS Tiled writer.""" import logging import os from datetime import datetime, timedelta from glob import glob import dask import dask.array as da import numpy as np import pytest import xarray as xr from pyproj import CRS from satpy.resample import update_resampled_coords def _check_production_location(ds): if 'production_site' in ds.attrs: prod_loc_name = 'production_site' elif 'production_location' in ds.attrs: prod_loc_name = 'production_location' else: return if prod_loc_name in ds.attrs: assert len(ds.attrs[prod_loc_name]) == 31 def check_required_properties(unmasked_ds, masked_ds): """Check various aspects of coordinates and attributes for correctness.""" _check_scaled_x_coordinate_variable(unmasked_ds, masked_ds) _check_scaled_y_coordinate_variable(unmasked_ds, masked_ds) _check_required_common_attributes(unmasked_ds) def _check_required_common_attributes(ds): """Check common properties of the created AWIPS tiles for validity.""" for attr_name in ('tile_row_offset', 'tile_column_offset', 'product_tile_height', 'product_tile_width', 'number_product_tiles', 'product_rows', 'product_columns'): assert attr_name in ds.attrs _check_production_location(ds) for data_arr in ds.data_vars.values(): if data_arr.ndim == 0: # grid mapping variable assert 'grid_mapping_name' in data_arr.attrs continue assert data_arr.encoding.get('zlib', False) assert 'grid_mapping' in data_arr.attrs assert data_arr.attrs['grid_mapping'] in ds def _check_scaled_x_coordinate_variable(ds, masked_ds): assert 'x' in ds.coords x_coord = ds.coords['x'] np.testing.assert_equal(np.diff(x_coord), 1) x_attrs = x_coord.attrs assert x_attrs.get('standard_name') == 'projection_x_coordinate' assert x_attrs.get('units') == 'meter' assert 'scale_factor' in x_attrs assert x_attrs['scale_factor'] > 0 assert 'add_offset' in x_attrs unscaled_x = masked_ds.coords['x'].values assert (np.diff(unscaled_x) > 0).all() def _check_scaled_y_coordinate_variable(ds, masked_ds): assert 'y' in ds.coords y_coord = ds.coords['y'] np.testing.assert_equal(np.diff(y_coord), 1) y_attrs = y_coord.attrs assert y_attrs.get('standard_name') == 'projection_y_coordinate' assert y_attrs.get('units') == 'meter' assert 'scale_factor' in y_attrs assert y_attrs['scale_factor'] < 0 assert 'add_offset' in y_attrs unscaled_y = masked_ds.coords['y'].values assert
(np.diff(unscaled_y) < 0).all() class TestAWIPSTiledWriter: """Test basic functionality of AWIPS Tiled writer.""" def setup_method(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() self.start_time = datetime(2018, 1, 1, 12, 0, 0) self.end_time = self.start_time + timedelta(minutes=20) def teardown_method(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_init(self): """Test basic init method of writer.""" from satpy.writers.awips_tiled import AWIPSTiledWriter AWIPSTiledWriter(base_dir=self.base_dir) def _get_test_area(self, shape=(200, 100), crs=None, extents=None): from pyresample.geometry import AreaDefinition if crs is None: crs = CRS('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') if extents is None: extents = (-1000., -1500., 1000., 1500.) area_def = AreaDefinition( 'test', 'test', 'test', crs, shape[1], shape[0], extents, ) return area_def def _get_test_data(self, shape=(200, 100), chunks=50): data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) return da.from_array(data, chunks=chunks) def _get_test_lcc_data(self, dask_arr, area_def): ds = xr.DataArray( dask_arr, dims=('y', 'x') if dask_arr.ndim == 2 else ('bands', 'y', 'x'), attrs=dict( name='test_ds', platform_name='PLAT', sensor='SENSOR', units='1', standard_name='toa_bidirectional_reflectance', area=area_def, start_time=self.start_time, end_time=self.end_time) ) return update_resampled_coords(ds, ds, area_def) @pytest.mark.parametrize('use_save_dataset', [(False,), (True,)]) def test_basic_numbered_1_tile(self, use_save_dataset, caplog): """Test creating a single numbered tile.""" from satpy.writers.awips_tiled import AWIPSTiledWriter data = self._get_test_data() area_def = self._get_test_area() input_data_arr = self._get_test_lcc_data(data, area_def) with caplog.at_level(logging.DEBUG): w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) if use_save_dataset: w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') else: w.save_datasets([input_data_arr], sector_id='TEST', source_name='TESTS') assert "no routine matching" not in caplog.text assert "Can't format string" not in caplog.text all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) assert len(all_files) == 1 assert os.path.basename(all_files[0]) == 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc' for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) output_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, output_ds) scale_factor = output_ds['data'].encoding['scale_factor'] np.testing.assert_allclose(input_data_arr.values, output_ds['data'].data, atol=scale_factor / 2) def test_units_length_warning(self): """Test long 'units' warnings are raised.""" from satpy.writers.awips_tiled import AWIPSTiledWriter data = self._get_test_data() area_def = self._get_test_area() input_data_arr = self._get_test_lcc_data(data, area_def) input_data_arr.attrs["units"] = "this is a really long units string" w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) with pytest.warns(UserWarning, match=r'.*this is a really long units string.*too long.*'): w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') @pytest.mark.parametrize( ("tile_count", "tile_size"), [ ((3, 3), None), (None, (67, 34)), (None, None), ] ) def 
test_basic_numbered_tiles(self, tile_count, tile_size): """Test creating a multiple numbered tiles.""" from satpy.tests.utils import CustomScheduler from satpy.writers.awips_tiled import AWIPSTiledWriter data = self._get_test_data() area_def = self._get_test_area() input_data_arr = self._get_test_lcc_data(data, area_def) w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) save_kwargs = dict( sector_id='TEST', source_name="TESTS", tile_count=tile_count, tile_size=tile_size, extra_global_attrs={'my_global': 'TEST'} ) should_error = tile_count is None and tile_size is None if should_error: with dask.config.set(scheduler=CustomScheduler(0)),\ pytest.raises(ValueError, match=r'Either.*tile_count.*'): w.save_datasets([input_data_arr], **save_kwargs) else: with dask.config.set(scheduler=CustomScheduler(1 * 2)): # precompute=*2 w.save_datasets([input_data_arr], **save_kwargs) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) expected_num_files = 0 if should_error else 9 assert len(all_files) == expected_num_files for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) assert unmasked_ds.attrs['my_global'] == 'TEST' assert unmasked_ds.attrs['sector_id'] == 'TEST' assert 'physical_element' in unmasked_ds.attrs stime = input_data_arr.attrs['start_time'] assert unmasked_ds.attrs['start_date_time'] == stime.strftime('%Y-%m-%dT%H:%M:%S') def test_basic_lettered_tiles(self): """Test creating a lettered grid.""" import xarray as xr from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) data = self._get_test_data(shape=(2000, 1000), chunks=500) area_def = self._get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = self._get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) assert masked_ds.attrs['start_date_time'] == self.start_time.strftime('%Y-%m-%dT%H:%M:%S') def test_basic_lettered_tiles_diff_projection(self): """Test creating a lettered grid from data with differing projection..""" import xarray as xr from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) crs = CRS("+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. 
+lat_0=45 +lat_1=45 +units=m +no_defs") data = self._get_test_data(shape=(2000, 1000), chunks=500) area_def = self._get_test_area(shape=(2000, 1000), crs=crs, extents=(-1000000., -1500000., 1000000., 1500000.)) ds = self._get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) all_files = sorted(glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))) assert len(all_files) == 24 assert "TC02" in all_files[0] # the first tile should be TC02 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) assert masked_ds.attrs['start_date_time'] == self.start_time.strftime('%Y-%m-%dT%H:%M:%S') def test_lettered_tiles_update_existing(self): """Test updating lettered tiles with additional data.""" import shutil import dask import xarray as xr from satpy.writers.awips_tiled import AWIPSTiledWriter first_base_dir = os.path.join(self.base_dir, 'first') w = AWIPSTiledWriter(base_dir=first_base_dir, compress=True) shape = (2000, 1000) data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) # pixels to be filled in later data[:, -200:] = np.nan data = da.from_array(data, chunks=500) area_def = self._get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = self._get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) all_files = sorted(glob(os.path.join(first_base_dir, 'TESTS_AII*.nc'))) assert len(all_files) == 16 first_files = [] second_base_dir = os.path.join(self.base_dir, 'second') os.makedirs(second_base_dir) for fn in all_files: new_fn = fn.replace(first_base_dir, second_base_dir) shutil.copy(fn, new_fn) first_files.append(new_fn) # Second writing/updating # Area is about 100 pixels to the right area_def2 = self._get_test_area(shape=(2000, 1000), extents=(-800000., -1500000., 1200000., 1500000.)) data2 = np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)) # a gap at the beginning where old values remain data2[:, :200] = np.nan # a gap at the end where old values remain data2[:, -400:-300] = np.nan data2 = da.from_array(data2, chunks=500) ds2 = self._get_test_lcc_data(data2, area_def2) w = AWIPSTiledWriter(base_dir=second_base_dir, compress=True) # HACK: The _copy_to_existing function hangs when opening the output # file multiple times...sometimes. If we limit dask to one worker # it seems to work fine. 
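# In other words: the tiles already on disk are updated in place, and several dask
# workers re-opening the same NetCDF file at once can deadlock. Restricting dask to
# a single worker (as done below) serializes the writes; using the synchronous
# scheduler, e.g. dask.config.set(scheduler='synchronous'), would presumably have
# the same effect.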
with dask.config.set(num_workers=1): w.save_datasets([ds2], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) all_files = glob(os.path.join(second_base_dir, 'TESTS_AII*.nc')) # 16 original tiles + 4 new tiles assert len(all_files) == 20 # these tiles should be the right-most edge of the first image first_right_edge_files = [x for x in first_files if 'P02' in x or 'P04' in x or 'V02' in x or 'V04' in x] for new_file in first_right_edge_files: orig_file = new_file.replace(second_base_dir, first_base_dir) orig_nc = xr.open_dataset(orig_file) orig_data = orig_nc['data'].values if not np.isnan(orig_data).any(): # we only care about the tiles that had NaNs originally continue new_nc = xr.open_dataset(new_file) new_data = new_nc['data'].values # there should be at least some areas of the file # that old data was present and hasn't been replaced np.testing.assert_allclose(orig_data[:, :20], new_data[:, :20]) # it isn't exactly 200 because the tiles aren't aligned with the # data (the left-most tile doesn't have data until some columns # in), but it should be at least that many columns assert np.isnan(orig_data[:, 200:]).all() assert not np.isnan(new_data[:, 200:]).all() def test_lettered_tiles_sector_ref(self): """Test creating a lettered grid using the sector as reference.""" import xarray as xr from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) data = self._get_test_data(shape=(2000, 1000), chunks=500) area_def = self._get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = self._get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", lettered_grid=True, use_sector_reference=True, use_end_time=True) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) expected_start = (self.start_time + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S') assert masked_ds.attrs['start_date_time'] == expected_start def test_lettered_tiles_no_fit(self): """Test creating a lettered grid with no data overlapping the grid.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) data = self._get_test_data(shape=(2000, 1000), chunks=500) area_def = self._get_test_area(shape=(2000, 1000), extents=(4000000., 5000000., 5000000., 6000000.)) ds = self._get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) assert not all_files def test_lettered_tiles_no_valid_data(self): """Test creating a lettered grid with no valid data.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) data = da.full((2000, 1000), np.nan, chunks=500, dtype=np.float32) area_def = self._get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = self._get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all NaNs should result in no tiles being created all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) assert not all_files def 
test_lettered_tiles_bad_filename(self): """Test creating a lettered grid with a bad filename.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True, filename="{Bad Key}.nc") data = self._get_test_data(shape=(2000, 1000), chunks=500) area_def = self._get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = self._get_test_lcc_data(data, area_def) with pytest.raises(KeyError): w.save_datasets([ds], sector_id='LCC', source_name='TESTS', tile_count=(3, 3), lettered_grid=True) def test_basic_numbered_tiles_rgb(self): """Test creating a multiple numbered tiles with RGB.""" import xarray as xr from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) data = da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50) area_def = self._get_test_area() ds = self._get_test_lcc_data(data, area_def) ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ['bands', 'y', 'x']))) ds.coords['bands'] = ['R', 'G', 'B'] w.save_datasets([ds], sector_id='TEST', source_name="TESTS", tile_count=(3, 3)) chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_R*.nc')) all_files = chan_files[:] assert len(chan_files) == 9 chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_G*.nc')) all_files.extend(chan_files) assert len(chan_files) == 9 chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_B*.nc')) assert len(chan_files) == 9 all_files.extend(chan_files) for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) @pytest.mark.parametrize( "sector", ['C', 'F'] ) @pytest.mark.parametrize( "extra_kwargs", [ {}, {'environment_prefix': 'AA'}, {'environment_prefix': 'BB', 'filename': '{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc'}, ] ) def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs): """Test creating a tiles with multiple variables.""" import os import xarray as xr from satpy.writers.awips_tiled import AWIPSTiledWriter os.environ['ORGANIZATION'] = '1' * 50 w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) data = self._get_test_data() area_def = self._get_test_area() ds1 = self._get_test_lcc_data(data, area_def) ds1.attrs.update( dict( name='total_energy', platform_name='GOES-17', sensor='SENSOR', units='1', scan_mode='M3', scene_abbr=sector, platform_shortname="G17" ) ) ds2 = ds1.copy() ds2.attrs.update({ 'name': 'flash_extent_density', }) ds3 = ds1.copy() ds3.attrs.update({ 'name': 'average_flash_area', }) dqf = ds1.copy() dqf = (dqf * 255).astype(np.uint8) dqf.attrs = ds1.attrs.copy() dqf.attrs.update({ 'name': 'DQF', '_FillValue': 1, }) w.save_datasets([ds1, ds2, ds3, dqf], sector_id='TEST', source_name="TESTS", tile_count=(3, 3), template='glm_l2_rad{}'.format(sector.lower()), **extra_kwargs) fn_glob = self._get_glm_glob_filename(extra_kwargs) all_files = glob(os.path.join(self.base_dir, fn_glob)) assert len(all_files) == 9 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) if sector == 'C': assert masked_ds.attrs['time_coverage_end'] == self.end_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') else: # 'F' assert masked_ds.attrs['time_coverage_end'] == self.end_time.strftime('%Y-%m-%dT%H:%M:%SZ') @staticmethod def 
_get_glm_glob_filename(extra_kwargs): if 'filename' in extra_kwargs: return 'BB*_GLM*.nc' elif 'environment_prefix' in extra_kwargs: return 'AA*_GLM*.nc' return 'DR*_GLM*.nc' satpy-0.34.0/satpy/tests/writer_tests/test_cf.py000066400000000000000000001667001420401153000217770ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the CF writer.""" import os import tempfile import unittest from collections import OrderedDict from datetime import datetime from unittest import mock import numpy as np from satpy.tests.utils import make_dsq try: from pyproj import CRS except ImportError: CRS = None class TempFile(object): """A temporary filename class.""" def __init__(self, suffix=".nc"): """Initialize.""" self.filename = None self.suffix = suffix def __enter__(self): """Enter.""" self.handle, self.filename = tempfile.mkstemp(suffix=self.suffix) os.close(self.handle) return self.filename def __exit__(self, *args): """Exit.""" os.remove(self.filename) class TestCFWriter(unittest.TestCase): """Test case for CF writer.""" def test_init(self): """Test initializing the CFWriter class.""" from satpy.writers import configs_for_writer from satpy.writers.cf_writer import CFWriter CFWriter(config_files=list(configs_for_writer('cf'))[0]) def test_save_array(self): """Test saving an array to netcdf/cf.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) scn['test-array'] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dsq(name='hej')])) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['test-array'][:], [1, 2, 3]) expected_prereq = ("DataQuery(name='hej')") self.assertEqual(f['test-array'].attrs['prerequisites'], expected_prereq) def test_save_with_compression(self): """Test saving an array with compression.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) with mock.patch('satpy.writers.cf_writer.xr.Dataset') as xrdataset,\ mock.patch('satpy.writers.cf_writer.make_time_bounds'): scn['test-array'] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dsq(name='hej')])) comp = {'zlib': True, 'complevel': 9} scn.save_datasets(filename='bla', writer='cf', compression=comp) ars, kws = xrdataset.call_args_list[1] self.assertDictEqual(ars[0]['test-array'].encoding, comp) def test_save_array_coords(self): """Test saving array with coordinates.""" import numpy as np import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) coords = { 'x': np.arange(3), 'y': np.arange(1), } if CRS is not 
None: proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 ' '+a=6378137.0 +b=6356752.31414 +sweep=x ' '+units=m +no_defs') coords['crs'] = CRS.from_string(proj_str) scn['test-array'] = xr.DataArray([[1, 2, 3]], dims=('y', 'x'), coords=coords, attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dsq(name='hej')])) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['test-array'][:], [[1, 2, 3]]) np.testing.assert_array_equal(f['x'][:], [0, 1, 2]) np.testing.assert_array_equal(f['y'][:], [0]) self.assertNotIn('crs', f) self.assertNotIn('_FillValue', f['x'].attrs) self.assertNotIn('_FillValue', f['y'].attrs) expected_prereq = ("DataQuery(name='hej')") self.assertEqual(f['test-array'].attrs['prerequisites'], expected_prereq) def test_save_dataset_a_digit(self): """Test saving an array to netcdf/cf where dataset name starting with a digit.""" import xarray as xr from satpy import Scene scn = Scene() scn['1'] = xr.DataArray([1, 2, 3]) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['CHANNEL_1'][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix.""" import xarray as xr from satpy import Scene scn = Scene() scn['1'] = xr.DataArray([1, 2, 3]) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', numeric_name_prefix='TEST') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix_include_attr(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" import xarray as xr from satpy import Scene scn = Scene() scn['1'] = xr.DataArray([1, 2, 3]) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='TEST') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) self.assertEqual(f['TEST1'].attrs['original_name'], '1') def test_save_dataset_a_digit_no_prefix_include_attr(self): """Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name.""" import xarray as xr from satpy import Scene scn = Scene() scn['1'] = xr.DataArray([1, 2, 3]) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['1'][:], [1, 2, 3]) self.assertNotIn('original_name', f['1'].attrs) def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" import xarray as xr from satpy import Scene from satpy.tests.utils import make_dataid scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) da = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dataid(name='hej')])) scn['test-array-1'] = da scn['test-array-2'] = da.copy() scn['test-array-1'].attrs['ancillary_variables'] = [scn['test-array-2']] scn['test-array-2'].attrs['ancillary_variables'] = [scn['test-array-1']] with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: self.assertEqual(f['test-array-1'].attrs['ancillary_variables'], 'test-array-2') 
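# The CF writer is expected to turn the 'ancillary_variables' attribute, which is
# a list of DataArray objects on the input Scene, into the CF-style blank-separated
# string of variable names, hence the comparison against a plain string here and in
# the symmetric check just below.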
self.assertEqual(f['test-array-2'].attrs['ancillary_variables'], 'test-array-1') def test_groups(self): """Test creating a file with groups.""" import xarray as xr from satpy import Scene tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) data_visir = [[1, 2], [3, 4]] y_visir = [1, 2] x_visir = [1, 2] time_vis006 = [1, 2] time_ir_108 = [3, 4] data_hrv = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] y_hrv = [1, 2, 3] x_hrv = [1, 2, 3] time_hrv = [1, 2, 3] scn = Scene() scn['VIS006'] = xr.DataArray(data_visir, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, attrs={'name': 'VIS006', 'start_time': tstart, 'end_time': tend}) scn['IR_108'] = xr.DataArray(data_visir, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_ir_108)}, attrs={'name': 'IR_108', 'start_time': tstart, 'end_time': tend}) scn['HRV'] = xr.DataArray(data_hrv, dims=('y', 'x'), coords={'y': y_hrv, 'x': x_hrv, 'acq_time': ('y', time_hrv)}, attrs={'name': 'HRV', 'start_time': tstart, 'end_time': tend}) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', groups={'visir': ['IR_108', 'VIS006'], 'hrv': ['HRV']}, pretty=True) nc_root = xr.open_dataset(filename) self.assertIn('history', nc_root.attrs) self.assertSetEqual(set(nc_root.variables.keys()), set()) nc_visir = xr.open_dataset(filename, group='visir') nc_hrv = xr.open_dataset(filename, group='hrv') self.assertSetEqual(set(nc_visir.variables.keys()), {'VIS006', 'IR_108', 'y', 'x', 'VIS006_acq_time', 'IR_108_acq_time'}) self.assertSetEqual(set(nc_hrv.variables.keys()), {'HRV', 'y', 'x', 'acq_time'}) for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'], nc_hrv['HRV']], [scn['VIS006'], scn['IR_108'], scn['HRV']]): np.testing.assert_array_equal(tst.data, ref.data) nc_root.close() nc_visir.close() nc_hrv.close() # Different projection coordinates in one group are not supported with TempFile() as filename: self.assertRaises(ValueError, scn.save_datasets, datasets=['VIS006', 'HRV'], filename=filename, writer='cf') def test_single_time_value(self): """Test setting a single time value.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn['test-array'] = xr.DataArray(test_array, dims=['x', 'y'], coords={'time': np.datetime64('2018-05-30T10:05:00')}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f['time'], scn['test-array']['time']) bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) def test_time_coordinate_on_a_swath(self): """Test that time dimension is not added on swath data with time already as a coordinate.""" import xarray as xr from satpy import Scene scn = Scene() test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) scn['test-array'] = xr.DataArray(test_array, dims=['y', 'x'], coords={'time': ('y', times)}, attrs=dict(start_time=times[0], end_time=times[-1])) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', pretty=True) with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f['time'], scn['test-array']['time']) 
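# The time-bounds tests that follow drive Scene.save_datasets(..., writer='cf'),
# which derives a ``time_bnds`` variable from each dataset's ``start_time`` and
# ``end_time`` attributes. A minimal usage sketch, assuming a populated Scene
# ``scn`` and a placeholder output path ``out.nc`` (both names are illustrative):
#
#     scn.save_datasets(filename='out.nc', writer='cf',
#                       encoding={'time': {'units': 'seconds since 2018-01-01'}})
#
# The ``encoding`` keyword is optional; when supplied it is passed through to
# xarray and takes precedence over the writer's default time encoding, as
# exercised at the end of test_bounds below.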
def test_bounds(self): """Test setting time bounds.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) scn['test-array'] = xr.DataArray(test_array, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') # Check decoded time coordinates & bounds with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) self.assertEqual(f['time'].attrs['bounds'], 'time_bnds') # Check raw time coordinates & bounds with xr.open_dataset(filename, decode_cf=False) as f: np.testing.assert_almost_equal(f['time_bnds'], [[-0.0034722, 0.0069444]]) # User-specified time encoding should have preference with TempFile() as filename: time_units = 'seconds since 2018-01-01' scn.save_datasets(filename=filename, encoding={'time': {'units': time_units}}, writer='cf') with xr.open_dataset(filename, decode_cf=False) as f: np.testing.assert_array_equal(f['time_bnds'], [[12909600, 12910500]]) def test_bounds_minimum(self): """Test minimum bounds.""" import xarray as xr from satpy import Scene scn = Scene() start_timeA = datetime(2018, 5, 30, 10, 0) # expected to be used end_timeA = datetime(2018, 5, 30, 10, 20) start_timeB = datetime(2018, 5, 30, 10, 3) end_timeB = datetime(2018, 5, 30, 10, 15) # expected to be used test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn['test-arrayA'] = xr.DataArray(test_arrayA, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn['test-arrayB'] = xr.DataArray(test_arrayB, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeB]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) def test_bounds_missing_time_info(self): """Test time bounds generation in case of missing time.""" import xarray as xr from satpy import Scene scn = Scene() start_timeA = datetime(2018, 5, 30, 10, 0) end_timeA = datetime(2018, 5, 30, 10, 15) test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn['test-arrayA'] = xr.DataArray(test_arrayA, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn['test-arrayB'] = xr.DataArray(test_arrayB, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeA]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) def test_encoding_kwarg(self): """Test 'encoding' keyword argument.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) scn['test-array'] = 
xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: encoding = {'test-array': {'dtype': 'int8', 'scale_factor': 0.1, 'add_offset': 0.0, '_FillValue': 3}} scn.save_datasets(filename=filename, encoding=encoding, writer='cf') with xr.open_dataset(filename, mask_and_scale=False) as f: np.testing.assert_array_equal(f['test-array'][:], [10, 20, 30]) self.assertEqual(f['test-array'].attrs['scale_factor'], 0.1) self.assertEqual(f['test-array'].attrs['_FillValue'], 3) # check that dtype behave as int8 self.assertEqual(np.iinfo(f['test-array'][:].dtype).max, 127) def test_unlimited_dims_kwarg(self): """Test specification of unlimited dimensions.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn['test-array'] = xr.DataArray(test_array, dims=['x', 'y'], coords={'time': np.datetime64('2018-05-30T10:05:00')}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', unlimited_dims=['time']) with xr.open_dataset(filename) as f: self.assertSetEqual(f.encoding['unlimited_dims'], {'time'}) def test_header_attrs(self): """Check global attributes are set.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) scn['test-array'] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: header_attrs = {'sensor': 'SEVIRI', 'orbit': 99999, 'none': None, 'list': [1, 2, 3], 'set': {1, 2, 3}, 'dict': {'a': 1, 'b': 2}, 'nested': {'outer': {'inner1': 1, 'inner2': 2}}, 'bool': True, 'bool_': np.bool_(True)} scn.save_datasets(filename=filename, header_attrs=header_attrs, flatten_attrs=True, writer='cf') with xr.open_dataset(filename) as f: self.assertIn('history', f.attrs) self.assertEqual(f.attrs['sensor'], 'SEVIRI') self.assertEqual(f.attrs['orbit'], 99999) np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3]) self.assertEqual(f.attrs['set'], '{1, 2, 3}') self.assertEqual(f.attrs['dict_a'], 1) self.assertEqual(f.attrs['dict_b'], 2) self.assertEqual(f.attrs['nested_outer_inner1'], 1) self.assertEqual(f.attrs['nested_outer_inner2'], 2) self.assertEqual(f.attrs['bool'], 'true') self.assertEqual(f.attrs['bool_'], 'true') self.assertTrue('none' not in f.attrs.keys()) def get_test_attrs(self): """Create some dataset attributes for testing purpose. 
Returns: Attributes, encoded attributes, encoded and flattened attributes """ attrs = {'name': 'IR_108', 'start_time': datetime(2018, 1, 1, 0), 'end_time': datetime(2018, 1, 1, 0, 15), 'int': 1, 'float': 1.0, 'none': None, # should be dropped 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': True, 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), 'numpy_string': np.string_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ["1", ["2", [3]]], 'bool': True, 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': np.array([True, False, True]), 'array_2d': np.array([[1, 2], [3, 4]]), 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), 'dict': {'a': 1, 'b': 2}, 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, 'raw_metadata': OrderedDict([ ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), ('flag', np.bool_(True)), ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) ])} encoded = {'name': 'IR_108', 'start_time': '2018-01-01 00:00:00', 'end_time': '2018-01-01 00:15:00', 'int': 1, 'float': 1.0, 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': 'true', 'numpy_void': '[]', 'numpy_bytes': 'test', 'numpy_string': 'test', 'list': [1, 2, np.float64(3)], 'nested_list': '["1", ["2", [3]]]', 'bool': 'true', 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': ['true', 'false', 'true'], 'array_2d': '[[1, 2], [3, 4]]', 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', 'dict': '{"a": 1, "b": 2}', 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} encoded_flat = {'name': 'IR_108', 'start_time': '2018-01-01 00:00:00', 'end_time': '2018-01-01 00:15:00', 'int': 1, 'float': 1.0, 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': 'true', 'numpy_void': '[]', 'numpy_bytes': 'test', 'numpy_string': 'test', 'list': [1, 2, np.float64(3)], 'nested_list': '["1", ["2", [3]]]', 'bool': 'true', 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': ['true', 'false', 'true'], 'array_2d': '[[1, 2], [3, 4]]', 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', 'dict_a': 1, 'dict_b': 2, 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', 'raw_metadata_flag': 'true', 'raw_metadata_dict_a': 1, 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} return attrs, encoded, encoded_flat def assertDictWithArraysEqual(self, d1, d2): """Check that dicts containing arrays are equal.""" self.assertSetEqual(set(d1.keys()), set(d2.keys())) for key, val1 in d1.items(): val2 = d2[key] if isinstance(val1, np.ndarray): np.testing.assert_array_equal(val1, val2) self.assertEqual(val1.dtype, val2.dtype) else: self.assertEqual(val1, val2) if isinstance(val1, (np.floating, np.integer, np.bool_)): self.assertTrue(isinstance(val2, np.generic)) self.assertEqual(val1.dtype, val2.dtype) def test_encode_attrs_nc(self): """Test attributes encoding.""" import json from satpy.writers.cf_writer import encode_attrs_nc attrs, expected, _ = self.get_test_attrs() # Test encoding encoded = encode_attrs_nc(attrs) self.assertDictWithArraysEqual(expected, encoded) # Test decoding of json-encoded attributes raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], 'flag': 'true', 'dict': {'a': 1, 'b': [1, 2, 3]}} self.assertDictEqual(json.loads(encoded['raw_metadata']), raw_md_roundtrip) self.assertListEqual(json.loads(encoded['array_3d']), 
[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]) self.assertDictEqual(json.loads(encoded['nested_dict']), {"l1": {"l2": {"l3": [1, 2, 3]}}}) self.assertListEqual(json.loads(encoded['nested_list']), ["1", ["2", [3]]]) def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" import xarray as xr from satpy.writers.cf_writer import CFWriter # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() attrs['area'] = 'some_area' attrs['prerequisites'] = [make_dsq(name='hej')] attrs['_satpy_id_name'] = 'myname' # Adjust expected attributes expected_prereq = ("DataQuery(name='hej')") update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']} attrs_expected.update(update) attrs_expected_flat.update(update) attrs_expected.pop('name') attrs_expected_flat.pop('name') # Create test data array arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) # Test conversion to something cf-compliant res = CFWriter.da2cf(arr) np.testing.assert_array_equal(res['x'], arr['x']) np.testing.assert_array_equal(res['y'], arr['y']) np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) self.assertDictEqual(res['x'].attrs, {'units': 'm', 'standard_name': 'projection_x_coordinate'}) self.assertDictEqual(res['y'].attrs, {'units': 'm', 'standard_name': 'projection_y_coordinate'}) self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=['int']) attrs_expected_flat.pop('int') self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None) def test_collect_datasets(self, *mocks): """Test collecting CF datasets from a DataArray objects.""" import pyresample.geometry import xarray as xr from satpy.writers.cf_writer import CFWriter geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) # Define test datasets data = [[1, 2], [3, 4]] y = [1, 2] x = [1, 2] time = [1, 2] tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) datasets = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, attrs={'name': 'var2', 'long_name': 'variable 2'})] # Collect datasets writer = CFWriter() datas, start_times, end_times = writer._collect_datasets(datasets, include_lonlats=True) # Test results self.assertEqual(len(datas), 3) self.assertEqual(set(datas.keys()), {'var1', 'var2', 'geos'}) self.assertListEqual(start_times, [None, tstart, None]) self.assertListEqual(end_times, [None, tend, None]) var1 = datas['var1'] var2 = datas['var2'] self.assertEqual(var1.name, 'var1') self.assertEqual(var1.attrs['grid_mapping'], 'geos') self.assertEqual(var1.attrs['start_time'], '2019-04-01 12:00:00') self.assertEqual(var1.attrs['end_time'], '2019-04-01 12:15:00') self.assertEqual(var1.attrs['long_name'], 'var1') # variable 2 self.assertNotIn('grid_mapping', var2.attrs) self.assertEqual(var2.attrs['long_name'], 'variable 2') def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" import xarray as xr from 
satpy.writers.cf_writer import assert_xy_unique dummy = [[1, 2], [3, 4]] datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} assert_xy_unique(datas) datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) self.assertRaises(ValueError, assert_xy_unique, datas) def test_link_coords(self): """Check that coordinates link has been established correctly.""" import numpy as np import xarray as xr from satpy.writers.cf_writer import link_coords data = [[1, 2], [3, 4]] lon = np.zeros((2, 2)) lon2 = np.zeros((1, 2, 2)) lat = np.ones((2, 2)) datasets = { 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), 'var2': xr.DataArray(data=data, dims=('y', 'x')), 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), 'lon': xr.DataArray(data=lon, dims=('y', 'x')), 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), 'lat': xr.DataArray(data=lat, dims=('y', 'x')) } link_coords(datasets) # Check that link has been established correctly and 'coordinate' atrribute has been dropped self.assertIn('lon', datasets['var1'].coords) self.assertIn('lat', datasets['var1'].coords) np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) self.assertNotIn('coordinates', datasets['var1'].attrs) # There should be no link if there was no 'coordinate' attribute self.assertNotIn('lon', datasets['var2'].coords) self.assertNotIn('lat', datasets['var2'].coords) # The non-existant dimension or coordinate should be dropped self.assertNotIn('time', datasets['var3'].coords) self.assertNotIn('not_exist', datasets['var4'].coords) def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" import xarray as xr from satpy.writers.cf_writer import make_alt_coords_unique data = [[1, 2], [3, 4]] y = [1, 2] x = [1, 2] time1 = [1, 2] time2 = [3, 4] datasets = {'var1': xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), 'var2': xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} # Test that dataset names are prepended to alternative coordinates res = make_alt_coords_unique(datasets) np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) self.assertNotIn('acq_time', res['var1'].coords) self.assertNotIn('acq_time', res['var2'].coords) # Make sure nothing else is modified np.testing.assert_array_equal(res['var1']['x'], x) np.testing.assert_array_equal(res['var1']['y'], y) np.testing.assert_array_equal(res['var2']['x'], x) np.testing.assert_array_equal(res['var2']['y'], y) # Coords not unique -> Dataset names must be prepended, even if pretty=True with mock.patch('satpy.writers.cf_writer.warnings.warn') as warn: res = make_alt_coords_unique(datasets, pretty=True) warn.assert_called() np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) self.assertNotIn('acq_time', res['var1'].coords) self.assertNotIn('acq_time', res['var2'].coords) # Coords unique and pretty=True -> Don't modify coordinate names 
datasets['var2']['acq_time'] = ('y', time1) res = make_alt_coords_unique(datasets, pretty=True) np.testing.assert_array_equal(res['var1']['acq_time'], time1) np.testing.assert_array_equal(res['var2']['acq_time'], time1) self.assertNotIn('var1_acq_time', res['var1'].coords) self.assertNotIn('var2_acq_time', res['var2'].coords) def test_area2cf(self): """Test the conversion of an area to CF standards.""" import pyresample.geometry import xarray as xr from satpy.writers.cf_writer import area2cf ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, attrs={'name': 'var1'}) # a) Area Definition and strict=False geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) ds = ds_base.copy(deep=True) ds.attrs['area'] = geos res = area2cf(ds) self.assertEqual(len(res), 2) self.assertEqual(res[0].size, 1) # grid mapping variable self.assertEqual(res[0].name, res[1].attrs['grid_mapping']) # b) Area Definition and strict=False ds = ds_base.copy(deep=True) ds.attrs['area'] = geos res = area2cf(ds, strict=True) # same as above self.assertEqual(len(res), 2) self.assertEqual(res[0].size, 1) # grid mapping variable self.assertEqual(res[0].name, res[1].attrs['grid_mapping']) # but now also have the lon/lats self.assertIn('longitude', res[1].coords) self.assertIn('latitude', res[1].coords) # c) Swath Definition swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) ds = ds_base.copy(deep=True) ds.attrs['area'] = swath res = area2cf(ds) self.assertEqual(len(res), 1) self.assertIn('longitude', res[0].coords) self.assertIn('latitude', res[0].coords) self.assertNotIn('grid_mapping', res[0].attrs) def test_area2gridmapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" import pyresample.geometry import xarray as xr from satpy.writers.cf_writer import area2gridmapping def _gm_matches(gmapping, expected): """Assert that all keys in ``expected`` match the values in ``gmapping``.""" for attr_key, attr_val in expected.attrs.items(): test_val = gmapping.attrs[attr_key] if attr_val is None or isinstance(attr_val, str): self.assertEqual(test_val, attr_val) else: np.testing.assert_almost_equal(test_val, attr_val, decimal=3) ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, attrs={'name': 'var1'}) # a) Projection has a corresponding CF representation (e.g. geos) a = 6378169. b = 6356583.8 h = 35785831. 
geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': h, 'a': a, 'b': b, 'lat_0': 0, 'lon_0': 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, attrs={'perspective_point_height': h, 'latitude_of_projection_origin': 0, 'longitude_of_projection_origin': 0, 'grid_mapping_name': 'geostationary', 'semi_major_axis': a, 'semi_minor_axis': b, # 'sweep_angle_axis': None, }) ds = ds_base.copy() ds.attrs['area'] = geos new_ds, grid_mapping = area2gridmapping(ds) if 'sweep_angle_axis' in grid_mapping.attrs: # older versions of pyproj might not include this self.assertEqual(grid_mapping.attrs['sweep_angle_axis'], 'y') self.assertEqual(new_ds.attrs['grid_mapping'], 'geos') _gm_matches(grid_mapping, geos_expected) # should not have been modified self.assertNotIn('grid_mapping', ds.attrs) # b) Projection does not have a corresponding CF representation (COSMO) cosmo7 = pyresample.geometry.AreaDefinition( area_id='cosmo7', description='cosmo7', proj_id='cosmo7', projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, width=597, height=510, area_extent=[-1812933, -1003565, 814056, 1243448] ) ds = ds_base.copy() ds.attrs['area'] = cosmo7 new_ds, grid_mapping = area2gridmapping(ds) self.assertIn('crs_wkt', grid_mapping.attrs) wkt = grid_mapping.attrs['crs_wkt'] self.assertIn('ELLIPSOID["WGS 84"', wkt) self.assertIn('PARAMETER["lat_0",46', wkt) self.assertIn('PARAMETER["lon_0",4.535', wkt) self.assertIn('PARAMETER["o_lat_p",90', wkt) self.assertIn('PARAMETER["o_lon_p",-5.465', wkt) self.assertEqual(new_ds.attrs['grid_mapping'], 'cosmo7') # c) Projection Transverse Mercator lat_0 = 36.5 lon_0 = 15.0 tmerc = pyresample.geometry.AreaDefinition( area_id='tmerc', description='tmerc', proj_id='tmerc', projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) tmerc_expected = xr.DataArray(data=0, attrs={'latitude_of_projection_origin': lat_0, 'longitude_of_central_meridian': lon_0, 'grid_mapping_name': 'transverse_mercator', 'reference_ellipsoid_name': 'WGS 84', 'false_easting': 0., 'false_northing': 0., }) ds = ds_base.copy() ds.attrs['area'] = tmerc new_ds, grid_mapping = area2gridmapping(ds) self.assertEqual(new_ds.attrs['grid_mapping'], 'tmerc') _gm_matches(grid_mapping, tmerc_expected) # d) Projection that has a representation but no explicit a/b h = 35785831. 
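# Case d) defines the ellipsoid via datum/ellps instead of explicit semi-axes,
# so the expected grid mapping below only asserts the geostationary parameters
# and leaves the semi_major_axis/semi_minor_axis entries commented out.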
geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', 'lat_0': 0, 'lon_0': 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, attrs={'perspective_point_height': h, 'latitude_of_projection_origin': 0, 'longitude_of_projection_origin': 0, 'grid_mapping_name': 'geostationary', # 'semi_major_axis': 6378137.0, # 'semi_minor_axis': 6356752.314, # 'sweep_angle_axis': None, }) ds = ds_base.copy() ds.attrs['area'] = geos new_ds, grid_mapping = area2gridmapping(ds) self.assertEqual(new_ds.attrs['grid_mapping'], 'geos') _gm_matches(grid_mapping, geos_expected) # e) oblique Mercator area = pyresample.geometry.AreaDefinition( area_id='omerc_otf', description='On-the-fly omerc area', proj_id='omerc', projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', 'proj': 'omerc', 'units': 'm'}, width=2837, height=5940, area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] ) omerc_dict = {'azimuth_of_central_line': 9.02638777018478, 'false_easting': 0., 'false_northing': 0., # 'gamma': 0, # this is not CF compliant 'grid_mapping_name': "oblique_mercator", 'latitude_of_projection_origin': -0.256794486098476, 'longitude_of_projection_origin': 13.7888658224205, # 'prime_meridian_name': "Greenwich", 'reference_ellipsoid_name': "WGS 84"} omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) ds = ds_base.copy() ds.attrs['area'] = area new_ds, grid_mapping = area2gridmapping(ds) self.assertEqual(new_ds.attrs['grid_mapping'], 'omerc_otf') _gm_matches(grid_mapping, omerc_expected) # f) Projection that has a representation but no explicit a/b h = 35785831. 
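# Case f) uses the same datum/ellps-based geostationary definition as case d),
# but here the expected attributes additionally include the
# reference_ellipsoid_name ('WGS 84') resolved from the datum.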
geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', 'lat_0': 0, 'lon_0': 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, attrs={'perspective_point_height': h, 'latitude_of_projection_origin': 0, 'longitude_of_projection_origin': 0, 'grid_mapping_name': 'geostationary', 'reference_ellipsoid_name': 'WGS 84', }) ds = ds_base.copy() ds.attrs['area'] = geos new_ds, grid_mapping = area2gridmapping(ds) self.assertEqual(new_ds.attrs['grid_mapping'], 'geos') _gm_matches(grid_mapping, geos_expected) def test_area2lonlat(self): """Test the conversion from areas to lon/lat.""" import dask.array as da import pyresample.geometry import xarray as xr from satpy.writers.cf_writer import area2lonlat area = pyresample.geometry.AreaDefinition( 'seviri', 'Native SEVIRI grid', 'geos', "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 2, 2, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) res = area2lonlat(dataarray) # original should be unmodified self.assertNotIn('longitude', dataarray.coords) self.assertEqual(set(res.coords), {'longitude', 'latitude'}) lat = res['latitude'] lon = res['longitude'] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() area = pyresample.geometry.AreaDefinition( 'seviri', 'Native SEVIRI grid', 'geos', "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 10, 10, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=da.from_array(np.arange(3*10*10).reshape(3, 10, 10), chunks=(1, 5, 5)), dims=('bands', 'y', 'x'), attrs={'area': area}) res = area2lonlat(dataarray) # original should be unmodified self.assertNotIn('longitude', dataarray.coords) self.assertEqual(set(res.coords), {'longitude', 'latitude'}) lat = res['latitude'] lon = res['longitude'] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" import importlib import sys import pyproj # noqa 401 old_version = sys.modules['pyproj'].__version__ sys.modules['pyproj'].__version__ = "1.9.6" try: importlib.reload(sys.modules['satpy.writers.cf_writer']) finally: # Tear down sys.modules['pyproj'].__version__ = old_version importlib.reload(sys.modules['satpy.writers.cf_writer']) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) scn['test-array'] = xr.DataArray([[1, 2, 3]], dims=('y', 'x'), attrs=dict(start_time=start_time, end_time=end_time, 
prerequisites=[make_dsq(name='hej')])) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: self.assertEqual(f.attrs['Conventions'], 'CF-1.7') self.assertIn('Created by pytroll/satpy on', f.attrs['history']) def test_global_attr_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" import xarray as xr from satpy import Scene scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) scn['test-array'] = xr.DataArray([[1, 2, 3]], dims=('y', 'x'), attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dsq(name='hej')])) header_attrs = {} header_attrs['history'] = ('TEST add history',) header_attrs['Conventions'] = 'CF-1.7, ACDD-1.3' with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs) with xr.open_dataset(filename) as f: self.assertEqual(f.attrs['Conventions'], 'CF-1.7, ACDD-1.3') self.assertIn('TEST add history\n', f.attrs['history']) self.assertIn('Created by pytroll/satpy on', f.attrs['history']) class TestCFWriterData(unittest.TestCase): """Test case for CF writer where data arrays are needed.""" def setUp(self): """Create some test data.""" import pyresample.geometry import xarray as xr data = [[75, 2], [3, 4]] y = [1, 2] x = [1, 2] geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) self.datasets = {'var1': xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x}), 'var2': xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x}), 'lat': xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x}), 'lon': xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x})} self.datasets['lat'].attrs['standard_name'] = 'latitude' self.datasets['var1'].attrs['standard_name'] = 'dummy' self.datasets['var2'].attrs['standard_name'] = 'dummy' self.datasets['var2'].attrs['area'] = geos self.datasets['var1'].attrs['area'] = geos self.datasets['lat'].attrs['name'] = 'lat' self.datasets['var1'].attrs['name'] = 'var1' self.datasets['var2'].attrs['name'] = 'var2' self.datasets['lon'].attrs['name'] = 'lon' def test_dataset_is_projection_coords(self): """Test the dataset_is_projection_coords function.""" from satpy.writers.cf_writer import dataset_is_projection_coords self.assertTrue(dataset_is_projection_coords(self.datasets['lat'])) self.assertFalse(dataset_is_projection_coords(self.datasets['var1'])) def test_has_projection_coords(self): """Test the has_projection_coords function.""" from satpy.writers.cf_writer import has_projection_coords self.assertTrue(has_projection_coords(self.datasets)) self.datasets['lat'].attrs['standard_name'] = 'dummy' self.assertFalse(has_projection_coords(self.datasets)) @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None) def test_collect_datasets_with_latitude_named_lat(self, *mocks): """Test collecting CF datasets with latitude named lat.""" from operator import getitem from satpy.writers.cf_writer import CFWriter self.datasets_list = [self.datasets[key] for key in self.datasets] self.datasets_list_no_latlon = [self.datasets[key] for key in ['var1', 'var2']] # Collect datasets writer = CFWriter() datas, start_times, end_times = writer._collect_datasets(self.datasets_list, include_lonlats=True) datas2, start_times, end_times = 
writer._collect_datasets(self.datasets_list_no_latlon, include_lonlats=True) # Test results self.assertEqual(len(datas), 5) self.assertEqual(set(datas.keys()), {'var1', 'var2', 'lon', 'lat', 'geos'}) self.assertRaises(KeyError, getitem, datas['var1'], 'latitude') self.assertRaises(KeyError, getitem, datas['var1'], 'longitude') self.assertEqual(datas2['var1']['latitude'].attrs['name'], 'latitude') self.assertEqual(datas2['var1']['longitude'].attrs['name'], 'longitude') class EncodingUpdateTest(unittest.TestCase): """Test update of netCDF encoding.""" def setUp(self): """Create fake data for testing.""" import xarray as xr self.ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, coords={'y': [1, 2], 'x': [3, 4], 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) self.ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, coords={'y': [1, 2], 'x': [3, 4], 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) def test_dataset_name_digit(self): """Test data with dataset name staring with a digit.""" from satpy.writers.cf_writer import update_encoding # Dataset with name staring with digit ds = self.ds_digit kwargs = {'encoding': {'1': {'dtype': 'float32'}, '2': {'dtype': 'float32'}}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs, numeric_name_prefix='CHANNEL_') self.assertDictEqual(enc, {'y': {'_FillValue': None}, 'x': {'_FillValue': None}, 'CHANNEL_1': {'dtype': 'float32'}, 'CHANNEL_2': {'dtype': 'float32'}}) self.assertDictEqual(other_kwargs, {'other': 'kwargs'}) def test_without_time(self): """Test data with no time dimension.""" from satpy.writers.cf_writer import update_encoding # Without time dimension ds = self.ds.chunk(2) kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) self.assertDictEqual(enc, {'y': {'_FillValue': None}, 'x': {'_FillValue': None}, 'lon': {'chunksizes': (2, 2)}, 'foo': {'chunksizes': (2, 2)}, 'bar': {'chunksizes': (1, 1)}}) self.assertDictEqual(other_kwargs, {'other': 'kwargs'}) # Chunksize may not exceed shape ds = self.ds.chunk(8) kwargs = {'encoding': {}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) self.assertDictEqual(enc, {'y': {'_FillValue': None}, 'x': {'_FillValue': None}, 'lon': {'chunksizes': (2, 2)}, 'foo': {'chunksizes': (2, 2)}, 'bar': {'chunksizes': (2, 2)}}) def test_with_time(self): """Test data with a time dimension.""" from satpy.writers.cf_writer import update_encoding # With time dimension ds = self.ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]}) kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) self.assertDictEqual(enc, {'y': {'_FillValue': None}, 'x': {'_FillValue': None}, 'lon': {'chunksizes': (2, 2)}, 'foo': {'chunksizes': (1, 2, 2)}, 'bar': {'chunksizes': (1, 1, 1)}, 'time': {'_FillValue': None, 'calendar': 'proleptic_gregorian', 'units': 'days since 2009-07-01 12:15:00'}, 'time_bnds': {'_FillValue': None, 'calendar': 'proleptic_gregorian', 'units': 'days since 2009-07-01 12:15:00'}}) # User-defined encoding may not be altered self.assertDictEqual(kwargs['encoding'], {'bar': {'chunksizes': (1, 1, 1)}}) satpy-0.34.0/satpy/tests/writer_tests/test_geotiff.py000066400000000000000000000162701420401153000230260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the geotiff writer.""" import unittest from unittest import mock import numpy as np class TestGeoTIFFWriter(unittest.TestCase): """Test the GeoTIFF Writer class.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def _get_test_datasets(self): """Create a single test dataset.""" from datetime import datetime import dask.array as da import xarray as xr ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow()} ) return [ds1] def test_init(self): """Test creating the writer with no arguments.""" from satpy.writers.geotiff import GeoTIFFWriter GeoTIFFWriter() def test_simple_write(self): """Test basic writer operation.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) w.save_datasets(datasets) def test_simple_delayed_write(self): """Test writing can be delayed.""" import dask.array as da from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) # when we switch to rio_save on XRImage then this will be sources # and targets res = w.save_datasets(datasets, compute=False) # this will fail if rasterio isn't installed self.assertIsInstance(res, tuple) # two lists, sources and destinations self.assertEqual(len(res), 2) self.assertIsInstance(res[0], list) self.assertIsInstance(res[1], list) self.assertIsInstance(res[0][0], da.Array) da.store(res[0], res[1]) for target in res[1]: if hasattr(target, 'close'): target.close() def test_colormap_write(self): """Test writing an image with a colormap.""" from trollimage.colormap import spectral from trollimage.xrimage import XRImage from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) # we'd have to customize enhancements to test this through # save_datasets. We'll use `save_image` as a workaround. img = XRImage(datasets[0]) img.palettize(spectral) w.save_image(img, keep_palette=True) def test_float_write(self): """Test that geotiffs can be written as floats. NOTE: Does not actually check that the output is floats. 
""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir, enhance=False, dtype=np.float32) w.save_datasets(datasets) def test_dtype_for_enhance_false(self): """Test that dtype of dataset is used if parameters enhance=False and dtype=None.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir, enhance=False) with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) self.assertEqual(save_method.call_args[1]['dtype'], np.float64) def test_dtype_for_enhance_false_and_given_dtype(self): """Test that dtype of dataset is used if enhance=False and dtype=uint8.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir, enhance=False, dtype=np.uint8) with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) self.assertEqual(save_method.call_args[1]['dtype'], np.uint8) def test_fill_value_from_config(self): """Test fill_value coming from the writer config.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) self.assertEqual(save_method.call_args[1]['fill_value'], 128) def test_tags(self): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(tags={'test1': 1}, base_dir=self.base_dir) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, tags={'test2': 2}, compute=False) called_tags = save_method.call_args[1]['tags'] self.assertDictEqual(called_tags, {'test1': 1, 'test2': 2}) def test_scale_offset(self): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(tags={'test1': 1}, base_dir=self.base_dir) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, tags={'test2': 2}, compute=False, include_scale_offset=True) called_include = save_method.call_args[1]['include_scale_offset_tags'] self.assertTrue(called_include) def test_tiled_value_from_config(self): """Test tiled value coming from the writer config.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) self.assertEqual(save_method.call_args[1]['tiled'], True) satpy-0.34.0/satpy/tests/writer_tests/test_mitiff.py000066400000000000000000001530161420401153000226610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the mitiff writer. Based on the test for geotiff writer """ import unittest class TestMITIFFWriter(unittest.TestCase): """Test the MITIFF Writer class.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def _get_test_datasets(self): """Create a datasets list.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': '1', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': ['1'], 'calibration': 'reflectance', 'metadata_requirements': { 'order': ['1'], 'config': { '1': {'alias': '1-VIS0.63', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'}, }, 'translate': {'1': '1', }, 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': '4', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': ['4'], 'calibration': 'brightness_temperature', 'metadata_requirements': { 'order': ['4'], 'config': { '4': {'alias': '4-IR10.8', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, }, 'translate': {'4': '4', }, 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} } ) return [ds1, ds2] def _get_test_datasets_sensor_set(self): """Create a datasets list.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': '1', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': {'TEST_SENSOR_NAME'}, 'area': area_def, 'prerequisites': ['1'], 'calibration': 'reflectance', 'metadata_requirements': { 'order': ['1'], 'config': { '1': {'alias': '1-VIS0.63', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'}, }, 'translate': {'1': '1', }, 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': '4', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': {'TEST_SENSOR_NAME'}, 'area': area_def, 'prerequisites': ['4'], 'calibration': 'brightness_temperature', 'metadata_requirements': { 'order': ['4'], 'config': { '4': {'alias': '4-IR10.8', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, }, 'translate': {'4': '4', }, 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} } ) return [ds1, ds2] def _get_test_dataset(self, bands=3): """Create a single test dataset.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), dims=('bands', 'y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': ['1', '2', '3']} ) return ds1 def _get_test_one_dataset(self): """Create a single test dataset.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. h=36000. +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'avhrr', 'area': area_def, 'prerequisites': [10.8]} ) return ds1 def _get_test_one_dataset_sensor_set(self): """Create a single test dataset.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. h=36000. +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': {'avhrr'}, 'area': area_def, 'prerequisites': [10.8]} ) return ds1 def _get_test_dataset_with_bad_values(self, bands=3): """Create a single test dataset.""" from datetime import datetime import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 data /= 5.605 data[0, 0] = np.nan # need a nan value data[0, 1] = 0. # Need a 0 value rgb_data = np.stack([data, data, data]) ds1 = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': ['1', '2', '3']}) return ds1 def _get_test_dataset_calibration(self, bands=6): """Create a single test dataset.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) prereqs = [ make_dsq(name='1', calibration='reflectance'), make_dsq(name='2', calibration='reflectance'), make_dsq(name='3', calibration='brightness_temperature'), make_dsq(name='4', calibration='brightness_temperature'), make_dsq(name='5', calibration='brightness_temperature'), make_dsq(name='6', calibration='reflectance') ] scene = Scene() scene["1"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'reflectance'}) scene["2"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'reflectance'}) scene["3"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'brightness_temperature'}) scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'brightness_temperature'}) scene["5"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'brightness_temperature'}) scene["6"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'reflectance'}) data = xr.concat(scene, 'bands', coords='minimal') bands = [] calibration = [] for p in scene: calibration.append(p.attrs['calibration']) bands.append(p.attrs['name']) data['bands'] = list(bands) new_attrs = {'name': 'datasets', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'test-sensor', 'area': area_def, 'prerequisites': prereqs, 'metadata_requirements': { 'order': ['1', '2', '3', '4', '5', '6'], 'config': { '1': {'alias': '1-VIS0.63', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'}, '2': {'alias': '2-VIS0.86', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'}, '3': {'alias': '3(3B)-IR3.7', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, '4': {'alias': '4-IR10.8', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, '5': {'alias': '5-IR11.5', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, '6': {'alias': '6(3A)-VIS1.6', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'} }, 'translate': {'1': '1', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6' }, 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) return ds1 def _get_test_dataset_calibration_one_dataset(self, bands=1): """Create a single test dataset.""" from datetime import datetime import dask.array as da import xarray as xr 
from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) prereqs = [make_dsq(name='4', calibration='brightness_temperature')] scene = Scene() scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'brightness_temperature'}) data = scene['4'] calibration = [] for p in scene: calibration.append(p.attrs['calibration']) new_attrs = {'name': 'datasets', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'test-sensor', 'area': area_def, 'prerequisites': prereqs, 'metadata_requirements': { 'order': ['4'], 'config': { '4': {'alias': 'BT', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, }, 'translate': {'4': '4', }, 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) return ds1 def _get_test_dataset_three_bands_two_prereq(self, bands=3): """Create a single test dataset.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict from satpy.tests.utils import make_dsq area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], dims=('bands', 'y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': [make_dsq(name='1', calibration='reflectance'), make_dsq(name='2', calibration='reflectance')]} ) return ds1 def _get_test_dataset_three_bands_prereq(self, bands=3): """Create a single test dataset.""" from datetime import datetime import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict from satpy.tests.utils import make_dsq area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], dims=('bands', 'y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': [make_dsq(wavelength=0.6, modifiers=('sunz_corrected',)), make_dsq(wavelength=0.8, modifiers=('sunz_corrected',)), 10.8]}) return ds1 def test_init(self): """Test creating the writer with no arguments.""" from satpy.writers.mitiff import MITIFFWriter MITIFFWriter() def test_simple_write(self): """Test basic writer operation.""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_dataset() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) def test_save_datasets(self): """Test basic writer operation save_datasets.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.full((100, 200), 0) dataset = self._get_test_datasets() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( start_time=dataset[0].attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_save_datasets_sensor_set(self): """Test basic writer operation save_datasets.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.full((100, 200), 0) dataset = self._get_test_datasets_sensor_set() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( start_time=dataset[0].attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_save_one_dataset(self): """Test basic writer operation with one dataset ie. no bands.""" import os from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_one_dataset() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) tif = TIFF.open(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') for key in imgdesc: if 'In this file' in key: self.assertEqual(key, ' Channels: 1 In this file: 1') def test_save_one_dataset_sesnor_set(self): """Test basic writer operation with one dataset ie. 
no bands.""" import os from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_one_dataset_sensor_set() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) tif = TIFF.open(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') for key in imgdesc: if 'In this file' in key: self.assertEqual(key, ' Channels: 1 In this file: 1') def test_save_dataset_with_calibration(self): """Test writer operation with calibration.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected_ir = np.full((100, 200), 255) expected_vis = np.full((100, 200), 0) expected = np.stack([expected_vis, expected_vis, expected_ir, expected_ir, expected_ir, expected_vis]) expected_key_channel = ['Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 ' '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', 'Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' 
'63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 ' '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', u'Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' '45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 ' '34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 ' '23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 ' '12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 ' '-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 ' '-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 ' '-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 ' '-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 ' '-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 ' '-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 ' '-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 ' '-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 ' '-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 ' '-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 ' '-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 ' '-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 ' '-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 ' '-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 ' '-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 ' '-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 ' '-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', u'Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' '45.29 ' '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' '-68.43 -69.22 -70.00 -70.78 -71.57 
-72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', u'Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' '45.29 ' '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', 'Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 ' '1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 ' '8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 ' '14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 ' '19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 ' '25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 ' '30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 ' '36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 ' '41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 ' '47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 ' 
'52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 ' '58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 ' '63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 ' '69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 ' '74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 ' '80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 ' '85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 ' '90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 ' '96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]'] dataset = self._get_test_dataset_calibration() w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) w.save_dataset(dataset) filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( start_time=dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: if 'Table_calibration' in key: found_table_calibration = True if '1-VIS0.63' in key: self.assertEqual(key, expected_key_channel[0]) number_of_calibrations += 1 elif '2-VIS0.86' in key: self.assertEqual(key, expected_key_channel[1]) number_of_calibrations += 1 elif '3(3B)-IR3.7' in key: self.assertEqual(key, expected_key_channel[2]) number_of_calibrations += 1 elif '4-IR10.8' in key: self.assertEqual(key, expected_key_channel[3]) number_of_calibrations += 1 elif '5-IR11.5' in key: self.assertEqual(key, expected_key_channel[4]) number_of_calibrations += 1 elif '6(3A)-VIS1.6' in key: self.assertEqual(key, expected_key_channel[5]) number_of_calibrations += 1 else: self.fail("Not a valid channel description i the given key.") self.assertTrue(found_table_calibration, "Table_calibration is not found in the imagedescription.") self.assertEqual(number_of_calibrations, 6) for i, image in enumerate(tif.iter_images()): np.testing.assert_allclose(image, expected[i], atol=1.e-6, rtol=0) def test_save_dataset_with_calibration_one_dataset(self): """Test saving if mitiff as dataset with only one channel.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.full((100, 200), 255) expected_key_channel = [u'Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 ' '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 
-63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', ] dataset = self._get_test_dataset_calibration_one_dataset() w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) w.save_dataset(dataset) filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( start_time=dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: if 'Table_calibration' in key: found_table_calibration = True if 'BT' in key: self.assertEqual(key, expected_key_channel[0]) number_of_calibrations += 1 self.assertTrue(found_table_calibration, "Expected table_calibration is not found in the imagedescription.") self.assertEqual(number_of_calibrations, 1) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_save_dataset_with_bad_value(self): """Test writer operation with bad values.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.array([[0, 4, 1, 37, 73], [110, 146, 183, 219, 255]]) dataset = self._get_test_dataset_with_bad_values() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_convert_proj4_string(self): """Test conversion of geolocations.""" import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from satpy.writers.mitiff import MITIFFWriter checks = [{'epsg': '+init=EPSG:32631', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}, {'epsg': '+init=EPSG:32632', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}, {'epsg': '+init=EPSG:32633', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}, {'epsg': '+init=EPSG:32634', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}, {'epsg': '+init=EPSG:32635', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' 
'+y_0=1515.000000\n')}] for check in checks: area_def = AreaDefinition( 'test', 'test', 'test', check['epsg'], 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((10, 20), chunks=20), dims=('y', 'x'), attrs={'area': area_def} ) w = MITIFFWriter(filename='dummy.tif', base_dir=self.base_dir) proj4_string = w._add_proj4_string(ds1, ds1) self.assertEqual(proj4_string, check['proj4']) def test_save_dataset_palette(self): """Test writer operation as palette.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.full((100, 200), 0) exp_c = ([0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) color_map = [[0, 3], [1, 4], [2, 5]] pal_desc = ['test', 'test2'] unit = "Test" dataset = self._get_test_one_dataset() palette = {'palette': True, 'palette_color_map': color_map, 'palette_description': pal_desc, 'palette_unit': unit, 'palette_channel_name': dataset.attrs['name']} w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset, **palette) filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) # Need to check PHOTOMETRIC is 3, ie palette self.assertEqual(tif.GetField('PHOTOMETRIC'), 3) colormap = 
tif.GetField('COLORMAP') # Check the colormap of the palette image self.assertEqual(colormap, exp_c) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') found_color_info = False unit_name_found = False name_length_found = False name_length = 0 names = [] unit_name = None for key in imgdesc: if name_length_found and name_length > len(names): names.append(key) continue elif unit_name_found: name_length = int(key) name_length_found = True unit_name_found = False elif found_color_info: unit_name = key unit_name_found = True found_color_info = False elif 'COLOR INFO:' in key: found_color_info = True # Check the name of the palette description self.assertEqual(name_length, 2) # Check the name and unit name of the palette self.assertEqual(unit_name, ' Test') # Check the palette description of the palette self.assertEqual(names, [' test', ' test2']) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_simple_write_two_bands(self): """Test basic writer operation with 3 bands from 2 prerequisites.""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_dataset_three_bands_two_prereq() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) def test_get_test_dataset_three_bands_prereq(self): """Test basic writer operation with 3 bands with DataQuery prerequisites with missing name.""" import os from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter IMAGEDESCRIPTION = 270 dataset = self._get_test_dataset_three_bands_prereq() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') for element in imgdesc: if ' Channels:' in element: self.assertEqual(element, ' Channels: 3 In this file: 1 2 3') satpy-0.34.0/satpy/tests/writer_tests/test_ninjogeotiff.py000066400000000000000000000741111420401153000240620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for writing GeoTIFF files with NinJoTIFF tags.""" import datetime import logging import os import dask.array as da import numpy as np import pytest import xarray as xr from pyresample import create_area_def from satpy import Scene from satpy.writers import get_enhanced_image try: from math import prod except ImportError: # Remove when dropping Python < 3.8 from functools import reduce from operator import mul def prod(iterable): # type: ignore """Drop-in replacement for math.prod.""" return reduce(mul, iterable, 1) def _get_fake_da(lo, hi, shp, dtype="f4"): """Generate dask array with synthetic data. 
This is more or less a 2d linspace: it'll return a 2-d dask array of shape ``shp``, lowest value is ``lo``, highest value is ``hi``. """ return da.arange(lo, hi, (hi-lo)/prod(shp), chunks=50, dtype=dtype).reshape(shp) @pytest.fixture(scope="module") def test_area_tiny_eqc_sphere(): """Create 10x20 test equirectangular area centered on (40, -30), spherical geoid, m.""" shp = (10, 20) test_area = create_area_def( "test-area-eqc-sphere", {"proj": "eqc", "lat_ts": 0., "lat_0": 0., "lon_0": 0., "x_0": 0., "y_0": 0., "ellps": "sphere", "units": "m", "no_defs": None, "type": "crs"}, units="m", shape=shp, resolution=1000, center=(-3330000.0, 4440000.0)) return test_area @pytest.fixture(scope="module") def test_area_small_eqc_wgs84(): """Create 50x100 test equirectangular area centered on (50, 90), wgs84.""" shp = (50, 100) test_area = create_area_def( "test-area-eqc-wgs84", {"proj": "eqc", "lat_0": 2.5, "lon_0": 1., "ellps": "WGS84"}, units="m", shape=shp, resolution=1000, center=(10000000.0, 6000000.0)) return test_area @pytest.fixture(scope="module") def test_area_tiny_stereographic_wgs84(): """Create a 20x10 test stereographic area centered near the north pole, wgs84.""" shp = (20, 10) test_area = create_area_def( "test-area-north-stereo", {"proj": "stere", "lat_0": 75.0, "lon_0": 2.0, "lat_ts": 60.0, "ellps": "WGS84", "units": "m", "type": "crs"}, units="m", shape=shp, resolution=1000, center=(0.0, 1500000.0)) return test_area @pytest.fixture(scope="module") def test_area_tiny_antarctic(): """Create a 20x10 test stereographic area centered near the south pole, wgs84.""" shp = (20, 10) test_area = create_area_def( "test-area-south-stereo", {"proj": "stere", "lat_0": -75.0, "lon_0": 2.0, "lat_ts": 60.0, "ellps": "WGS84", "units": "m", "type": "crs"}, units="m", shape=shp, resolution=1000, center=(0.0, -1500000.0)) return test_area @pytest.fixture(scope="module") def test_area_northpole(): """Create a 20x10 test area centered exactly on the north pole. This has no well-defined central meridian so needs separate testing.
""" shp = (20, 10) test_area = create_area_def( "test-area-north-pole", {"proj": "stere", "lat_0": 90, "lat_ts": 60, "ellps": "WGS84"}, shape=shp, resolution=1000, center=(0.0, 15000000.0)) return test_area @pytest.fixture(scope="module") def test_area_merc(): """Create a mercator area.""" from pyproj import CRS shp = (20, 10) test_area = create_area_def( "test-area-merc", CRS("+proj=merc"), units="m", shape=shp, resolution=1000, center=(0.0, 0.0)) return test_area @pytest.fixture(scope="module") def test_area_weird(): """Create a weird area (interrupted goode homolosine) to test error handling.""" from pyproj import CRS shp = (20, 10) test_area = create_area_def( "test-area-north-stereo", CRS("+proj=igh"), units="m", shape=shp, resolution=1000, center=(0.0, 1500000.0)) return test_area @pytest.fixture(scope="module") def test_area_epsg4326(): """Test with EPSG4326 (latlong) area, which has no CRS coordinate operation.""" from pyproj import CRS shp = (16, 8) euro4326 = create_area_def( "epgs4326europa", CRS.from_epsg(4326), resolution=1/128, shape=shp, center=(0, 0)) return euro4326 @pytest.fixture(scope="module") def test_image_small_mid_atlantic_L(test_area_tiny_eqc_sphere): """Get a small test image in mode L, over Atlantic.""" arr = xr.DataArray( _get_fake_da(-80, 40, test_area_tiny_eqc_sphere.shape + (1,)), dims=("y", "x", "bands"), attrs={ "name": "test-small-mid-atlantic", "start_time": datetime.datetime(1985, 8, 13, 13, 0), "area": test_area_tiny_eqc_sphere}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_small_mid_atlantic_K_L(test_area_tiny_eqc_sphere): """Get a small test image in units K, mode L, over Atlantic.""" arr = xr.DataArray( _get_fake_da(-80+273.15, 40+273.15, test_area_tiny_eqc_sphere.shape + (1,)), dims=("y", "x", "bands"), attrs={ "name": "test-small-mid-atlantic", "start_time": datetime.datetime(1985, 8, 13, 13, 0), "area": test_area_tiny_eqc_sphere, "units": "K"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_large_asia_RGB(test_area_small_eqc_wgs84): """Get a large-ish test image in mode RGB, over Asia.""" arr = xr.DataArray( _get_fake_da(0, 255, test_area_small_eqc_wgs84.shape + (3,), "uint8"), dims=("y", "x", "bands"), coords={"bands": ["R", "G", "B"]}, attrs={ "name": "test-large-asia", "start_time": datetime.datetime(2015, 10, 21, 20, 25, 0), "area": test_area_small_eqc_wgs84, "mode": "RGB"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_small_arctic_P(test_area_tiny_stereographic_wgs84): """Get a small-ish test image in mode P, over Arctic.""" arr = xr.DataArray( _get_fake_da(0, 10, test_area_tiny_stereographic_wgs84.shape + (1,), "uint8"), dims=("y", "x", "bands"), coords={"bands": ["P"]}, attrs={ "name": "test-small-arctic", "start_time": datetime.datetime(2027, 8, 2, 8, 20), "area": test_area_tiny_stereographic_wgs84, "mode": "P"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_northpole(test_area_northpole): """Test image with area exactly on northpole.""" arr = xr.DataArray( _get_fake_da(1, 100, test_area_northpole.shape + (1,), "uint8"), dims=("y", "x", "bands"), coords={"bands": ["L"]}, attrs={ "name": "test-northpole", "start_time": datetime.datetime(1926, 5, 12, 0), "area": test_area_northpole, "mode": "L"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_weird(test_area_weird): """Get a small image with some weird properties to test error handling.""" da = xr.DataArray( _get_fake_da(1, 2, 
test_area_weird.shape + (2,), "uint8"), dims=("y", "x", "bands"), coords={"bands": ["L", "A"]}, attrs={ "name": "interrupted image", "start_time": datetime.datetime(1970, 1, 1), "area": test_area_weird, "mode": "LA"}) return get_enhanced_image(da) @pytest.fixture(scope="module") def test_image_rgba_merc(test_area_merc): """Get a small test image in mode RGBA and mercator.""" arr = xr.DataArray( _get_fake_da(-80, 40, test_area_merc.shape + (4,)), dims=("y", "x", "bands"), coords={"bands": ["R", "G", "B", "A"]}, attrs={ "name": "test-rgba", "start_time": datetime.datetime(2013, 2, 22, 12, 0), "area": test_area_merc, "mode": "RGBA"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_cmyk_antarctic(test_area_tiny_antarctic): """Get a small test image in mode CMYK on south pole.""" arr = xr.DataArray( _get_fake_da(-80, 40, test_area_tiny_antarctic.shape + (4,)), dims=("y", "x", "bands"), coords={"bands": ["C", "M", "Y", "K"]}, attrs={ "name": "test-cmyk", "start_time": datetime.datetime(2065, 11, 22, 11), "area": test_area_tiny_antarctic, "mode": "CMYK"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_latlon(test_area_epsg4326): """Get image with latlon areadefinition.""" arr = xr.DataArray( _get_fake_da(-50, 30, test_area_epsg4326.shape + (1,)), dims=("y", "x", "bands"), coords={"bands": ["L"]}, attrs={ "name": "test-latlon", "start_time": datetime.datetime(2001, 1, 1, 0), "area": test_area_epsg4326, "mode": "L"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def ntg1(test_image_small_mid_atlantic_L): """Create instance of NinJoTagGenerator class.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_small_mid_atlantic_L, 255, "quinoa.tif", ChannelID=900015, DataType="GORN", PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, DataSource="dowsing rod") @pytest.fixture(scope="module") def ntg2(test_image_large_asia_RGB): """Create instance of NinJoTagGenerator class.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_large_asia_RGB, 0, "seitan.tif", ChannelID=1000015, DataType="GORN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6400014) @pytest.fixture(scope="module") def ntg3(test_image_small_arctic_P): """Create instance of NinJoTagGenerator class.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_small_arctic_P, 255, "spelt.tif", ChannelID=800012, DataType="PPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014, OverFlightTime=42) @pytest.fixture(scope="module") def ntg_northpole(test_image_northpole): """Create NinJoTagGenerator with north pole image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_northpole, 255, "lentils.tif", ChannelID=900012, DataType="PORN", PhysicUnit="Temperature", PhysicValue="K", SatelliteNameID=7500014) @pytest.fixture(scope="module") def ntg_weird(test_image_weird): """Create NinJoTagGenerator instance with weird image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_weird, 12, "tempeh.tif", ChannelID=800012, DataType="PPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014) @pytest.fixture(scope="module") def ntg_no_fill_value(test_image_small_mid_atlantic_L): """Create instance of NinJoTagGenerator class.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( 
test_image_small_mid_atlantic_L, None, "bulgur.tif", ChannelID=900015, DataType="GORN", PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, DataSource="dowsing rod") @pytest.fixture(scope="module") def ntg_rgba(test_image_rgba_merc): """Create NinJoTagGenerator instance with RGBA image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_rgba_merc, 12, "soy.tif", ChannelID=800042, DataType="GORN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014) @pytest.fixture(scope="module") def ntg_cmyk(test_image_cmyk_antarctic): """Create NinJoTagGenerator instance with CMYK image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_cmyk_antarctic, 0, "tvp.tif", ChannelID=123042, DataType="PPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014) @pytest.fixture(scope="module") def ntg_latlon(test_image_latlon): """Create NinJoTagGenerator with latlon-area image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_latlon, 0, "latlon.tif", ChannelID=123456, DataType="GORN", PhysicUnit="%", PhysicValue="Reflectance", SatelliteNameID=654321) @pytest.fixture def patch_datetime_now(monkeypatch): """Get a fake datetime.datetime.now().""" # Source: https://stackoverflow.com/a/20503374/974555, CC-BY-SA 4.0 class mydatetime(datetime.datetime): """Drop-in replacement for datetime.datetime.""" @classmethod def now(cls, tz=datetime.timezone.utc): """Drop-in replacement for datetime.datetime.now.""" return datetime.datetime(2033, 5, 18, 3, 33, 20, tzinfo=tz) monkeypatch.setattr(datetime, 'datetime', mydatetime) def test_write_and_read_file(test_image_small_mid_atlantic_L, tmp_path): """Test that it writes a GeoTIFF with the appropriate NinJo-tags.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_dataset( test_image_small_mid_atlantic_L.data, filename=fn, fill_value=0, blockxsize=128, blockysize=128, compress="lzw", predictor=2, PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") src = rasterio.open(fn) tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.4653780307919959) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -79.86837954904149) def test_write_and_read_file_RGB(test_image_large_asia_RGB, tmp_path): """Test writing and reading RGB.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_dataset( test_image_large_asia_RGB.data, filename=fn, fill_value=0, PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") src = rasterio.open(fn) tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" assert "ninjo_Gradient" not in tgs.keys() assert "ninjo_AxisIntercept" not in tgs.keys() assert tgs["ninjo_PhysicValue"] == "N/A" def test_write_and_read_file_LA(test_image_latlon, tmp_path): """Test writing and reading LA image.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_dataset( test_image_latlon.data, filename=fn, fill_value=None, # to make 
it LA PhysicUnit="%", PhysicValue="Reflectance", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") src = rasterio.open(fn) assert len(src.indexes) == 2 # mode LA tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.30816176470588236) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -49.603125) assert tgs["ninjo_PhysicValue"] == "Reflectance" assert tgs["ninjo_TransparentPixel"] == "-1" # meaning not set def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path): """Test writing and reading P image.""" import rasterio from trollimage.colormap import Colormap from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_image( test_image_small_arctic_P, filename=fn, fill_value=255, PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6400014, ChannelID=900015, DataType="PPRN", DataSource="dowsing rod", keep_palette=True, cmap=Colormap(*enumerate(zip(*([np.linspace(0, 1, 256)]*3))))) src = rasterio.open(fn) assert len(src.indexes) == 1 # mode P assert src.colorinterp[0] == rasterio.enums.ColorInterp.palette tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" def test_write_and_read_file_units( test_image_small_mid_atlantic_K_L, tmp_path, caplog): """Test that it writes a GeoTIFF with the appropriate NinJo-tags and units.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() with caplog.at_level(logging.DEBUG): ngtw.save_dataset( test_image_small_mid_atlantic_K_L.data, filename=fn, fill_value=0, blockxsize=128, blockysize=128, compress="lzw", predictor=2, PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") assert "Adding offset for K → °C conversion" in caplog.text # a better test would be to check that the attributes haven't changed at # all, but that currently fails due to # https://github.com/pytroll/satpy/issues/2022 assert test_image_small_mid_atlantic_K_L.data.attrs["enhancement_history"][0] != {"scale": 1, "offset": 273.15} src = rasterio.open(fn) tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.465379, rtol=1e-5) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -79.86838) fn2 = os.fspath(tmp_path / "test2.tif") with caplog.at_level(logging.WARNING): ngtw.save_dataset( test_image_small_mid_atlantic_K_L.data, filename=fn2, fill_value=0, blockxsize=128, blockysize=128, compress="lzw", predictor=2, PhysicUnit="F", PhysicValue="Temperature", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") assert ("Writing F to ninjogeotiff headers, but " "data attributes have unit K. " "No conversion applied.") in caplog.text def test_write_and_read_via_scene(test_image_small_mid_atlantic_L, tmp_path): """Test that all attributes are written also when writing from scene. It appears that :func:`Satpy.Scene.save_dataset` does not pass the filename to the writer. Test that filename is still written to header when saving this way (the regular way). 
""" import rasterio sc = Scene() fn = os.fspath(tmp_path / "test-{name}.tif") sc["montanha-do-pico"] = test_image_small_mid_atlantic_L.data sc.save_dataset( "montanha-do-pico", writer="ninjogeotiff", filename=fn, fill_value=0, PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN") src = rasterio.open(tmp_path / "test-montanha-do-pico.tif") tgs = src.tags() assert tgs["ninjo_FileName"] == os.fspath(tmp_path / "test-montanha-do-pico.tif") def test_get_all_tags(ntg1, ntg3, ntg_latlon, ntg_northpole, caplog): """Test getting all tags from dataset.""" # test that passed, dynamic, and mandatory tags are all included, and # nothing more t1 = ntg1.get_all_tags() assert set(t1.keys()) == ( ntg1.fixed_tags.keys() | ntg1.passed_tags | ntg1.dynamic_tags.keys() | {"DataSource"}) # test that when extra tag is passed this is also included t3 = ntg3.get_all_tags() assert t3.keys() == ( ntg3.fixed_tags.keys() | ntg3.passed_tags | ntg3.dynamic_tags.keys() | {"OverFlightTime"}) assert t3["OverFlightTime"] == 42 # test that CentralMeridian skipped and warning logged with caplog.at_level(logging.DEBUG): t_latlon = ntg_latlon.get_all_tags() assert ("Unable to obtain value for optional NinJo tag CentralMeridian" in caplog.text) assert "CentralMeridian" not in t_latlon.keys() t_northpole = ntg_northpole.get_all_tags() assert "CentralMeridian" not in t_northpole.keys() def test_calc_single_tag_by_name(ntg1, ntg2, ntg3): """Test calculating single tag from dataset.""" assert ntg1.get_tag("Magic") == "NINJO" assert ntg1.get_tag("DataType") == "GORN" assert ntg2.get_tag("DataType") == "GORN" assert ntg3.get_tag("DataType") == "PPRN" assert ntg1.get_tag("DataSource") == "dowsing rod" with pytest.raises(ValueError): ntg1.get_tag("invalid") with pytest.raises(ValueError): ntg1.get_tag("OriginalHeader") with pytest.raises(ValueError): ntg1.get_tag("Gradient") def test_get_central_meridian(ntg1, ntg2, ntg3, ntg_latlon, ntg_northpole): """Test calculating the central meridian.""" cm = ntg1.get_central_meridian() assert isinstance(cm, float) np.testing.assert_allclose(cm, 0.0) np.testing.assert_allclose(ntg2.get_central_meridian(), 1.0) np.testing.assert_allclose(ntg3.get_central_meridian(), 2.0) with pytest.raises(AttributeError): # latlon area has no central meridian ntg_latlon.get_central_meridian() with pytest.raises(KeyError): # nor does area exactly on northpole ntg_northpole.get_central_meridian() def test_get_color_depth(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk): """Test extracting the color depth.""" cd = ntg1.get_color_depth() assert isinstance(cd, int) assert cd == 8 # mode L assert ntg2.get_color_depth() == 24 # mode RGB assert ntg3.get_color_depth() == 8 # mode P assert ntg_weird.get_color_depth() == 16 # mode LA assert ntg_rgba.get_color_depth() == 32 # mode RGBA with pytest.raises(ValueError): ntg_cmyk.get_color_depth() def test_get_creation_date_id(ntg1, ntg2, ntg3, patch_datetime_now): """Test getting the creation date ID. This is the time at which the file was created. This test believes it is run at 2033-5-18 05:33:20Z. 
""" cdid = ntg1.get_creation_date_id() assert isinstance(cdid, int) assert cdid == 2000000000 assert ntg2.get_creation_date_id() == 2000000000 assert ntg3.get_creation_date_id() == 2000000000 def test_get_date_id(ntg1, ntg2, ntg3): """Test getting the date ID.""" did = ntg1.get_date_id() assert isinstance(did, int) assert did == 492786000 assert ntg2.get_date_id() == 1445459100 assert ntg3.get_date_id() == 1817194800 def test_get_earth_radius_large(ntg1, ntg2, ntg3): """Test getting the Earth semi-major axis.""" erl = ntg1.get_earth_radius_large() assert isinstance(erl, float) np.testing.assert_allclose(erl, 6370997.0) np.testing.assert_allclose(ntg2.get_earth_radius_large(), 6378137.0) np.testing.assert_allclose(ntg3.get_earth_radius_large(), 6378137.0) def test_get_earth_radius_small(ntg1, ntg2, ntg3): """Test getting the Earth semi-minor axis.""" ers = ntg1.get_earth_radius_small() assert isinstance(ers, float) np.testing.assert_allclose(ers, 6370997.0) np.testing.assert_allclose(ntg2.get_earth_radius_small(), 6356752.314245179) np.testing.assert_allclose(ntg3.get_earth_radius_small(), 6356752.314245179) def test_get_filename(ntg1, ntg2, ntg3): """Test getting the filename.""" assert ntg1.get_filename() == "quinoa.tif" assert ntg2.get_filename() == "seitan.tif" assert ntg3.get_filename() == "spelt.tif" def test_get_min_gray_value_L(ntg1): """Test getting min gray value for mode L.""" mg = ntg1.get_min_gray_value() assert isinstance(mg.compute().item(), int) assert mg.compute() == 0 def test_get_min_gray_value_RGB(ntg2): """Test getting min gray value for RGB. Note that min/max gray value is mandatory in NinJo even for RGBs? """ assert ntg2.get_min_gray_value().compute().item() == 1 # fill value 0 def test_get_min_gray_value_P(ntg3): """Test getting min gray value for mode P.""" assert ntg3.get_min_gray_value().compute().item() == 0 def test_get_max_gray_value_L(ntg1): """Test getting max gray value for mode L.""" mg = ntg1.get_max_gray_value().compute().item() assert isinstance(mg, int) assert mg == 254 # fill value is 255 def test_get_max_gray_value_RGB(ntg2): """Test max gray value for RGB.""" assert ntg2.get_max_gray_value() == 255 @pytest.mark.xfail(reason="Needs GeoTIFF P fixes, see GH#1844") def test_get_max_gray_value_P(ntg3): """Test getting max gray value for mode P.""" assert ntg3.get_max_gray_value().compute().item() == 10 @pytest.mark.xfail(reason="not easy, not needed, not implemented") def test_get_meridian_east(ntg1, ntg2, ntg3): """Test getting east meridian.""" np.testing.assert_allclose(ntg1.get_meridian_east(), -29.048101549452294) np.testing.assert_allclose(ntg2.get_meridian_east(), 180.0) np.testing.assert_allclose(ntg3.get_meridian_east(), 99.81468125314737) @pytest.mark.xfail(reason="not easy, not needed, not implemented") def test_get_meridian_west(ntg1, ntg2, ntg3): """Test getting west meridian.""" np.testing.assert_allclose(ntg1.get_meridian_west(), -30.846745608241903) np.testing.assert_allclose(ntg2.get_meridian_east(), -180.0) np.testing.assert_allclose(ntg3.get_meridian_west(), 81.84837557075694) def test_get_projection(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk, ntg_latlon): """Test getting projection string.""" assert ntg1.get_projection() == "PLAT" assert ntg2.get_projection() == "PLAT" assert ntg3.get_projection() == "NPOL" assert ntg_cmyk.get_projection() == "SPOL" assert ntg_rgba.get_projection() == "MERC" assert ntg_latlon.get_projection() == "PLAT" with pytest.raises(ValueError): ntg_weird.get_projection() def test_get_ref_lat_1(ntg1, ntg2, 
ntg3, ntg_weird, ntg_latlon): """Test getting reference latitude 1.""" rl1 = ntg1.get_ref_lat_1() assert isinstance(rl1, float) np.testing.assert_allclose(rl1, 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_1(), 2.5) np.testing.assert_allclose(ntg3.get_ref_lat_1(), 75) with pytest.raises(ValueError): ntg_weird.get_ref_lat_1() with pytest.raises(AttributeError): ntg_latlon.get_ref_lat_1() @pytest.mark.xfail(reason="Not implemented, what is this?") def test_get_ref_lat_2(ntg1, ntg2, ntg3): """Test getting reference latitude 2.""" rl2 = ntg1.get_ref_lat_2() assert isinstance(rl2, float) np.testing.assert_allclose(rl2, 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_2(), 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_3(), 0.0) def test_get_transparent_pixel(ntg1, ntg2, ntg3, ntg_no_fill_value): """Test getting fill value.""" tp = ntg1.get_transparent_pixel() assert isinstance(tp, int) assert tp == 255 assert ntg2.get_transparent_pixel() == 0 assert ntg3.get_transparent_pixel() == 255 assert ntg_no_fill_value.get_transparent_pixel() == -1 def test_get_xmax(ntg1, ntg2, ntg3): """Test getting maximum x.""" xmax = ntg1.get_xmaximum() assert isinstance(xmax, int) assert xmax == 20 assert ntg2.get_xmaximum() == 100 assert ntg3.get_xmaximum() == 10 def test_get_ymax(ntg1, ntg2, ntg3): """Test getting maximum y.""" ymax = ntg1.get_ymaximum() assert isinstance(ymax, int) assert ymax == 10 assert ntg2.get_ymaximum() == 50 assert ntg3.get_ymaximum() == 20 def test_create_unknown_tags(test_image_small_arctic_P): """Test that unknown tags raise ValueError.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator with pytest.raises(ValueError): NinJoTagGenerator( test_image_small_arctic_P, 42, "quorn.tif", ChannelID=800012, DataType="GPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014, Locatie="Hozomeen") satpy-0.34.0/satpy/tests/writer_tests/test_ninjotiff.py000066400000000000000000000151271420401153000233710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
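# Usage sketch (illustrative only; 'out.tif' and ``dataset`` are hypothetical): the tests
# below exercise NinjoTIFFWriter roughly like this, assuming pyninjotiff is installed and
# ``dataset`` is an xarray.DataArray carrying a ``units`` attribute:
#
#     from satpy.writers.ninjotiff import NinjoTIFFWriter
#     ntw = NinjoTIFFWriter(tags={40000: 'NINJO'})
#     ntw.save_dataset(dataset, filename='out.tif', physic_unit='CELSIUS')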
"""Tests for the NinJoTIFF writer.""" import sys import unittest from unittest import mock import numpy as np import pytest import xarray as xr class FakeImage: """Fake image.""" def __init__(self, data, mode): """Init fake image.""" self.data = data self.mode = mode def get_scaling_from_history(self): """Return dummy scale and offset.""" return xr.DataArray(1), xr.DataArray(0) pyninjotiff_mock = mock.Mock() pyninjotiff_mock.ninjotiff = mock.Mock() @mock.patch.dict(sys.modules, {'pyninjotiff': pyninjotiff_mock, 'pyninjotiff.ninjotiff': pyninjotiff_mock.ninjotiff}) class TestNinjoTIFFWriter(unittest.TestCase): """The ninjo tiff writer tests.""" @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) def test_init(self): """Test the init.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ninjo_tags = {40000: 'NINJO'} ntw = NinjoTIFFWriter(tags=ninjo_tags) self.assertDictEqual(ntw.tags, ninjo_tags) @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) def test_dataset(self, iwsd): """Test saving a dataset.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv: ntw.save_dataset(dataset, physic_unit='CELSIUS') uconv.assert_called_once_with(dataset, 'K', 'CELSIUS') self.assertEqual(iwsd.call_count, 1) @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) def test_dataset_skip_unit_conversion(self, iwsd): """Test saving a dataset without unit conversion.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv: ntw.save_dataset(dataset, physic_unit='CELSIUS', convert_temperature_units=False) uconv.assert_not_called() self.assertEqual(iwsd.call_count, 1) @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) def test_image(self, iwsi, save_dataset): """Test saving an image.""" nt = pyninjotiff_mock.ninjotiff nt.reset_mock() from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) img = FakeImage(dataset, 'L') ret = ntw.save_image(img, filename='bla.tif', compute=False) nt.save.assert_called() assert(nt.save.mock_calls[0][2]['compute'] is False) assert(nt.save.mock_calls[0][2]['ch_min_measurement_unit'] < nt.save.mock_calls[0][2]['ch_max_measurement_unit']) assert(ret == nt.save.return_value) def test_convert_units_self(self): """Test that unit conversion to themselves do nothing.""" from satpy.writers.ninjotiff import convert_units from ..utils import make_fake_scene # ensure that converting from % to itself does not change the data sc = make_fake_scene( {"VIS006": np.arange(25, dtype="f4").reshape(5, 5)}, common_attrs={"units": "%"}) ds_in = sc["VIS006"] ds_out = convert_units(ds_in, "%", "%") np.testing.assert_array_equal(ds_in, ds_out) assert ds_in.attrs == ds_out.attrs def test_convert_units_temp(self): """Test that temperature unit conversions works as expected.""" # test converting between °C and K from satpy.writers.ninjotiff import convert_units from ..utils import make_fake_scene sc = make_fake_scene( {"IR108": 
np.arange(25, dtype="f4").reshape(5, 5)}, common_attrs={"units": "K"}) ds_in_k = sc["IR108"] for out_unit in ("C", "CELSIUS"): ds_out_c = convert_units(ds_in_k, "K", out_unit) np.testing.assert_array_almost_equal(ds_in_k - 273.15, ds_out_c) assert ds_in_k.attrs != ds_out_c.attrs assert ds_out_c.attrs["units"] == out_unit # test that keys aren't lost assert ds_out_c.attrs.keys() - ds_in_k.attrs.keys() <= {"units"} assert ds_in_k.attrs.keys() <= ds_out_c.attrs.keys() def test_convert_units_other(self): """Test that other unit conversions are not implemented.""" # test arbitrary different conversion from satpy.writers.ninjotiff import convert_units from ..utils import make_fake_scene sc = make_fake_scene( {"rain_rate": np.arange(25, dtype="f8").reshape(5, 5)}, common_attrs={"units": "millimeter/hour"}) ds_in = sc["rain_rate"] with pytest.raises(NotImplementedError): convert_units(ds_in, "millimeter/hour", "m/s") @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) def test_P_image_is_uint8(self, iwsi, save_dataset): """Test that a P-mode image is converted to uint8s.""" nt = pyninjotiff_mock.ninjotiff nt.reset_mock() from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3]).astype(int) img = FakeImage(dataset, 'P') ntw.save_image(img, filename='bla.tif', compute=False) assert nt.save.mock_calls[0][1][0].data.dtype == np.uint8 satpy-0.34.0/satpy/tests/writer_tests/test_simple_image.py000066400000000000000000000051201420401153000240260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
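# Usage sketch (illustrative only; the output directory and ``datasets`` are hypothetical):
# the tests below drive PillowWriter both eagerly and lazily, roughly like this, assuming
# ``datasets`` is a list of DataArrays with ``name`` and ``start_time`` attributes:
#
#     from satpy.writers import compute_writer_results
#     from satpy.writers.simple_image import PillowWriter
#     w = PillowWriter(base_dir='/tmp/out')
#     delayed = w.save_datasets(datasets, compute=False)
#     compute_writer_results(delayed)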
"""Tests for the simple image writer.""" import unittest class TestPillowWriter(unittest.TestCase): """Test Pillow/PIL writer.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @staticmethod def _get_test_datasets(): """Create DataArray for testing.""" from datetime import datetime import dask.array as da import xarray as xr ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow()} ) return [ds1] def test_init(self): """Test creating the default writer.""" from satpy.writers.simple_image import PillowWriter PillowWriter() def test_simple_write(self): """Test writing datasets with default behavior.""" from satpy.writers.simple_image import PillowWriter datasets = self._get_test_datasets() w = PillowWriter(base_dir=self.base_dir) w.save_datasets(datasets) def test_simple_delayed_write(self): """Test writing datasets with delayed computation.""" from dask.delayed import Delayed from satpy.writers import compute_writer_results from satpy.writers.simple_image import PillowWriter datasets = self._get_test_datasets() w = PillowWriter(base_dir=self.base_dir) res = w.save_datasets(datasets, compute=False) for r__ in res: self.assertIsInstance(r__, Delayed) r__.compute() compute_writer_results(res) satpy-0.34.0/satpy/tests/writer_tests/test_utils.py000066400000000000000000000023321420401153000225350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for writer utilities.""" import unittest import satpy.writers.utils as wutils class WriterUtilsTest(unittest.TestCase): """Test various writer utilities.""" def test_flatten_dict(self): """Test dictionary flattening.""" d = {'a': 1, 'b': {'c': 1, 'd': {'e': 1, 'f': {'g': [1, 2]}}}} expected = {'a': 1, 'b_c': 1, 'b_d_e': 1, 'b_d_f_g': [1, 2]} self.assertDictEqual(wutils.flatten_dict(d), expected) satpy-0.34.0/satpy/utils.py000066400000000000000000000351071420401153000156040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. 
If not, see . """Module defining various utilities.""" from __future__ import annotations import contextlib import logging import os import warnings from typing import Mapping import numpy as np import xarray as xr import yaml from yaml import BaseLoader try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader # type: ignore _is_logging_on = False TRACE_LEVEL = 5 def ensure_dir(filename): """Check if the dir of f exists, otherwise create it.""" directory = os.path.dirname(filename) if directory and not os.path.isdir(directory): os.makedirs(directory) def debug_on(deprecation_warnings=True): """Turn debugging logging on. Sets up a StreamHandler to to `sys.stderr` at debug level for all loggers, such that all debug messages (and log messages with higher severity) are logged to the standard error stream. By default, since Satpy 0.26, this also enables the global visibility of deprecation warnings. This can be suppressed by passing a false value. Args: deprecation_warnings (Optional[bool]): Switch on deprecation warnings. Defaults to True. Returns: None """ logging_on(logging.DEBUG) if deprecation_warnings: deprecation_warnings_on() def debug_off(): """Turn debugging logging off. This disables both debugging logging and the global visibility of deprecation warnings. """ logging_off() deprecation_warnings_off() @contextlib.contextmanager def debug(deprecation_warnings=True): """Context manager to temporarily set debugging on. Example:: >>> with satpy.utils.debug(): ... code_here() Args: deprecation_warnings (Optional[bool]): Switch on deprecation warnings. Defaults to True. """ debug_on(deprecation_warnings=deprecation_warnings) yield debug_off() def trace_on(): """Turn trace logging on.""" logging_on(TRACE_LEVEL) class _WarningManager: """Class to handle switching warnings on and off.""" filt = None _warning_manager = _WarningManager() def deprecation_warnings_on(): """Switch on deprecation warnings.""" warnings.filterwarnings("default", category=DeprecationWarning) _warning_manager.filt = warnings.filters[0] def deprecation_warnings_off(): """Switch off deprecation warnings.""" if _warning_manager.filt in warnings.filters: warnings.filters.remove(_warning_manager.filt) def logging_on(level=logging.WARNING): """Turn logging on.""" global _is_logging_on if not _is_logging_on: console = logging.StreamHandler() console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :" " %(name)s] %(message)s", '%Y-%m-%d %H:%M:%S')) console.setLevel(level) logging.getLogger('').addHandler(console) _is_logging_on = True log = logging.getLogger('') log.setLevel(level) for h in log.handlers: h.setLevel(level) def logging_off(): """Turn logging off.""" logging.getLogger('').handlers = [logging.NullHandler()] def get_logger(name): """Return logger with null handler added if needed.""" if not hasattr(logging.Logger, 'trace'): logging.addLevelName(TRACE_LEVEL, 'TRACE') def trace(self, message, *args, **kwargs): if self.isEnabledFor(TRACE_LEVEL): # Yes, logger takes its '*args' as 'args'. 
self._log(TRACE_LEVEL, message, args, **kwargs) logging.Logger.trace = trace log = logging.getLogger(name) return log def in_ipynb(): """Check if we are in a jupyter notebook.""" try: return 'ZMQ' in get_ipython().__class__.__name__ except NameError: return False # Spherical conversions def lonlat2xyz(lon, lat): """Convert lon lat to cartesian.""" lat = np.deg2rad(lat) lon = np.deg2rad(lon) x = np.cos(lat) * np.cos(lon) y = np.cos(lat) * np.sin(lon) z = np.sin(lat) return x, y, z def xyz2lonlat(x, y, z, asin=False): """Convert cartesian to lon lat.""" lon = np.rad2deg(np.arctan2(y, x)) if asin: lat = np.rad2deg(np.arcsin(z)) else: lat = np.rad2deg(np.arctan2(z, np.sqrt(x ** 2 + y ** 2))) return lon, lat def angle2xyz(azi, zen): """Convert azimuth and zenith to cartesian.""" azi = np.deg2rad(azi) zen = np.deg2rad(zen) x = np.sin(zen) * np.sin(azi) y = np.sin(zen) * np.cos(azi) z = np.cos(zen) return x, y, z def xyz2angle(x, y, z, acos=False): """Convert cartesian to azimuth and zenith.""" azi = np.rad2deg(np.arctan2(x, y)) if acos: zen = np.rad2deg(np.arccos(z)) else: zen = 90 - np.rad2deg(np.arctan2(z, np.sqrt(x ** 2 + y ** 2))) return azi, zen def proj_units_to_meters(proj_str): """Convert projection units from kilometers to meters.""" proj_parts = proj_str.split() new_parts = [] for itm in proj_parts: key, val = itm.split('=') key = key.strip('+') if key in ['a', 'b', 'h']: val = float(val) if val < 6e6: val *= 1000. val = '%.3f' % val if key == 'units' and val == 'km': continue new_parts.append('+%s=%s' % (key, val)) return ' '.join(new_parts) def _get_sunz_corr_li_and_shibata(cos_zen): return 24.35 / (2. * cos_zen + np.sqrt(498.5225 * cos_zen**2 + 1)) def atmospheric_path_length_correction(data, cos_zen, limit=88., max_sza=95.): """Perform Sun zenith angle correction. This function uses the correction method proposed by Li and Shibata (2006): https://doi.org/10.1175/JAS3682.1 The correction is limited to ``limit`` degrees (default: 88.0 degrees). For larger zenith angles, the correction is the same as at the ``limit`` if ``max_sza`` is `None`. The default behavior is to gradually reduce the correction past ``limit`` degrees up to ``max_sza`` where the correction becomes 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape. """ # Convert the zenith angle limit to cosine of zenith angle limit_rad = np.deg2rad(limit) limit_cos = np.cos(limit_rad) max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza # Cosine correction corr = _get_sunz_corr_li_and_shibata(cos_zen) # Use constant value (the limit) for larger zenith angles corr_lim = _get_sunz_corr_li_and_shibata(limit_cos) if max_sza is not None: # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) # invert the factor so maximum correction is done at `limit` and falls off later grad_factor = 1. - np.log(grad_factor + 1) / np.log(2) # make sure we don't make anything negative grad_factor = grad_factor.clip(0.) else: # Use constant value (the limit) for larger zenith angles grad_factor = 1. corr = corr.where(cos_zen > limit_cos, grad_factor * corr_lim) # Force "night" pixels to 0 (where SZA is invalid) corr = corr.where(cos_zen.notnull(), 0) return data * corr def get_satpos(dataset): """Get satellite position from dataset attributes. Preferences are: * Longitude & Latitude: Nadir, actual, nominal, projection * Altitude: Actual, nominal, projection A warning is issued when projection values have to be used because nothing else is available. 
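    Example (a minimal sketch; ``data_arr`` stands for any DataArray and the
    attribute values shown are only illustrative)::

        data_arr.attrs["orbital_parameters"] = {
            "satellite_nominal_longitude": 0.0,
            "satellite_nominal_latitude": 0.0,
            "satellite_nominal_altitude": 35785831.0,
        }
        lon, lat, alt = get_satpos(data_arr)
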
Returns: Geodetic longitude, latitude, altitude """ try: orb_params = dataset.attrs['orbital_parameters'] alt = _get_sat_altitude(orb_params) lon, lat = _get_sat_lonlat(orb_params) except KeyError: # Legacy lon = dataset.attrs['satellite_longitude'] lat = dataset.attrs['satellite_latitude'] alt = dataset.attrs['satellite_altitude'] return lon, lat, alt def _get_sat_altitude(orb_params): # Altitude try: alt = orb_params['satellite_actual_altitude'] except KeyError: try: alt = orb_params['satellite_nominal_altitude'] except KeyError: alt = orb_params['projection_altitude'] warnings.warn('Actual satellite altitude not available, using projection altitude instead.') return alt def _get_sat_lonlat(orb_params): # Longitude & Latitude try: lon = orb_params['nadir_longitude'] lat = orb_params['nadir_latitude'] except KeyError: try: lon = orb_params['satellite_actual_longitude'] lat = orb_params['satellite_actual_latitude'] except KeyError: try: lon = orb_params['satellite_nominal_longitude'] lat = orb_params['satellite_nominal_latitude'] except KeyError: lon = orb_params['projection_longitude'] lat = orb_params['projection_latitude'] warnings.warn('Actual satellite lon/lat not available, using projection centre instead.') return lon, lat def recursive_dict_update(d, u): """Recursive dictionary update. Copied from: http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth """ for k, v in u.items(): if isinstance(v, Mapping): r = recursive_dict_update(d.get(k, {}), v) d[k] = r else: d[k] = u[k] return d def _check_yaml_configs(configs, key): """Get a diagnostic for the yaml *configs*. *key* is the section to look for to get a name for the config at hand. """ diagnostic = {} for i in configs: for fname in i: with open(fname, 'r', encoding='utf-8') as stream: try: res = yaml.load(stream, Loader=UnsafeLoader) msg = 'ok' except yaml.YAMLError as err: stream.seek(0) res = yaml.load(stream, Loader=BaseLoader) if err.context == 'while constructing a Python object': msg = err.problem else: msg = 'error' finally: try: diagnostic[res[key]['name']] = msg except (KeyError, TypeError): # this object doesn't have a 'name' pass return diagnostic def _check_import(module_names): """Import the specified modules and provide status.""" diagnostics = {} for module_name in module_names: try: __import__(module_name) res = 'ok' except ImportError as err: res = str(err) diagnostics[module_name] = res return diagnostics def check_satpy(readers=None, writers=None, extras=None): """Check the satpy readers and writers for correct installation. Args: readers (list or None): Limit readers checked to those specified writers (list or None): Limit writers checked to those specified extras (list or None): Limit extras checked to those specified Returns: bool True if all specified features were successfully loaded. 
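    Example (a minimal sketch; the reader and writer names are only
    illustrative and must be available in your installation)::

        from satpy.utils import check_satpy
        check_satpy()  # check everything that is configured
        check_satpy(readers=["abi_l1b"], writers=["awips_tiled"])
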
""" from satpy.readers import configs_for_reader from satpy.writers import configs_for_writer print('Readers') print('=======') for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), 'reader').items()): print(reader + ': ', res) print() print('Writers') print('=======') for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), 'writer').items()): print(writer + ': ', res) print() print('Extras') print('======') module_names = extras if extras is not None else ('cartopy', 'geoviews') for module_name, res in sorted(_check_import(module_names).items()): print(module_name + ': ', res) print() def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: """Run :func:`xarray.unify_chunks` if input dimensions are all the same size. This is mostly used in :class:`satpy.composites.CompositeBase` to safe guard against running :func:`dask.array.core.map_blocks` with arrays of different chunk sizes. Doing so can cause unexpected results or errors. However, xarray's ``unify_chunks`` will raise an exception if dimensions of the provided DataArrays are different sizes. This is a common case for Satpy. For example, the "bands" dimension may be 1 (L), 2 (LA), 3 (RGB), or 4 (RGBA) for most compositor operations that combine other composites together. """ if not hasattr(xr, "unify_chunks"): return data_arrays if not _all_dims_same_size(data_arrays): return data_arrays return tuple(xr.unify_chunks(*data_arrays)) def _all_dims_same_size(data_arrays: tuple[xr.DataArray, ...]) -> bool: known_sizes: dict[str, int] = {} for data_arr in data_arrays: for dim, dim_size in data_arr.sizes.items(): known_size = known_sizes.setdefault(dim, dim_size) if dim_size != known_size: # this dimension is a different size than previously found # xarray.unify_chunks will error out if we tried to use it return False return True @contextlib.contextmanager def ignore_invalid_float_warnings(): """Ignore warnings generated for working with NaN/inf values. Numpy and dask sometimes don't like NaN or inf values in normal function calls. This context manager hides/ignores them inside its context. Examples: Use around numpy operations that you expect to produce warnings:: with ignore_invalid_float_warnings(): np.nanmean(np.nan) """ with np.errstate(invalid="ignore"), warnings.catch_warnings(): warnings.simplefilter("ignore", RuntimeWarning) yield satpy-0.34.0/satpy/writers/000077500000000000000000000000001420401153000155635ustar00rootroot00000000000000satpy-0.34.0/satpy/writers/__init__.py000066400000000000000000001341231420401153000177000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Shared objects of the various writer classes. For now, this includes enhancement configuration utilities. 
""" import logging import os import warnings import dask.array as da import numpy as np import xarray as xr import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader # type: ignore from trollimage.xrimage import XRImage from trollsift import parser from satpy import CHUNK_SIZE from satpy._config import config_search_paths, glob_config from satpy.aux_download import DataDownloadMixin from satpy.plugin_base import Plugin from satpy.resample import get_area_def from satpy.utils import recursive_dict_update LOG = logging.getLogger(__name__) def read_writer_config(config_files, loader=UnsafeLoader): """Read the writer `config_files` and return the info extracted.""" conf = {} LOG.debug('Reading %s', str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: writer_info = conf['writer'] except KeyError: raise KeyError( "Malformed config file {}: missing writer 'writer'".format( config_files)) writer_info['config_files'] = config_files return writer_info def load_writer_configs(writer_configs, **writer_kwargs): """Load the writer from the provided `writer_configs`.""" try: writer_info = read_writer_config(writer_configs) writer_class = writer_info['writer'] except (ValueError, KeyError, yaml.YAMLError): raise ValueError("Invalid writer configs: " "'{}'".format(writer_configs)) init_kwargs, kwargs = writer_class.separate_init_kwargs(writer_kwargs) writer = writer_class(config_files=writer_configs, **init_kwargs) return writer, kwargs def load_writer(writer, **writer_kwargs): """Find and load writer `writer` in the available configuration files.""" config_fn = writer + ".yaml" if "." not in writer else writer config_files = config_search_paths(os.path.join("writers", config_fn)) writer_kwargs.setdefault("config_files", config_files) if not writer_kwargs['config_files']: raise ValueError("Unknown writer '{}'".format(writer)) try: return load_writer_configs(writer_kwargs['config_files'], **writer_kwargs) except ValueError: raise ValueError("Writer '{}' does not exist or could not be " "loaded".format(writer)) def configs_for_writer(writer=None): """Generate writer configuration files for one or more writers. Args: writer (Optional[str]): Yield configs only for this writer Returns: Generator of lists of configuration files """ if writer is not None: if not isinstance(writer, (list, tuple)): writer = [writer] # given a config filename or writer name config_files = [w if w.endswith('.yaml') else w + '.yaml' for w in writer] else: writer_configs = glob_config(os.path.join('writers', '*.yaml')) config_files = set(writer_configs) for config_file in config_files: config_basename = os.path.basename(config_file) writer_configs = config_search_paths( os.path.join("writers", config_basename)) if not writer_configs: LOG.warning("No writer configs found for '%s'", writer) continue yield writer_configs def available_writers(as_dict=False): """Available writers based on current configuration. Args: as_dict (bool): Optionally return writer information as a dictionary. Default: False Returns: List of available writer names. If `as_dict` is `True` then a list of dictionaries including additionally writer information is returned. 
""" writers = [] for writer_configs in configs_for_writer(): try: writer_info = read_writer_config(writer_configs) except (KeyError, IOError, yaml.YAMLError): LOG.warning("Could not import writer config from: %s", writer_configs) LOG.debug("Error loading YAML", exc_info=True) continue writers.append(writer_info if as_dict else writer_info['name']) return writers def _determine_mode(dataset): if "mode" in dataset.attrs: return dataset.attrs["mode"] if dataset.ndim == 2: return "L" if dataset.shape[0] == 2: return "LA" if dataset.shape[0] == 3: return "RGB" if dataset.shape[0] == 4: return "RGBA" raise RuntimeError("Can't determine 'mode' of dataset: %s" % str(dataset)) def _burn_overlay(img, image_metadata, area, cw_, overlays): """Burn the overlay in the image array.""" del image_metadata cw_.add_overlay_from_dict(overlays, area, background=img) return img def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=None, level_coast=None, level_borders=None, fill_value=None, grid=None, overlays=None): """Add coastline, political borders and grid(graticules) to image. Uses ``color`` for feature colors where ``color`` is a 3-element tuple of integers between 0 and 255 representing (R, G, B). .. warning:: This function currently loses the data mask (alpha band). ``resolution`` is chosen automatically if None (default), otherwise it should be one of: +-----+-------------------------+---------+ | 'f' | Full resolution | 0.04 km | +-----+-------------------------+---------+ | 'h' | High resolution | 0.2 km | +-----+-------------------------+---------+ | 'i' | Intermediate resolution | 1.0 km | +-----+-------------------------+---------+ | 'l' | Low resolution | 5.0 km | +-----+-------------------------+---------+ | 'c' | Crude resolution | 25 km | +-----+-------------------------+---------+ ``grid`` is a dictionary with key values as documented in detail in pycoast eg. overlay={'grid': {'major_lonlat': (10, 10), 'write_text': False, 'outline': (224, 224, 224), 'width': 0.5}} Here major_lonlat is plotted every 10 deg for both longitude and latitude, no labels for the grid lines are plotted, the color used for the grid lines is light gray, and the width of the gratucules is 0.5 pixels. For grid if aggdraw is used, font option is mandatory, if not ``write_text`` is set to False:: font = aggdraw.Font('black', '/usr/share/fonts/truetype/msttcorefonts/Arial.ttf', opacity=127, size=16) """ if area is None: raise ValueError("Area of image is None, can't add overlay.") from pycoast import ContourWriterAGG if isinstance(area, str): area = get_area_def(area) LOG.info("Add coastlines and political borders to image.") old_args = [color, width, resolution, grid, level_coast, level_borders] if any(arg is not None for arg in old_args): warnings.warn("'color', 'width', 'resolution', 'grid', 'level_coast', 'level_borders'" " arguments will be deprecated soon. 
Please use 'overlays' instead.", DeprecationWarning) if hasattr(orig_img, 'convert'): # image must be in RGB space to work with pycoast/pydecorate res_mode = ('RGBA' if orig_img.final_mode(fill_value).endswith('A') else 'RGB') orig_img = orig_img.convert(res_mode) elif not orig_img.mode.startswith('RGB'): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") if overlays is None: overlays = _create_overlays_dict(color, width, grid, level_coast, level_borders) cw_ = ContourWriterAGG(coast_dir) new_image = orig_img.apply_pil(_burn_overlay, res_mode, None, {'fill_value': fill_value}, (area, cw_, overlays), None) return new_image def _create_overlays_dict(color, width, grid, level_coast, level_borders): """Fill in the overlays dict.""" overlays = dict() # fill with sensible defaults general_params = {'outline': color or (0, 0, 0), 'width': width or 0.5} for key, val in general_params.items(): if val is not None: overlays.setdefault('coasts', {}).setdefault(key, val) overlays.setdefault('borders', {}).setdefault(key, val) if level_coast is None: level_coast = 1 overlays.setdefault('coasts', {}).setdefault('level', level_coast) if level_borders is None: level_borders = 1 overlays.setdefault('borders', {}).setdefault('level', level_borders) if grid is not None: if 'major_lonlat' in grid and grid['major_lonlat']: major_lonlat = grid.pop('major_lonlat') minor_lonlat = grid.pop('minor_lonlat', major_lonlat) grid.update({'Dlonlat': major_lonlat, 'dlonlat': minor_lonlat}) for key, val in grid.items(): overlays.setdefault('grid', {}).setdefault(key, val) return overlays def add_text(orig, dc, img, text): """Add text to an image using the pydecorate package. All the features of pydecorate's ``add_text`` are available. See documentation of :doc:`pydecorate:index` for more info. """ LOG.info("Add text to image.") dc.add_text(**text) arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], coords={'y': orig.data.coords['y'], 'x': orig.data.coords['x'], 'bands': list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) def add_logo(orig, dc, img, logo): """Add logos or other images to an image using the pydecorate package. All the features of pydecorate's ``add_logo`` are available. See documentation of :doc:`pydecorate:index` for more info. """ LOG.info("Add logo to image.") dc.add_logo(**logo) arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], coords={'y': orig.data.coords['y'], 'x': orig.data.coords['x'], 'bands': list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) def add_scale(orig, dc, img, scale): """Add scale to an image using the pydecorate package. All the features of pydecorate's ``add_scale`` are available. See documentation of :doc:`pydecorate:index` for more info. """ LOG.info("Add scale to image.") dc.add_scale(**scale) arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], coords={'y': orig.data.coords['y'], 'x': orig.data.coords['x'], 'bands': list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) def add_decorate(orig, fill_value=None, **decorate): """Decorate an image with text and/or logos/images. This call adds text/logos in order as given in the input to keep the alignment features available in pydecorate. 
An example of the decorate config:: decorate = { 'decorate': [ {'logo': {'logo_path': , 'height': 143, 'bg': 'white', 'bg_opacity': 255}}, {'text': {'txt': start_time_txt, 'align': {'top_bottom': 'bottom', 'left_right': 'right'}, 'font': , 'font_size': 22, 'height': 30, 'bg': 'black', 'bg_opacity': 255, 'line': 'white'}} ] } Any numbers of text/logo in any order can be added to the decorate list, but the order of the list is kept as described above. Note that a feature given in one element, eg. bg (which is the background color) will also apply on the next elements unless a new value is given. align is a special keyword telling where in the image to start adding features, top_bottom is either top or bottom and left_right is either left or right. """ LOG.info("Decorate image.") # Need to create this here to possible keep the alignment # when adding text and/or logo with pydecorate if hasattr(orig, 'convert'): # image must be in RGB space to work with pycoast/pydecorate orig = orig.convert('RGBA' if orig.mode.endswith('A') else 'RGB') elif not orig.mode.startswith('RGB'): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") img_orig = orig.pil_image(fill_value=fill_value) from pydecorate import DecoratorAGG dc = DecoratorAGG(img_orig) # decorate need to be a list to maintain the alignment # as ordered in the list img = orig if 'decorate' in decorate: for dec in decorate['decorate']: if 'logo' in dec: img = add_logo(img, dc, img_orig, logo=dec['logo']) elif 'text' in dec: img = add_text(img, dc, img_orig, text=dec['text']) elif 'scale' in dec: img = add_scale(img, dc, img_orig, scale=dec['scale']) return img def get_enhanced_image(dataset, enhance=None, overlay=None, decorate=None, fill_value=None): """Get an enhanced version of `dataset` as an :class:`~trollimage.xrimage.XRImage` instance. Args: dataset (xarray.DataArray): Data to be enhanced and converted to an image. enhance (bool or Enhancer): Whether to automatically enhance data to be more visually useful and to fit inside the file format being saved to. By default this will default to using the enhancement configuration files found using the default :class:`~satpy.writers.Enhancer` class. This can be set to `False` so that no enhancments are performed. This can also be an instance of the :class:`~satpy.writers.Enhancer` class if further custom enhancement is needed. overlay (dict): Options for image overlays. See :func:`add_overlay` for available options. decorate (dict): Options for decorating the image. See :func:`add_decorate` for available options. fill_value (int or float): Value to use when pixels are masked or invalid. Default of `None` means to create an alpha channel. See :meth:`~trollimage.xrimage.XRImage.finalize` for more details. Only used when adding overlays or decorations. Otherwise it is up to the caller to "finalize" the image before using it except if calling ``img.show()`` or providing the image to a writer as these will finalize the image. .. versionchanged:: 0.10 Deprecated `enhancement_config_file` and 'enhancer' in favor of `enhance`. Pass an instance of the `Enhancer` class to `enhance` instead. 
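    Example (a minimal sketch; ``scn`` is assumed to be a Scene with the
    hypothetical ``overview`` composite already loaded)::

        img = get_enhanced_image(scn["overview"])
        img.save("overview.png")
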
""" if enhance is False: # no enhancement enhancer = None elif enhance is None or enhance is True: # default enhancement enhancer = Enhancer() else: # custom enhancer enhancer = enhance # Create an image for enhancement img = to_image(dataset) if enhancer is None or enhancer.enhancement_tree is None: LOG.debug("No enhancement being applied to dataset") else: if dataset.attrs.get("sensor", None): enhancer.add_sensor_enhancements(dataset.attrs["sensor"]) enhancer.apply(img, **dataset.attrs) if overlay is not None: img = add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay) if decorate is not None: img = add_decorate(img, fill_value=fill_value, **decorate) return img def show(dataset, **kwargs): """Display the dataset as an image.""" img = get_enhanced_image(dataset.squeeze(), **kwargs) img.show() return img def to_image(dataset): """Convert ``dataset`` into a :class:`~trollimage.xrimage.XRImage` instance. Convert the ``dataset`` into an instance of the :class:`~trollimage.xrimage.XRImage` class. This function makes no other changes. To get an enhanced image, possibly with overlays and decoration, see :func:`~get_enhanced_image`. Args: dataset (xarray.DataArray): Data to be converted to an image. Returns: Instance of :class:`~trollimage.xrimage.XRImage`. """ dataset = dataset.squeeze() if dataset.ndim < 2: raise ValueError("Need at least a 2D array to make an image.") return XRImage(dataset) def split_results(results): """Split results. Get sources, targets and delayed objects to separate lists from a list of results collected from (multiple) writer(s). """ from dask.delayed import Delayed def flatten(results): out = [] if isinstance(results, (list, tuple)): for itm in results: out.extend(flatten(itm)) return out return [results] sources = [] targets = [] delayeds = [] for res in flatten(results): if isinstance(res, da.Array): sources.append(res) elif isinstance(res, Delayed): delayeds.append(res) else: targets.append(res) return sources, targets, delayeds def compute_writer_results(results): """Compute all the given dask graphs `results` so that the files are saved. Args: results (iterable): Iterable of dask graphs resulting from calls to `scn.save_datasets(..., compute=False)` """ if not results: return sources, targets, delayeds = split_results(results) # one or more writers have targets that we need to close in the future if targets: delayeds.append(da.store(sources, targets, compute=False)) if delayeds: da.compute(delayeds) if targets: for target in targets: if hasattr(target, 'close'): target.close() class Writer(Plugin, DataDownloadMixin): """Base Writer class for all other writers. A minimal writer subclass should implement the `save_dataset` method. """ def __init__(self, name=None, filename=None, base_dir=None, **kwargs): """Initialize the writer object. Args: name (str): A name for this writer for log and error messages. If this writer is configured in a YAML file its name should match the name of the YAML file. Writer names may also appear in output file attributes. filename (str): Filename to save data to. This filename can and should specify certain python string formatting fields to differentiate between data written to the files. Any attributes provided by the ``.attrs`` of a DataArray object may be included. Format and conversion specifiers provided by the :class:`trollsift ` package may also be used. Any directories in the provided pattern will be created if they do not exist. 
Example:: {platform_name}_{sensor}_{name}_{start_time:%Y%m%d_%H%M%S}.tif base_dir (str): Base destination directories for all created files. kwargs (dict): Additional keyword arguments to pass to the :class:`~satpy.plugin_base.Plugin` class. """ # Load the config Plugin.__init__(self, **kwargs) self.info = self.config.get('writer', {}) if 'file_pattern' in self.info: warnings.warn("Writer YAML config is using 'file_pattern' which " "has been deprecated, use 'filename' instead.") self.info['filename'] = self.info.pop('file_pattern') if 'file_pattern' in kwargs: warnings.warn("'file_pattern' has been deprecated, use 'filename' instead.", DeprecationWarning) filename = kwargs.pop('file_pattern') # Use options from the config file if they weren't passed as arguments self.name = self.info.get("name", None) if name is None else name self.file_pattern = self.info.get("filename", None) if filename is None else filename if self.name is None: raise ValueError("Writer 'name' not provided") self.filename_parser = self.create_filename_parser(base_dir) self.register_data_files() @classmethod def separate_init_kwargs(cls, kwargs): """Help separating arguments between init and save methods. Currently the :class:`~satpy.scene.Scene` is passed one set of arguments to represent the Writer creation and saving steps. This is not preferred for Writer structure, but provides a simpler interface to users. This method splits the provided keyword arguments between those needed for initialization and those needed for the ``save_dataset`` and ``save_datasets`` method calls. Writer subclasses should try to prefer keyword arguments only for the save methods only and leave the init keyword arguments to the base classes when possible. """ # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs = {} kwargs = kwargs.copy() for kw in ['base_dir', 'filename', 'file_pattern']: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def create_filename_parser(self, base_dir): """Create a :class:`trollsift.parser.Parser` object for later use.""" # just in case a writer needs more complex file patterns # Set a way to create filenames if we were given a pattern if base_dir and self.file_pattern: file_pattern = os.path.join(base_dir, self.file_pattern) else: file_pattern = self.file_pattern return parser.Parser(file_pattern) if file_pattern else None @staticmethod def _prepare_metadata_for_filename_formatting(attrs): if isinstance(attrs.get('sensor'), set): attrs['sensor'] = '-'.join(sorted(attrs['sensor'])) def get_filename(self, **kwargs): """Create a filename where output data will be saved. Args: kwargs (dict): Attributes and other metadata to use for formatting the previously provided `filename`. """ if self.filename_parser is None: raise RuntimeError("No filename pattern or specific filename provided") self._prepare_metadata_for_filename_formatting(kwargs) output_filename = self.filename_parser.compose(kwargs) dirname = os.path.dirname(output_filename) if dirname and not os.path.isdir(dirname): LOG.info("Creating output directory: {}".format(dirname)) os.makedirs(dirname) return output_filename def save_datasets(self, datasets, compute=True, **kwargs): """Save all datasets to one or more files. Subclasses can use this method to save all datasets to one single file or optimize the writing of individual datasets. By default this simply calls `save_dataset` for each dataset provided. Args: datasets (iterable): Iterable of `xarray.DataArray` objects to save using this writer. 
compute (bool): If `True` (default), compute all of the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a :func:`dask.array.store` call. See return values below for more details. **kwargs: Keyword arguments to pass to `save_dataset`. See that documentation for more details. Returns: Value returned depends on `compute` keyword argument. If `compute` is `True` the value is the result of a either a :func:`dask.array.store` operation or a :doc:`dask:delayed` compute, typically this is `None`. If `compute` is `False` then the result is either a :doc:`dask:delayed` object that can be computed with `delayed.compute()` or a two element tuple of sources and targets to be passed to :func:`dask.array.store`. If `targets` is provided then it is the caller's responsibility to close any objects that have a "close" method. """ results = [] for ds in datasets: results.append(self.save_dataset(ds, compute=False, **kwargs)) if compute: LOG.info("Computing and writing results...") return compute_writer_results([results]) targets, sources, delayeds = split_results([results]) if delayeds: # This writer had only delayed writes return delayeds else: return targets, sources def save_dataset(self, dataset, filename=None, fill_value=None, compute=True, **kwargs): """Save the ``dataset`` to a given ``filename``. This method must be overloaded by the subclass. Args: dataset (xarray.DataArray): Dataset to save using this writer. filename (str): Optionally specify the filename to save this dataset to. If not provided then `filename` which can be provided to the init method will be used and formatted by dataset attributes. fill_value (int or float): Replace invalid values in the dataset with this fill value if applicable to this writer. compute (bool): If `True` (default), compute and save the dataset. If `False` return either a :doc:`dask:delayed` object or tuple of (source, target). See the return values below for more information. **kwargs: Other keyword arguments for this particular writer. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a :doc:`dask:delayed` object or running :func:`dask.array.store`. If `compute` is `False` then the returned value is either a :doc:`dask:delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to :func:`dask.array.store`. If target is provided the the caller is responsible for calling `target.close()` if the target has this method. """ raise NotImplementedError( "Writer '%s' has not implemented dataset saving" % (self.name, )) class ImageWriter(Writer): """Base writer for image file formats.""" def __init__(self, name=None, filename=None, base_dir=None, enhance=None, **kwargs): """Initialize image writer object. Args: name (str): A name for this writer for log and error messages. If this writer is configured in a YAML file its name should match the name of the YAML file. Writer names may also appear in output file attributes. filename (str): Filename to save data to. This filename can and should specify certain python string formatting fields to differentiate between data written to the files. Any attributes provided by the ``.attrs`` of a DataArray object may be included. Format and conversion specifiers provided by the :class:`trollsift ` package may also be used. Any directories in the provided pattern will be created if they do not exist. 
Example:: {platform_name}_{sensor}_{name}_{start_time:%Y%m%d_%H%M%S}.tif base_dir (str): Base destination directories for all created files. enhance (bool or Enhancer): Whether to automatically enhance data to be more visually useful and to fit inside the file format being saved to. By default this will default to using the enhancement configuration files found using the default :class:`~satpy.writers.Enhancer` class. This can be set to `False` so that no enhancments are performed. This can also be an instance of the :class:`~satpy.writers.Enhancer` class if further custom enhancement is needed. kwargs (dict): Additional keyword arguments to pass to the :class:`~satpy.writer.Writer` base class. .. versionchanged:: 0.10 Deprecated `enhancement_config_file` and 'enhancer' in favor of `enhance`. Pass an instance of the `Enhancer` class to `enhance` instead. """ super(ImageWriter, self).__init__(name, filename, base_dir, **kwargs) if enhance is False: # No enhancement self.enhancer = False elif enhance is None or enhance is True: # default enhancement enhancement_config = self.info.get("enhancement_config", None) self.enhancer = Enhancer(enhancement_config_file=enhancement_config) else: # custom enhancer self.enhancer = enhance @classmethod def separate_init_kwargs(cls, kwargs): """Separate the init kwargs.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(ImageWriter, cls).separate_init_kwargs(kwargs) for kw in ['enhancement_config', 'enhance']: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def save_dataset(self, dataset, filename=None, fill_value=None, overlay=None, decorate=None, compute=True, **kwargs): """Save the ``dataset`` to a given ``filename``. This method creates an enhanced image using :func:`get_enhanced_image`. The image is then passed to :meth:`save_image`. See both of these functions for more details on the arguments passed to this method. """ img = get_enhanced_image(dataset.squeeze(), enhance=self.enhancer, overlay=overlay, decorate=decorate, fill_value=fill_value) return self.save_image(img, filename=filename, compute=compute, fill_value=fill_value, **kwargs) def save_image(self, img, filename=None, compute=True, **kwargs): """Save Image object to a given ``filename``. Args: img (trollimage.xrimage.XRImage): Image object to save to disk. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. compute (bool): If `True` (default), compute and save the dataset. If `False` return either a :doc:`dask:delayed` object or tuple of (source, target). See the return values below for more information. **kwargs: Other keyword arguments to pass to this writer. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a :doc:`dask:delayed` object or running :func:`dask.array.store`. If `compute` is `False` then the returned value is either a :doc:`dask:delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to :func:`dask.array.store`. If target is provided the the caller is responsible for calling `target.close()` if the target has this method. """ raise NotImplementedError("Writer '%s' has not implemented image saving" % (self.name,)) class DecisionTree(object): """Structure to search for nearest match from a set of parameters. 
This class is used to find the best configuration section by matching a set of attributes. The provided dictionary contains a mapping of "section name" to "decision" dictionaries. Each decision dictionary contains the attributes that will be used for matching plus any additional keys that could be useful when matched. This class will search these decisions and return the one with the most matching parameters to the attributes passed to the :meth:`~satpy.writers.DecisionTree.find_match` method. Note that decision sections are provided as a dict instead of a list so that they can be overwritten or updated by doing the equivalent of a ``current_dicts.update(new_dicts)``. Examples: Decision sections are provided as a dictionary of dictionaries. The returned match will be the first result found by searching provided `match_keys` in order. :: decisions = { 'first_section': { 'a': 1, 'b': 2, 'useful_key': 'useful_value', }, 'second_section': { 'a': 5, 'useful_key': 'other_useful_value1', }, 'third_section': { 'b': 4, 'useful_key': 'other_useful_value2', }, } tree = DecisionTree(decisions, ('a', 'b')) tree.find_match(a=5, b=2) # second_section dict tree.find_match(a=1, b=2) # first_section dict tree.find_match(a=5, b=4) # second_section dict tree.find_match(a=3, b=2) # no match """ any_key = None def __init__(self, decision_dicts, match_keys, multival_keys=None): """Init the decision tree. Args: decision_dicts (dict): Dictionary of dictionaries. Each sub-dictionary contains key/value pairs that can be matched from the `find_match` method. Sub-dictionaries can include additional keys outside of the ``match_keys`` provided to act as the "result" of a query. The keys of the root dict are arbitrary. match_keys (list): Keys of the provided dictionary to use for matching. multival_keys (list): Keys of `match_keys` that can be provided as multiple values. A multi-value key can be specified as a single value (typically a string) or a set. If a set, it will be sorted and converted to a tuple and then used for matching. When querying the tree, these keys will be searched for exact multi-value results (the sorted tuple) and if not found then each of the values will be searched individually in alphabetical order. """ self._match_keys = match_keys self._multival_keys = multival_keys or [] self._tree = {} if not isinstance(decision_dicts, (list, tuple)): decision_dicts = [decision_dicts] self.add_config_to_tree(*decision_dicts) def add_config_to_tree(self, *decision_dicts): """Add a configuration to the tree.""" conf = {} for decision_dict in decision_dicts: conf = recursive_dict_update(conf, decision_dict) self._build_tree(conf) def _build_tree(self, conf): """Build the tree. Create a tree structure of dicts where each level represents the possible matches for a specific ``match_key``. When finding matches we will iterate through the tree matching each key that we know about. The last dict in the "tree" will contain the configure section whose match values led down that path in the tree. See :meth:`DecisionTree.find_match` for more information. 
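        For example (a simplified sketch), with ``match_keys=('a', 'b')`` the
        sections ``{'a': 1, 'b': 2}`` and ``{'a': 1, 'b': 4}`` would be stored
        roughly as::

            {1: {2: {...first section...}, 4: {...second section...}}}
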
""" for _section_name, sect_attrs in conf.items(): # Set a path in the tree for each section in the config files curr_level = self._tree for match_key in self._match_keys: # or None is necessary if they have empty strings this_attr_val = sect_attrs.get(match_key, self.any_key) or None if match_key in self._multival_keys and isinstance(this_attr_val, list): this_attr_val = tuple(sorted(this_attr_val)) is_last_key = match_key == self._match_keys[-1] level_needs_init = this_attr_val not in curr_level if is_last_key: # if we are at the last attribute, then assign the value # set the dictionary of attributes because the config is # not persistent curr_level[this_attr_val] = sect_attrs elif level_needs_init: curr_level[this_attr_val] = {} curr_level = curr_level[this_attr_val] @staticmethod def _convert_query_val_to_hashable(query_val): _sorted_query_val = sorted(query_val) query_vals = [tuple(_sorted_query_val)] + _sorted_query_val query_vals += query_val return query_vals def _get_query_values(self, query_dict, curr_match_key): query_val = query_dict[curr_match_key] if curr_match_key in self._multival_keys and isinstance(query_val, set): query_vals = self._convert_query_val_to_hashable(query_val) else: query_vals = [query_val] return query_vals def _find_match_if_known(self, curr_level, remaining_match_keys, query_dict): match = None curr_match_key = remaining_match_keys[0] if curr_match_key not in query_dict: return match query_vals = self._get_query_values(query_dict, curr_match_key) for query_val in query_vals: if query_val not in curr_level: continue match = self._find_match(curr_level[query_val], remaining_match_keys[1:], query_dict) if match is not None: break return match def _find_match(self, curr_level, remaining_match_keys, query_dict): """Find a match.""" if len(remaining_match_keys) == 0: # we're at the bottom level, we must have found something return curr_level match = self._find_match_if_known( curr_level, remaining_match_keys, query_dict) if match is None and self.any_key in curr_level: # if we couldn't find it using the attribute then continue with # the other attributes down the 'any' path match = self._find_match( curr_level[self.any_key], remaining_match_keys[1:], query_dict) return match def find_match(self, **query_dict): """Find a match. Recursively search through the tree structure for a path that matches the provided match parameters. 
""" try: match = self._find_match(self._tree, self._match_keys, query_dict) except (KeyError, IndexError, ValueError): LOG.debug("Match exception:", exc_info=True) LOG.error("Error when finding matching decision section") if match is None: # only possible if no default section was provided raise KeyError("No decision section found for %s" % (query_dict.get("uid", None),)) return match class EnhancementDecisionTree(DecisionTree): """The enhancement decision tree.""" def __init__(self, *decision_dicts, **kwargs): """Init the decision tree.""" match_keys = kwargs.pop("match_keys", ("name", "reader", "platform_name", "sensor", "standard_name", "units", )) self.prefix = kwargs.pop("config_section", "enhancements") multival_keys = kwargs.pop("multival_keys", ["sensor"]) super(EnhancementDecisionTree, self).__init__( decision_dicts, match_keys, multival_keys) def add_config_to_tree(self, *decision_dict): """Add configuration to tree.""" conf = {} for config_file in decision_dict: if os.path.isfile(config_file): with open(config_file) as fd: enhancement_config = yaml.load(fd, Loader=UnsafeLoader) if enhancement_config is None: # empty file continue enhancement_section = enhancement_config.get( self.prefix, {}) if not enhancement_section: LOG.debug("Config '{}' has no '{}' section or it is empty".format(config_file, self.prefix)) continue conf = recursive_dict_update(conf, enhancement_section) elif isinstance(config_file, dict): conf = recursive_dict_update(conf, config_file) else: LOG.debug("Loading enhancement config string") d = yaml.load(config_file, Loader=UnsafeLoader) if not isinstance(d, dict): raise ValueError( "YAML file doesn't exist or string is not YAML dict: {}".format(config_file)) conf = recursive_dict_update(conf, d) self._build_tree(conf) def find_match(self, **query_dict): """Find a match.""" try: return super(EnhancementDecisionTree, self).find_match(**query_dict) except KeyError: # give a more understandable error message raise KeyError("No enhancement configuration found for %s" % (query_dict.get("uid", None),)) class Enhancer(object): """Helper class to get enhancement information for images.""" def __init__(self, enhancement_config_file=None): """Initialize an Enhancer instance. Args: enhancement_config_file: The enhancement configuration to apply, False to leave as is. 
""" self.enhancement_config_file = enhancement_config_file # Set enhancement_config_file to False for no enhancements if self.enhancement_config_file is None: # it wasn't specified in the config or in the kwargs, we should # provide a default config_fn = os.path.join("enhancements", "generic.yaml") self.enhancement_config_file = config_search_paths(config_fn) if not self.enhancement_config_file: # They don't want any automatic enhancements self.enhancement_tree = None else: if not isinstance(self.enhancement_config_file, (list, tuple)): self.enhancement_config_file = [self.enhancement_config_file] self.enhancement_tree = EnhancementDecisionTree(*self.enhancement_config_file) self.sensor_enhancement_configs = [] def get_sensor_enhancement_config(self, sensor): """Get the sensor-specific config.""" if isinstance(sensor, str): # one single sensor sensor = [sensor] for sensor_name in sensor: config_fn = os.path.join("enhancements", sensor_name + ".yaml") config_files = config_search_paths(config_fn) # Note: Enhancement configuration files can't overwrite individual # options, only entire sections are overwritten for config_file in config_files: yield config_file def add_sensor_enhancements(self, sensor): """Add sensor-specific enhancements.""" # XXX: Should we just load all enhancements from the base directory? new_configs = [] for config_file in self.get_sensor_enhancement_config(sensor): if config_file not in self.sensor_enhancement_configs: self.sensor_enhancement_configs.append(config_file) new_configs.append(config_file) if new_configs: self.enhancement_tree.add_config_to_tree(*new_configs) def apply(self, img, **info): """Apply the enhancements.""" enh_kwargs = self.enhancement_tree.find_match(**info) LOG.debug("Enhancement configuration options: %s" % (str(enh_kwargs['operations']), )) for operation in enh_kwargs['operations']: fun = operation['method'] args = operation.get('args', []) kwargs = operation.get('kwargs', {}) fun(img, *args, **kwargs) # img.enhance(**enh_kwargs) satpy-0.34.0/satpy/writers/awips_tiled.py000066400000000000000000002442611420401153000204520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The AWIPS Tiled writer is used to create AWIPS-compatible tiled NetCDF4 files. The Advanced Weather Interactive Processing System (AWIPS) is a program used by the United States National Weather Service (NWS) and others to view different forms of weather imagery. The original Sectorized Cloud and Moisture Imagery (SCMI) functionality in AWIPS was a NetCDF4 format supported by AWIPS to store one image broken up in to one or more "tiles". This format has since been expanded to support many other products and so the writer for this format in Satpy is generically called the "AWIPS Tiled" writer. You may still see SCMI referenced in this documentation or in the source code for the writer. 
Once AWIPS is configured for specific products this writer can be used to provide compatible products to the system. The AWIPS Tiled writer takes 2D (y, x) geolocated data and creates one or more AWIPS-compatible NetCDF4 files. The writer and the AWIPS client may need to be configured to make things appear the way the user wants in the AWIPS client. The writer can only produce files for datasets mapped to areas with specific projections: - lcc - geos - merc - stere This is a limitation of the AWIPS client and not of the writer. In the case where AWIPS has been updated to support additional projections, this writer may also need to be updated to support those projections. AWIPS Configuration ------------------- Depending on how this writer is used and the data it is provided, AWIPS may need additional configuration on the server side to properly ingest the files produced. This will require administrator privileges to the ingest server(s) and is not something that can be configured on the client. Note that any changes required must be done on all servers that you wish to ingest your data files. The generic "polar" template this writer defaults to should limit the number of modifications needed for any new data fields that AWIPS previously was unaware of. Once the data is ingested, the client can be used to customize how the data looks on screen. AWIPS requires files to follow a specific naming scheme so they can be routed to specific "decoders". For the files produced by this writer, this typically means editing the "goesr" decoder configuration in a directory like:: /awips2/edex/data/utility/common_static/site//distribution/goesr.xml The "goesr" decoder is a subclass of the "satellite" decoder. You may see either name show up in the AWIPS ingest logs. With the correct regular expression in the above file, your files should be passed to the right decoder, opened, and parsed for data. To tell AWIPS exactly what attributes and variables mean in your file, you'll need to create or configure an XML file in:: /awips2/edex/data/utility/common_static/site//satellite/goesr/descriptions/ See the existing files in this directory for examples. The "polar" template (see below) that this writer uses by default is already configured in the "Polar" subdirectory assuming that the TOWR-S RPM package has been installed on your AWIPS ingest server. Templates --------- This writer allows for a "template" to be specified to control how the output files are structured and created. Templates can be configured in the writer YAML file (``awips_tiled.yaml``) or passed as a dictionary to the ``template`` keyword argument. Templates have three main sections: 1. global_attributes 2. coordinates 3. variables Additionally, you can specify whether a template should produce files with one variable per file by specifying ``single_variable: true`` or multiple variables per file by specifying ``single_variable: false``. You can also specify the output filename for a template using a Python format string. See ``awips_tiled.yaml`` for examples. Lastly, a ``add_sector_id_global`` boolean parameter can be specified to add the user-provided ``sector_id`` keyword argument as a global attribute to the file. The ``global_attributes`` section takes names of global attributes and then a series of options to "render" that attribute from the metadata provided when creating files. For example:: product_name: value: "{name}" For more information see the :meth:`satpy.writers.awips_tiled.NetCDFTemplate.get_attr_value` method. 
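Expressed as a Python ``template`` dictionary instead of YAML, the same
rendering rule might look like the following sketch (the keyword values are
illustrative and a complete template would normally also define the
``coordinates`` and ``variables`` sections)::

    template = {
        "single_variable": True,
        "global_attributes": {
            "product_name": {"value": "{name}"},
        },
    }
    scn.save_datasets(writer="awips_tiled", template=template,
                      sector_id="LCC", source_name="SSEC")
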
The ``coordinates`` and ``variables`` are similar to each other in that they define how a variable should be created, the attributes it should have, and the encoding to write to the file. Coordinates typically don't need to be modified as tiled files usually have only ``x`` and ``y`` dimension variables. The Variables on the other hand use a decision tree to determine what section applies for a particular DataArray being saved. The basic structure is:: variables: arbitrary_section_name: var_name: "output_netcdf_variable_name" attributes: encoding: The "decision tree matching parameters" can be one or more of "name", "standard_name', "satellite", "sensor", "area_id', "units", or "reader". The writer will choose the best section for the DataArray being saved (the most matches). If none of these parameters are specified in a section then it will be used when no other matches are found (the "default" section). The "encoding" parameters can be anything accepted by xarray's ``to_netcdf`` method. See :meth:`xarray.Dataset.to_netcdf` for more information on the `encoding`` keyword argument. For more examples see the existing builtin templates defined in ``awips_tiled.yaml``. Builtin Templates ^^^^^^^^^^^^^^^^^ There are only a few templates provided in Sapty currently. * **polar**: A custom format developed for the CSPP Polar2Grid project at the University of Wisconsin - Madison Space Science and Engineering Center (SSEC). This format is made available through the TOWR-S package that can be installed for GOES-R support in AWIPS. This format is meant to be very generic and should theoretically allow any variable to get ingested into AWIPS. * **glm_l2_radc**: This format is used to produce standard files for the gridded GLM products produced by the CSPP Geo Gridded GLM package. Support for this format is also available in the TOWR-S package on an AWIPS ingest server. This format is specific to gridded GLM on the CONUS sector and is not meant to work for other data. * **glm_l2_radf**: This format is used to produce standard files for the gridded GLM productes produced by the CSPP Geo Gridded GLM package. Support for this format is also available in the TOWR-S package on an AWIPS ingest server. This format is specific to gridded GLM on the Full Disk sector and is not meant to work for other data. Numbered versus Lettered Grids ------------------------------ By default this writer will save tiles by number starting with '1' representing the upper-left image tile. Tile numbers then increase along the column and then on to the next row. By specifying `lettered_grid` as `True` tiles can be designated with a letter. Lettered grids or sectors are preconfigured in the `awips_tiled.yaml` configuration file. The lettered tile locations are static and will not change with the data being written to them. Each lettered tile is split into a certain number of subtiles (`num_subtiles`), default 2 rows by 2 columns. Lettered tiles are meant to make it easier for receiving AWIPS clients/stations to filter what tiles they receive; saving time, bandwidth, and space. Any tiles (numbered or lettered) not containing any valid data are not created. Updating tiles -------------- There are some input data cases where we want to put new data in a tile file written by a previous execution. An example is a pre-tiled input dataset that is processed one tile at a time. One input tile may map to one or more output AWIPS tiles, but may not perfectly aligned, leaving empty/unused space in the output tile. 
The next input tile may be able to fill in that empty space and should be allowed to write the "new" data to the file. This is the default behavior of the AWIPS tiled writer. In cases where data overlaps the existing data in the tile, the newer data has priority. Shifting Lettered Grids ----------------------- Due to the static nature of the lettered grids, there is sometimes a need to shift the locations of where these tiles are by up to 0.5 pixels in each dimension to align with the data being processed. This means that the tiles for a 1000m resolution grid may be shifted up to 500m in each direction from the original definition of the lettered "sector". This can cause differences in the location of the tiles between executions depending on the locations of the input data. In the worst case tile A01 from one execution could be shifted up to 1 grid cell from tile A01 in another execution (one is shifted 0.5 pixels to the left, the other is shifted 0.5 to the right). This shifting makes the calculations for generating tiles easier and more accurate. By default, the lettered tile locations are changed to match the location of the data. This works well when output tiles will not be updated (see above) in future processing. In cases where output tiles will be filled in or updated with more data the ``use_sector_reference`` keyword argument can be set to ``True`` to tell the writer to shift the data's geolocation by up to 0.5 pixels in each dimension instead of shifting the lettered tile locations. """ import logging import os import string import sys import warnings from collections import namedtuple from datetime import datetime, timedelta import dask import dask.array as da import numpy as np import xarray as xr from pyproj import CRS, Proj, Transformer from pyresample.geometry import AreaDefinition from trollsift.parser import Parser, StringFormatter from satpy import __version__ from satpy.writers import DecisionTree, Enhancer, Writer, get_enhanced_image LOG = logging.getLogger(__name__) DEFAULT_OUTPUT_PATTERN = '{source_name}_AII_{platform_name}_{sensor}_' \ '{name}_{sector_id}_{tile_id}_' \ '{start_time:%Y%m%d_%H%M}.nc' UNIT_CONV = { 'micron': 'microm', 'mm h-1': 'mm/h', '1': '*1', 'none': '*1', 'percent': '%', 'Kelvin': 'kelvin', 'K': 'kelvin', } TileInfo = namedtuple('TileInfo', ['tile_count', 'image_shape', 'tile_shape', 'tile_row_offset', 'tile_column_offset', 'tile_id', 'tile_number', 'x', 'y', 'xy_factors', 'tile_slices', 'data_slices']) XYFactors = namedtuple('XYFactors', ['mx', 'bx', 'my', 'by']) def fix_awips_file(fn): """Hack the NetCDF4 files to workaround NetCDF-Java bugs used by AWIPS. This should not be needed for new versions of AWIPS. """ # hack to get files created by new NetCDF library # versions to be read by AWIPS buggy java version # of NetCDF LOG.info("Modifying output NetCDF file to work with AWIPS") import h5py h = h5py.File(fn, 'a') if '_NCProperties' in h.attrs: del h.attrs['_NCProperties'] h.close() class NumberedTileGenerator(object): """Helper class to generate per-tile metadata for numbered tiles.""" def __init__(self, area_definition, tile_shape=None, tile_count=None): """Initialize and generate tile information for this sector/grid for later use.""" self.area_definition = area_definition self._rows = self.area_definition.height self._cols = self.area_definition.width # get tile shape, number of tiles, etc. 
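        # at least one of 'tile_shape' or 'tile_count' must be provided
        # ('tile_shape' takes precedence); _get_tile_properties raises a
        # ValueError if both are missing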
self._get_tile_properties(tile_shape, tile_count) # scaling parameters for the overall images X and Y coordinates # they must be the same for all X and Y variables for all tiles # and must be stored in the file as 0, 1, 2, 3, ... # (X factor, X offset, Y factor, Y offset) self.mx, self.bx, self.my, self.by = self._get_xy_scaling_parameters() self.xy_factors = XYFactors(self.mx, self.bx, self.my, self.by) self._tile_cache = [] def _get_tile_properties(self, tile_shape, tile_count): """Generate tile information for numbered tiles.""" if tile_shape is not None: tile_shape = (int(min(tile_shape[0], self._rows)), int(min(tile_shape[1], self._cols))) tile_count = (int(np.ceil(self._rows / float(tile_shape[0]))), int(np.ceil(self._cols / float(tile_shape[1])))) elif tile_count: tile_shape = (int(np.ceil(self._rows / float(tile_count[0]))), int(np.ceil(self._cols / float(tile_count[1])))) else: raise ValueError("Either 'tile_count' or 'tile_shape' must be provided") # number of pixels per each tile (rows, cols) self.tile_shape = tile_shape # number of tiles in each direction (rows, columns) self.tile_count = tile_count # number of tiles in the entire image self.total_tiles = tile_count[0] * tile_count[1] # number of pixels in the whole image (rows, columns) self.image_shape = (self.tile_shape[0] * self.tile_count[0], self.tile_shape[1] * self.tile_count[1]) # X and Y coordinates of the whole image self.x, self.y = self._get_xy_arrays() def _get_xy_arrays(self): """Get the overall X/Y coordinate variable arrays.""" gd = self.area_definition ts = self.tile_shape tc = self.tile_count # Since our tiles may go over the edge of the original "grid" we # need to make sure we calculate X/Y to the edge of all of the tiles imaginary_data_size = (ts[0] * tc[0], ts[1] * tc[1]) ps_x = gd.pixel_size_x ps_y = gd.pixel_size_y # tiles start from upper-left new_extents = ( gd.area_extent[0], gd.area_extent[1] - ps_y * (imaginary_data_size[0] - gd.height), gd.area_extent[2] + ps_x * (imaginary_data_size[1] - gd.width), gd.area_extent[3]) imaginary_grid_def = AreaDefinition( gd.area_id, gd.description, gd.proj_id, gd.crs, imaginary_data_size[1], imaginary_data_size[0], new_extents, ) x, y = imaginary_grid_def.get_proj_vectors() return x, y def _get_xy_scaling_parameters(self): """Get the X/Y coordinate limits for the full resulting image.""" gd = self.area_definition bx = self.x.min() mx = gd.pixel_size_x by = self.y.max() my = -abs(gd.pixel_size_y) return mx, bx, my, by def _tile_number(self, ty, tx): """Get tile number from tile row/column.""" # e.g. # 001 002 003 004 # 005 006 ... 
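        # i.e. tiles are numbered row-major, starting at 1 in the upper-left tile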
return ty * self.tile_count[1] + tx + 1 def _tile_identifier(self, ty, tx): """Get tile identifier for numbered tiles.""" return "T{:03d}".format(self._tile_number(ty, tx)) def _generate_tile_info(self): """Get numbered tile metadata.""" x = self.x y = self.y ts = self.tile_shape tc = self.tile_count if self._tile_cache: for tile_info in self._tile_cache: yield tile_info for ty in range(tc[0]): for tx in range(tc[1]): tile_id = self._tile_identifier(ty, tx) tile_row_offset = ty * ts[0] tile_column_offset = tx * ts[1] # store tile data to an intermediate array # the tile may be larger than the remaining data, handle that: max_row_idx = min((ty + 1) * ts[0], self._rows) - (ty * ts[0]) max_col_idx = min((tx + 1) * ts[1], self._cols) - (tx * ts[1]) tile_slices = (slice(0, max_row_idx), slice(0, max_col_idx)) data_slices = (slice(ty * ts[0], (ty + 1) * ts[0]), slice(tx * ts[1], (tx + 1) * ts[1])) tmp_x = x[data_slices[1]] tmp_y = y[data_slices[0]] tile_number = self._tile_number(ty, tx) tile_info = TileInfo( tc, self.image_shape, ts, tile_row_offset, tile_column_offset, tile_id, tile_number, tmp_x, tmp_y, self.xy_factors, tile_slices, data_slices) self._tile_cache.append(tile_info) yield tile_info def __call__(self): """Provide simple call interface for getting tile metadata.""" if self._tile_cache: tile_infos = self._tile_cache else: tile_infos = self._generate_tile_info() for tile_info in tile_infos: # TODO: Return the slice instead of the actual data array # Use the slicing start/end to determine if it is empty # tile_data = data[tile_info.data_slices] # if not tile_data.size: # LOG.info("Tile {} is empty, skipping...".format(tile_info[2])) # continue yield tile_info class LetteredTileGenerator(NumberedTileGenerator): """Helper class to generate per-tile metadata for lettered tiles.""" def __init__(self, area_definition, extents, sector_crs, cell_size=(2000000, 2000000), num_subtiles=None, use_sector_reference=False): """Initialize tile information for later generation. Args: area_definition (AreaDefinition): Area of the data being saved. extents (tuple): Four element tuple of the configured lettered area. sector_crs (pyproj.CRS): CRS of the configured lettered sector area. cell_size (tuple): Two element tuple of resolution of each tile in sector projection units (y, x). 
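            num_subtiles (tuple): Number of subtile rows and columns each
                lettered tile is split into. Defaults to ``(2, 2)``.
            use_sector_reference (bool): Shift the data's geolocation to align
                with the configured sector instead of shifting the sector to
                align with the data. Defaults to ``False``.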
""" # (row subtiles, col subtiles) self.num_subtiles = num_subtiles or (2, 2) self.cell_size = cell_size # (row tile height, col tile width) # x/y self.ll_extents = extents[:2] # (x min, y min) self.ur_extents = extents[2:] # (x max, y max) self.use_sector_reference = use_sector_reference self._transformer = Transformer.from_crs(sector_crs, area_definition.crs) super().__init__(area_definition) def _get_tile_properties(self, tile_shape, tile_count): """Calculate tile information for this particular sector/grid.""" # ignore tile_shape and tile_count # they come from the base class, but aren't used here del tile_shape, tile_count # get original image's X/Y ad = self.area_definition x, y = ad.get_proj_vectors() ll_xy = self._transformer.transform(*self.ll_extents) ur_xy = self._transformer.transform(*self.ur_extents) cw = abs(ad.pixel_size_x) ch = abs(ad.pixel_size_y) st = self.num_subtiles cs = self.cell_size # row height, column width # make sure the number of total tiles is a factor of the subtiles # meaning each letter has the full number of subtiles # Tile numbering/naming starts from the upper left corner ul_xy = (ll_xy[0], ur_xy[1]) # Adjust the upper-left corner to 'perfectly' match the data # X/Y are center of pixels, adjust by half a pixels to get upper-left pixel corner shift_x = float(ul_xy[0] - (x.min() - cw / 2.)) % cw # could be negative shift_y = float(ul_xy[1] - (y.max() + ch / 2.)) % ch # could be negative # if we're really close to 0 then don't worry about it if abs(shift_x) < 1e-10 or abs(shift_x - cw) < 1e-10: shift_x = 0 if abs(shift_y) < 1e-10 or abs(shift_y - ch) < 1e-10: shift_y = 0 if self.use_sector_reference: LOG.debug("Adjusting X/Y by (%f, %f) so it better matches lettered grid", shift_x, shift_y) x = x + shift_x y = y + shift_y else: LOG.debug("Adjusting lettered grid by (%f, %f) so it better matches data X/Y", shift_x, shift_y) ul_xy = (ul_xy[0] - shift_x, ul_xy[1] - shift_y) # outer edge of grid # always keep the same distance between the extents ll_xy = (ul_xy[0], ll_xy[1] - shift_y) ur_xy = (ur_xy[0] - shift_x, ul_xy[1]) fcs_y, fcs_x = (np.ceil(float(cs[0]) / st[0]), np.ceil(float(cs[1]) / st[1])) # need X/Y for *whole* tiles max_cols = np.ceil((ur_xy[0] - ul_xy[0]) / fcs_x) max_rows = np.ceil((ul_xy[1] - ll_xy[1]) / fcs_y) # don't create partial alpha-tiles max_cols = int(np.ceil(max_cols / st[1]) * st[1]) max_rows = int(np.ceil(max_rows / st[0]) * st[0]) # make tile cell size a factor of pixel size num_pixels_x = int(np.floor(fcs_x / cw)) num_pixels_y = int(np.floor(fcs_y / ch)) # NOTE: this does not change the *total* number of columns/rows that # will be produced. This is important because otherwise the number # of lettered tiles could depend on the input data which is not what we # want fcs_x = num_pixels_x * cw fcs_y = num_pixels_y * ch # NOTE: this takes the center of the pixel relative to the upper-left outer edge: min_col = max(int(np.floor((x.min() - ul_xy[0]) / fcs_x)), 0) max_col = min(int(np.floor((x.max() - ul_xy[0]) / fcs_x)), max_cols - 1) min_row = max(int(np.floor((ul_xy[1] - y.max()) / fcs_y)), 0) max_row = min(int(np.floor((ul_xy[1] - y.min()) / fcs_y)), max_rows - 1) num_cols = max_col - min_col + 1 num_rows = max_row - min_row + 1 total_alphas = (max_cols * max_rows) / (st[0] * st[1]) if total_alphas > 26: raise ValueError("Too many lettered grid cells '{}' (sector cell size too small). 
" "Maximum of 26".format(total_alphas)) self.tile_shape = (num_pixels_y, num_pixels_x) self.total_tile_count = (max_rows, max_cols) self.tile_count = (num_rows, num_cols) self.total_tiles = num_rows * num_cols self.image_shape = (num_pixels_y * num_rows, num_pixels_x * num_cols) self.min_col = min_col self.max_col = max_col self.min_row = min_row self.max_row = max_row self.ul_xy = ul_xy self.mx = cw self.bx = ul_xy[0] + cw / 2.0 # X represents the center of the pixel self.my = -ch self.by = ul_xy[1] - ch / 2.0 # Y represents the center of the pixel self.x = x self.y = y def _get_xy_scaling_parameters(self): """Get the X/Y coordinate limits for the full resulting image.""" return self.mx, self.bx, self.my, self.by def _tile_identifier(self, ty, tx): """Get tile identifier (name) for a particular tile row/column.""" st = self.num_subtiles ttc = self.total_tile_count alpha_num = int((ty // st[0]) * (ttc[1] // st[1]) + (tx // st[1])) alpha = string.ascii_uppercase[alpha_num] tile_num = int((ty % st[0]) * st[1] + (tx % st[1])) + 1 return "T{}{:02d}".format(alpha, tile_num) def _generate_tile_info(self): """Create generator of individual tile metadata.""" if self._tile_cache: for tile_info in self._tile_cache: yield tile_info ts = self.tile_shape ul_xy = self.ul_xy x, y = self.x, self.y cw = abs(float(self.area_definition.pixel_size_x)) ch = abs(float(self.area_definition.pixel_size_y)) # where does the data fall in our lettered grid for gy in range(self.min_row, self.max_row + 1): for gx in range(self.min_col, self.max_col + 1): tile_id = self._tile_identifier(gy, gx) # ul_xy is outer-edge of upper-left corner # x/y are center of each data pixel x_left = ul_xy[0] + gx * ts[1] * cw x_right = x_left + ts[1] * cw y_top = ul_xy[1] - gy * ts[0] * ch y_bot = y_top - ts[0] * ch x_mask = np.nonzero((x >= x_left) & (x < x_right))[0] y_mask = np.nonzero((y > y_bot) & (y <= y_top))[0] if not x_mask.any() or not y_mask.any(): # no data in this tile LOG.debug("Tile '%s' doesn't have any data in it", tile_id) continue x_slice = slice(x_mask[0], x_mask[-1] + 1) # assume it's continuous y_slice = slice(y_mask[0], y_mask[-1] + 1) # theoretically we can precompute the X/Y now # instead of taking the x/y data and mapping it # to the tile tmp_x = np.arange(x_left + cw / 2., x_right, cw) tmp_y = np.arange(y_top - ch / 2., y_bot, -ch) data_x_idx_min = np.nonzero(np.isclose(tmp_x, x[x_slice.start]))[0][0] data_x_idx_max = np.nonzero(np.isclose(tmp_x, x[x_slice.stop - 1]))[0][0] # I have a half pixel error some where data_y_idx_min = np.nonzero(np.isclose(tmp_y, y[y_slice.start]))[0][0] data_y_idx_max = np.nonzero(np.isclose(tmp_y, y[y_slice.stop - 1]))[0][0] # now put the data in the grid tile tile_slices = (slice(data_y_idx_min, data_y_idx_max + 1), slice(data_x_idx_min, data_x_idx_max + 1)) data_slices = (y_slice, x_slice) tile_number = self._tile_number(gy, gx) tile_info = TileInfo( self.tile_count, self.image_shape, ts, gy * ts[0], gx * ts[1], tile_id, tile_number, tmp_x, tmp_y, self.xy_factors, tile_slices, data_slices) self._tile_cache.append(tile_info) yield tile_info def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding): dtype_str = encoding['dtype'] dtype = np.dtype(getattr(np, dtype_str)) file_bit_depth = dtype.itemsize * 8 unsigned_in_signed = encoding.get('_Unsigned') == "true" is_unsigned = dtype.kind == 'u' bit_depth = input_data_arr.attrs.get('bit_depth', file_bit_depth) num_fills = 1 # future: possibly support more than one fill value if bit_depth is None: bit_depth = file_bit_depth if 
bit_depth >= file_bit_depth: bit_depth = file_bit_depth else: # don't take away from the data bit depth if there is room in # file data type to allow for extra fill values num_fills = 0 if is_unsigned or unsigned_in_signed: # max value fills = [2 ** file_bit_depth - 1] else: # max value fills = [2 ** (file_bit_depth - 1) - 1] mx = (vmax - vmin) / (2 ** bit_depth - 1 - num_fills) bx = vmin if not is_unsigned and not unsigned_in_signed: bx += 2 ** (bit_depth - 1) * mx return mx, bx, fills[0] def _get_data_vmin_vmax(input_data_arr): input_metadata = input_data_arr.attrs valid_range = input_metadata.get("valid_range") if valid_range: valid_min, valid_max = valid_range else: valid_min = input_metadata.get("valid_min") valid_max = input_metadata.get("valid_max") return valid_min, valid_max def _add_valid_ranges(data_arrs): """Add 'valid_range' metadata if not present. If valid_range or valid_min/valid_max are not present in a DataArrays metadata (``.attrs``), then lazily compute it with dask so it can be computed later when we write tiles out. AWIPS requires that scale_factor/add_offset/_FillValue be the **same** for all tiles. We must do this calculation before splitting the data into tiles otherwise the values will be different. """ for data_arr in data_arrs: vmin, vmax = _get_data_vmin_vmax(data_arr) if vmin is None: # XXX: Do we need to handle category products here? vmin = data_arr.min(skipna=True).data vmax = data_arr.max(skipna=True).data # we don't want to effect the original attrs data_arr = data_arr.copy(deep=False) # these are dask arrays, they need to get computed later data_arr.attrs['valid_range'] = (vmin, vmax) yield data_arr class AWIPSTiledVariableDecisionTree(DecisionTree): """Load AWIPS-specific metadata from YAML configuration.""" def __init__(self, decision_dicts, **kwargs): """Initialize decision tree with specific keys to look for.""" # Fields used to match a product object to it's correct configuration attrs = kwargs.pop('attrs', ["name", "standard_name", "satellite", "sensor", "area_id", "units", "reader"] ) super(AWIPSTiledVariableDecisionTree, self).__init__(decision_dicts, attrs, **kwargs) class NetCDFTemplate: """Helper class to convert a dictionary-based NetCDF template to an :class:`xarray.Dataset`.""" def __init__(self, template_dict): """Parse template dictionary and prepare for rendering.""" self.is_single_variable = template_dict.get('single_variable', False) self.global_attributes = template_dict.get('global_attributes', {}) default_var_config = { "default": { "encoding": {"dtype": "uint16"}, } } self.variables = template_dict.get('variables', default_var_config) default_coord_config = { "default": { "encoding": {"dtype": "uint16"}, } } self.coordinates = template_dict.get('coordinates', default_coord_config) self._var_tree = AWIPSTiledVariableDecisionTree([self.variables]) self._coord_tree = AWIPSTiledVariableDecisionTree([self.coordinates]) self._filename_format_str = template_dict.get('filename') self._str_formatter = StringFormatter() self._template_dict = template_dict def get_filename(self, base_dir='', **kwargs): """Generate output NetCDF file from metadata.""" # format the filename if self._filename_format_str is None: raise ValueError("Template does not have a configured " "'filename' pattern.") fn_format_str = os.path.join(base_dir, self._filename_format_str) filename_parser = Parser(fn_format_str) output_filename = filename_parser.compose(kwargs) dirname = os.path.dirname(output_filename) if dirname and not os.path.isdir(dirname): LOG.info("Creating 
output directory: %s", dirname) os.makedirs(dirname) return output_filename def get_attr_value(self, attr_name, input_metadata, value=None, raw_key=None, raw_value=None, prefix="_"): """Determine attribute value using the provided configuration information. If `value` and `raw_key` are not provided, this method will search for a method named ````, which will be called with one argument (`input_metadata`) to get the value to return. See the documentation for the `prefix` keyword argument below for more information. Args: attr_name (str): Name of the attribute whose value we are generating. input_metadata (dict): Dictionary of metadata from the input DataArray and other context information. Used to provide information to `value` or access data from using `raw_key` if provided. value (Any): Value to assign to this attribute. If a string, it may be a python format string which will be provided the data from `input_metadata`. For example, ``{name}`` will be filled with the value for the ``"name"`` in `input_metadata`. It can also include environment variables (ex. ``"${MY_ENV_VAR}"``) which will be expanded. String formatting is accomplished by the special :class:`trollsift.parser.StringFormatter` which allows for special common conversions. raw_key (str): Key to access value from `input_metadata`, but without any string formatting applied to it. This allows for metadata of non-string types to be requested. raw_value (Any): Static hardcoded value to set this attribute to. Overrides all other options. prefix (str): Prefix to use when `value` and `raw_key` are both ``None``. Default is ``"_"``. This will be used to find custom attribute handlers in subclasses. For example, if `value` and `raw_key` are both ``None`` and `attr_name` is ``"my_attr"``, then the method ``self._my_attr`` will be called as ``return self._my_attr(input_metadata)``. See :meth:`NetCDFTemplate.render_global_attributes` for additional information (prefix is ``"_global_"``). 
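        Example (a hypothetical ``global_attributes`` entry; the attribute name
        is illustrative only, not part of any builtin template)::

            global_attributes:
              awips_id:
                value: "AWIPS_{name}"

        With this configuration the renderer calls
        ``get_attr_value('awips_id', input_metadata, value='AWIPS_{name}', prefix='_global_')``
        and the returned value is the format string filled in with
        ``input_metadata['name']``.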
""" if raw_value is not None: return raw_value if raw_key is not None and raw_key in input_metadata: value = input_metadata[raw_key] return value if isinstance(value, str): try: value = os.path.expandvars(value) value = self._str_formatter.format(value, **input_metadata) except (KeyError, ValueError): LOG.debug("Can't format string '%s' with provided " "input metadata.", value) value = None # raise ValueError("Can't format string '{}' with provided " # "input metadata.".format(value)) if value is not None: return value meth_name = prefix + attr_name func = getattr(self, meth_name, None) if func is not None: value = func(input_metadata) if value is None: LOG.debug('no routine matching %s', meth_name) return value def _render_attrs(self, attr_configs, input_metadata, prefix="_"): attrs = {} for attr_name, attr_config_dict in attr_configs.items(): val = self.get_attr_value(attr_name, input_metadata, prefix=prefix, **attr_config_dict) if val is None: # NetCDF attributes can't have a None value continue attrs[attr_name] = val return attrs def _render_global_attributes(self, input_metadata): attr_configs = self.global_attributes return self._render_attrs(attr_configs, input_metadata, prefix="_global_") def _render_variable_attributes(self, var_config, input_metadata): attr_configs = var_config['attributes'] var_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_data_") return var_attrs def _render_coordinate_attributes(self, coord_config, input_metadata): attr_configs = coord_config['attributes'] coord_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_coord_") return coord_attrs def _render_variable_encoding(self, var_config, input_data_arr): new_encoding = input_data_arr.encoding.copy() # determine fill value and if 'encoding' in var_config: new_encoding.update(var_config['encoding']) new_encoding.setdefault('dtype', 'uint16') return new_encoding def _render_variable(self, data_arr): var_config = self._var_tree.find_match(**data_arr.attrs) new_var_name = var_config.get('var_name', data_arr.attrs['name']) new_data_arr = data_arr.copy() # remove coords which may cause issues later on new_data_arr = new_data_arr.reset_coords(drop=True) var_encoding = self._render_variable_encoding(var_config, data_arr) new_data_arr.encoding = var_encoding var_attrs = self._render_variable_attributes(var_config, data_arr.attrs) new_data_arr.attrs = var_attrs return new_var_name, new_data_arr def _get_matchable_coordinate_metadata(self, coord_name, coord_attrs): match_kwargs = {} if 'name' not in coord_attrs: match_kwargs['name'] = coord_name match_kwargs.update(coord_attrs) return match_kwargs def _render_coordinates(self, ds): new_coords = {} for coord_name, coord_arr in ds.coords.items(): match_kwargs = self._get_matchable_coordinate_metadata(coord_name, coord_arr.attrs) coord_config = self._coord_tree.find_match(**match_kwargs) coord_attrs = self._render_coordinate_attributes(coord_config, coord_arr.attrs) coord_encoding = self._render_variable_encoding(coord_config, coord_arr) new_coords[coord_name] = ds.coords[coord_name].copy() new_coords[coord_name].attrs = coord_attrs new_coords[coord_name].encoding = coord_encoding return new_coords def render(self, dataset_or_data_arrays, shared_attrs=None): """Create :class:`xarray.Dataset` from provided data.""" data_arrays = dataset_or_data_arrays if isinstance(data_arrays, xr.Dataset): data_arrays = data_arrays.data_vars.values() new_ds = xr.Dataset() for data_arr in data_arrays: new_var_name, new_data_arr = 
self._render_variable(data_arr) new_ds[new_var_name] = new_data_arr new_coords = self._render_coordinates(new_ds) new_ds.coords.update(new_coords) # use first data array as "representative" for global attributes # XXX: Should we use global attributes if dataset_or_data_arrays is a Dataset if shared_attrs is None: shared_attrs = data_arrays[0].attrs new_ds.attrs = self._render_global_attributes(shared_attrs) return new_ds class AWIPSNetCDFTemplate(NetCDFTemplate): """NetCDF template renderer specifically for tiled AWIPS files.""" def __init__(self, template_dict, swap_end_time=False): """Handle AWIPS special cases and initialize template helpers.""" self._swap_end_time = swap_end_time if swap_end_time: self._swap_attributes_end_time(template_dict) super().__init__(template_dict) def _swap_attributes_end_time(self, template_dict): """Swap every use of 'start_time' to use 'end_time' instead.""" variable_attributes = [var_section['attributes'] for var_section in template_dict.get('variables', {}).values()] global_attributes = template_dict.get('global_attributes', {}) for attr_section in variable_attributes + [global_attributes]: for attr_name in attr_section: attr_config = attr_section[attr_name] if '{start_time' in attr_config.get('value', ''): attr_config['value'] = attr_config['value'].replace('{start_time', '{end_time') if attr_config.get('raw_key', '') == 'start_time': attr_config['raw_key'] = 'end_time' def _data_units(self, input_metadata): units = input_metadata.get('units', '1') # we *know* AWIPS can't handle some units return UNIT_CONV.get(units, units) def _global_start_date_time(self, input_metadata): start_time = input_metadata['start_time'] if self._swap_end_time: start_time = input_metadata['end_time'] return start_time.strftime("%Y-%m-%dT%H:%M:%S") def _global_awips_id(self, input_metadata): return "AWIPS_" + input_metadata['name'] def _global_physical_element(self, input_metadata): var_config = self._var_tree.find_match(**input_metadata) attr_config = {"physical_element": var_config["attributes"]["physical_element"]} result = self._render_attrs(attr_config, input_metadata, prefix="_data_") return result["physical_element"] def _global_production_location(self, input_metadata): """Get default global production_location attribute.""" del input_metadata org = os.environ.get('ORGANIZATION', None) if org is not None: prod_location = org else: LOG.warning('environment ORGANIZATION not set for .production_location attribute, using hostname') import socket prod_location = socket.gethostname() # FUTURE: something more correct but this will do for now if len(prod_location) > 31: warnings.warn("Production location attribute is longer than 31 " "characters (AWIPS limit). Set it to a smaller " "value with the 'ORGANIZATION' environment " "variable. 
Defaults to hostname and is currently " "set to '{}'.".format(prod_location)) prod_location = prod_location[:31] return prod_location _global_production_site = _global_production_location @staticmethod def _get_vmin_vmax(var_config, input_data_arr): if 'valid_range' in var_config: return var_config['valid_range'] data_vmin, data_vmax = _get_data_vmin_vmax(input_data_arr) return data_vmin, data_vmax def _render_variable_encoding(self, var_config, input_data_arr): new_encoding = super()._render_variable_encoding(var_config, input_data_arr) vmin, vmax = self._get_vmin_vmax(var_config, input_data_arr) has_flag_meanings = 'flag_meanings' in input_data_arr.attrs is_int = np.issubdtype(input_data_arr.dtype, np.integer) is_cat = has_flag_meanings or is_int has_sf = new_encoding.get('scale_factor') is not None if not has_sf and is_cat: # AWIPS doesn't like Identity conversion so we can't have # a factor of 1 and an offset of 0 # new_encoding['scale_factor'] = None # new_encoding['add_offset'] = None if '_FillValue' in input_data_arr.attrs: new_encoding['_FillValue'] = input_data_arr.attrs['_FillValue'] elif not has_sf and vmin is not None and vmax is not None: # calculate scale_factor and add_offset sf, ao, fill = _get_factor_offset_fill( input_data_arr, vmin, vmax, new_encoding ) # NOTE: These could be dask arrays that will be computed later # when we go to write the files. new_encoding['scale_factor'] = sf new_encoding['add_offset'] = ao new_encoding['_FillValue'] = fill new_encoding['coordinates'] = ' '.join([ele for ele in input_data_arr.dims]) return new_encoding def _get_projection_attrs(self, area_def): """Assign projection attributes per CF standard.""" proj_attrs = area_def.crs.to_cf() proj_encoding = {"dtype": "i4"} proj_attrs['short_name'] = area_def.area_id gmap_name = proj_attrs['grid_mapping_name'] preferred_names = { 'geostationary': 'fixedgrid_projection', 'lambert_conformal_conic': 'lambert_projection', 'polar_stereographic': 'polar_projection', 'mercator': 'mercator_projection', } if gmap_name not in preferred_names: LOG.warning("Data is in projection %s which may not be supported " "by AWIPS", gmap_name) area_id_as_var_name = area_def.area_id.replace('-', '_').lower() proj_name = preferred_names.get(gmap_name, area_id_as_var_name) return proj_name, proj_attrs, proj_encoding def _set_xy_coords_attrs(self, new_ds, crs): y_attrs = new_ds.coords['y'].attrs if crs.is_geographic: self._fill_units_and_standard_name(y_attrs, 'degrees_north', 'latitude') else: self._fill_units_and_standard_name(y_attrs, 'meter', 'projection_y_coordinate') y_attrs['axis'] = 'Y' x_attrs = new_ds.coords['x'].attrs if crs.is_geographic: self._fill_units_and_standard_name(x_attrs, 'degrees_east', 'longitude') else: self._fill_units_and_standard_name(x_attrs, 'meter', 'projection_x_coordinate') x_attrs['axis'] = 'X' @staticmethod def _fill_units_and_standard_name(attrs, units, standard_name): """Fill in units and standard_name if not set in `attrs`.""" if attrs.get('units') is None: attrs['units'] = units if attrs.get('standard_name') is None: attrs['standard_name'] = standard_name def apply_area_def(self, new_ds, area_def): """Apply information we can gather from the AreaDefinition.""" gmap_name, gmap_attrs, gmap_encoding = self._get_projection_attrs(area_def) gmap_data_arr = xr.DataArray(0, attrs=gmap_attrs) gmap_data_arr.encoding = gmap_encoding new_ds[gmap_name] = gmap_data_arr self._set_xy_coords_attrs(new_ds, area_def.crs) for data_arr in new_ds.data_vars.values(): if 'y' in data_arr.dims and 'x' in 
data_arr.dims: data_arr.attrs['grid_mapping'] = gmap_name new_ds.attrs['pixel_x_size'] = area_def.pixel_size_x / 1000.0 new_ds.attrs['pixel_y_size'] = area_def.pixel_size_y / 1000.0 return new_ds def apply_tile_coord_encoding(self, new_ds, xy_factors): """Add encoding information specific to the coordinate variables.""" if 'x' in new_ds.coords: new_ds.coords['x'].encoding['dtype'] = 'int16' new_ds.coords['x'].encoding['scale_factor'] = np.float64(xy_factors.mx) new_ds.coords['x'].encoding['add_offset'] = np.float64(xy_factors.bx) new_ds.coords['x'].encoding['_FillValue'] = -1 if 'y' in new_ds.coords: new_ds.coords['y'].encoding['dtype'] = 'int16' new_ds.coords['y'].encoding['scale_factor'] = np.float64(xy_factors.my) new_ds.coords['y'].encoding['add_offset'] = np.float64(xy_factors.by) new_ds.coords['y'].encoding['_FillValue'] = -1 return new_ds def apply_tile_info(self, new_ds, tile_info): """Apply attributes associated with the current tile.""" total_tiles = tile_info.tile_count total_pixels = tile_info.image_shape tile_row = tile_info.tile_row_offset tile_column = tile_info.tile_column_offset tile_height = new_ds.sizes['y'] tile_width = new_ds.sizes['x'] new_ds.attrs['tile_row_offset'] = tile_row new_ds.attrs['tile_column_offset'] = tile_column new_ds.attrs['product_tile_height'] = tile_height new_ds.attrs['product_tile_width'] = tile_width new_ds.attrs['number_product_tiles'] = total_tiles[0] * total_tiles[1] new_ds.attrs['product_rows'] = total_pixels[0] new_ds.attrs['product_columns'] = total_pixels[1] return new_ds def _add_sector_id_global(self, new_ds, sector_id): if not self._template_dict.get('add_sector_id_global'): return if sector_id is None: raise ValueError("Keyword 'sector_id' is required for this " "template.") new_ds.attrs['sector_id'] = sector_id def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_time=None): """Add attributes that don't fit into any other category.""" if creator is None: creator = "Satpy Version {} - AWIPS Tiled Writer".format(__version__) if creation_time is None: creation_time = datetime.utcnow() self._add_sector_id_global(new_ds, sector_id) new_ds.attrs['Conventions'] = "CF-1.7" new_ds.attrs['creator'] = creator new_ds.attrs['creation_time'] = creation_time.strftime('%Y-%m-%dT%H:%M:%S') return new_ds def _render_variable_attributes(self, var_config, input_metadata): attrs = super()._render_variable_attributes(var_config, input_metadata) # AWIPS validation checks if len(attrs.get("units", "")) > 26: warnings.warn( "AWIPS 'units' must be limited to a maximum of 26 characters. 
" "Units '{}' is too long and will be truncated.".format(attrs["units"])) attrs["units"] = attrs["units"][:26] return attrs def render(self, dataset_or_data_arrays, area_def, tile_info, sector_id, creator=None, creation_time=None, shared_attrs=None, extra_global_attrs=None): """Create a :class:`xarray.Dataset` from template using information provided.""" new_ds = super().render(dataset_or_data_arrays, shared_attrs=shared_attrs) new_ds = self.apply_area_def(new_ds, area_def) new_ds = self.apply_tile_coord_encoding(new_ds, tile_info.xy_factors) new_ds = self.apply_tile_info(new_ds, tile_info) new_ds = self.apply_misc_metadata(new_ds, sector_id, creator, creation_time) if extra_global_attrs: new_ds.attrs.update(extra_global_attrs) return new_ds def _notnull(data_arr, check_categories=True): is_int = np.issubdtype(data_arr.dtype, np.integer) fill_value = data_arr.encoding.get('_FillValue', data_arr.attrs.get('_FillValue')) if is_int and fill_value is not None: # some DQF datasets are always valid if check_categories: return data_arr != fill_value return False return data_arr.notnull() def _any_notnull(data_arr, check_categories): not_null = _notnull(data_arr, check_categories) if not_null is False: return False return not_null.any() def _is_empty_tile(dataset_to_save, check_categories): # check if this tile is empty # if so, don't create it for data_var in dataset_to_save.data_vars.values(): if data_var.ndim == 2 and _any_notnull(data_var, check_categories): return False return True def _copy_to_existing(dataset_to_save, output_filename): # Experimental: This function doesn't seem to behave well with xarray file # caching and/or multiple dask workers. It causes tests to hang, but # only sometimes. Limiting dask to 1 worker seems to fix this. # I (David Hoese) was unable to make a script that reproduces this # without using this writer (makes it difficult to file a bug report). existing_dataset = xr.open_dataset(output_filename) # the below used to trick xarray into working, but this doesn't work # in newer versions. This was a hack in the first place so I'm keeping it # here for reference. 
# existing_dataset = existing_dataset.copy(deep=True) # existing_dataset.close() # update existing data with new valid data for var_name, var_data_arr in dataset_to_save.data_vars.items(): if var_name not in existing_dataset: continue if var_data_arr.ndim != 2: continue existing_data_arr = existing_dataset[var_name] valid_current = _notnull(var_data_arr) new_data = existing_data_arr.data[:] new_data[valid_current] = var_data_arr.data[valid_current] var_data_arr.data[:] = new_data var_data_arr.encoding.update(existing_data_arr.encoding) var_data_arr.encoding.pop('source', None) return dataset_to_save def _extract_factors(dataset_to_save): factors = {} for data_var in dataset_to_save.data_vars.values(): enc = data_var.encoding data_var.attrs.pop('valid_range', None) factor_set = (enc.pop('scale_factor', None), enc.pop('add_offset', None), enc.pop('_FillValue', None)) factors[data_var.name] = factor_set return factors def _reapply_factors(dataset_to_save, factors): for var_name, factor_set in factors.items(): data_arr = dataset_to_save[var_name] if factor_set[0] is not None: data_arr.encoding['scale_factor'] = factor_set[0] if factor_set[1] is not None: data_arr.encoding['add_offset'] = factor_set[1] if factor_set[2] is not None: data_arr.encoding['_FillValue'] = factor_set[2] return dataset_to_save def to_nonempty_netcdf(dataset_to_save: xr.Dataset, factors: dict, output_filename: str, update_existing: bool = True, check_categories: bool = True): """Save :class:`xarray.Dataset` to a NetCDF file if not all fills. In addition to checking certain Dataset variables for fill values, this function can also "update" an existing NetCDF file with the new valid data provided. """ dataset_to_save = _reapply_factors(dataset_to_save, factors) if _is_empty_tile(dataset_to_save, check_categories): LOG.debug("Skipping tile creation for %s because it would be " "empty.", output_filename) return None, None, None # TODO: Allow for new variables to be created if update_existing and os.path.isfile(output_filename): dataset_to_save = _copy_to_existing(dataset_to_save, output_filename) mode = 'a' else: mode = 'w' return dataset_to_save, output_filename, mode # return dataset_to_save.to_netcdf(output_filename, mode=mode) # if fix_awips: # fix_awips_file(output_filename) delayed_to_notempty_netcdf = dask.delayed(to_nonempty_netcdf, pure=True) def tile_filler(data_arr_data, tile_shape, tile_slices, fill_value): """Create an empty tile array and fill the proper locations with data.""" empty_tile = np.full(tile_shape, fill_value, dtype=data_arr_data.dtype) empty_tile[tile_slices] = data_arr_data return empty_tile class AWIPSTiledWriter(Writer): """Writer for AWIPS NetCDF4 Tile files. See :mod:`satpy.writers.awips_tiled` documentation for more information on templates and produced file format. """ def __init__(self, compress=False, fix_awips=False, **kwargs): """Initialize writer and decision trees.""" super(AWIPSTiledWriter, self).__init__(default_config_filename="writers/awips_tiled.yaml", **kwargs) self.base_dir = kwargs.get('base_dir', '') self.awips_sectors = self.config['sectors'] self.templates = self.config['templates'] self.compress = compress self.fix_awips = fix_awips self._fill_sector_info() self._enhancer = None if self.fix_awips: warnings.warn("'fix_awips' flag no longer has any effect and is " "deprecated. 
Modern versions of AWIPS should not " "require this hack.", DeprecationWarning) self.fix_awips = False @property def enhancer(self): """Get lazy loaded enhancer object only if needed.""" if self._enhancer is None: self._enhancer = Enhancer() return self._enhancer @classmethod def separate_init_kwargs(cls, kwargs): """Separate keyword arguments by initialization and saving keyword arguments.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(AWIPSTiledWriter, cls).separate_init_kwargs( kwargs) for kw in ['compress', 'fix_awips']: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def _fill_sector_info(self): """Convert sector extents if needed.""" for sector_info in self.awips_sectors.values(): sector_info['projection'] = CRS.from_user_input(sector_info['projection']) p = Proj(sector_info['projection']) if 'lower_left_xy' in sector_info: sector_info['lower_left_lonlat'] = p(*sector_info['lower_left_xy'], inverse=True) else: sector_info['lower_left_xy'] = p(*sector_info['lower_left_lonlat']) if 'upper_right_xy' in sector_info: sector_info['upper_right_lonlat'] = p(*sector_info['upper_right_xy'], inverse=True) else: sector_info['upper_right_xy'] = p(*sector_info['upper_right_lonlat']) def _get_lettered_sector_info(self, sector_id): """Get metadata for the current sector if configured. This is not necessary for numbered grids. If found, the sector info will provide the overall tile layout for this grid/sector. This allows for consistent tile numbering/naming regardless of where the data being converted actually is. """ if sector_id is None: raise TypeError("Keyword 'sector_id' is required for lettered grids.") try: return self.awips_sectors[sector_id] except KeyError: raise ValueError("Unknown sector '{}'".format(sector_id)) def _get_tile_generator(self, area_def, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference=False): """Get the appropriate tile generator class for lettered or numbered tiles.""" # Create a tile generator for this grid definition if lettered_grid: sector_info = self._get_lettered_sector_info(sector_id) tile_gen = LetteredTileGenerator( area_def, sector_info['lower_left_xy'] + sector_info['upper_right_xy'], sector_crs=sector_info['projection'], cell_size=sector_info['resolution'], num_subtiles=num_subtiles, use_sector_reference=use_sector_reference, ) else: tile_gen = NumberedTileGenerator( area_def, tile_shape=tile_size, tile_count=tile_count, ) return tile_gen def _group_by_area(self, datasets): """Group datasets by their area.""" def _area_id(area_def): return area_def.description + str(area_def.area_extent) + str(area_def.shape) # get all of the datasets stored by area area_datasets = {} for x in datasets: area_id = _area_id(x.attrs['area']) area, ds_list = area_datasets.setdefault(area_id, (x.attrs['area'], [])) ds_list.append(x) return area_datasets def _split_rgbs(self, ds): """Split a single RGB dataset in to multiple.""" for component in 'RGB': band_data = ds.sel(bands=component) band_data.attrs['name'] += '_{}'.format(component) band_data.attrs['valid_min'] = 0.0 band_data.attrs['valid_max'] = 1.0 yield band_data def _enhance_and_split_rgbs(self, datasets): """Handle multi-band images by splitting in to separate products.""" new_datasets = [] for ds in datasets: if ds.ndim == 2: new_datasets.append(ds) continue elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and 'bands' not in ds.coords): LOG.error("Can't save datasets with more or less than 2 dimensions " "that 
aren't RGBs to AWIPS Tiled format: %s", ds.name) else: # this is an RGB img = get_enhanced_image(ds.squeeze(), enhance=self.enhancer) res_data = img.finalize(fill_value=0, dtype=np.float32)[0] new_datasets.extend(self._split_rgbs(res_data)) return new_datasets def _tile_filler(self, tile_info, data_arr): fill = np.nan if np.issubdtype(data_arr.dtype, np.floating) else data_arr.attrs.get('_FillValue', 0) data_arr_data = data_arr.data[tile_info.data_slices] data_arr_data = data_arr_data.rechunk(data_arr_data.shape) new_data = da.map_blocks(tile_filler, data_arr_data, tile_info.tile_shape, tile_info.tile_slices, fill, dtype=data_arr.dtype, chunks=tile_info.tile_shape) return xr.DataArray(new_data, dims=('y', 'x'), attrs=data_arr.attrs.copy()) def _slice_and_update_coords(self, tile_info, data_arrays): new_x = xr.DataArray(tile_info.x, dims=('x',)) if 'x' in data_arrays[0].coords: old_x = data_arrays[0].coords['x'] new_x.attrs.update(old_x.attrs) new_x.encoding = old_x.encoding new_y = xr.DataArray(tile_info.y, dims=('y',)) if 'y' in data_arrays[0].coords: old_y = data_arrays[0].coords['y'] new_y.attrs.update(old_y.attrs) new_y.encoding = old_y.encoding for data_arr in data_arrays: new_data_arr = self._tile_filler(tile_info, data_arr) new_data_arr.coords['x'] = new_x new_data_arr.coords['y'] = new_y yield new_data_arr def _iter_tile_info_and_datasets(self, tile_gen, data_arrays, single_variable=True): all_data_arrays = self._enhance_and_split_rgbs(data_arrays) if single_variable: all_data_arrays = [[single_data_arr] for single_data_arr in all_data_arrays] else: all_data_arrays = [all_data_arrays] for data_arrays_set in all_data_arrays: for tile_info in tile_gen(): data_arrays_tile_set = list(self._slice_and_update_coords(tile_info, data_arrays_set)) yield tile_info, data_arrays_tile_set def _iter_area_tile_info_and_datasets(self, area_datasets, template, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference): for area_def, data_arrays in area_datasets.values(): data_arrays = list(_add_valid_ranges(data_arrays)) tile_gen = self._get_tile_generator( area_def, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference=use_sector_reference) for tile_info, data_arrs in self._iter_tile_info_and_datasets( tile_gen, data_arrays, single_variable=template.is_single_variable): yield area_def, tile_info, data_arrs def save_dataset(self, dataset, **kwargs): """Save a single DataArray to one or more NetCDF4 Tile files.""" LOG.warning("For best performance use `save_datasets`") return self.save_datasets([dataset], **kwargs) def get_filename(self, template, area_def, tile_info, sector_id, **kwargs): """Generate output NetCDF file from metadata.""" # format the filename try: return super(AWIPSTiledWriter, self).get_filename( area_id=area_def.area_id, rows=area_def.height, columns=area_def.width, sector_id=sector_id, tile_id=tile_info.tile_id, tile_number=tile_info.tile_number, **kwargs) except RuntimeError: # the user didn't provide a specific filename, use the template return template.get_filename( base_dir=self.base_dir, area_id=area_def.area_id, rows=area_def.height, columns=area_def.width, sector_id=sector_id, tile_id=tile_info.tile_id, tile_number=tile_info.tile_number, **kwargs) def check_tile_exists(self, output_filename): """Check if tile exists and report error accordingly.""" if os.path.isfile(output_filename): LOG.info("AWIPS file already exists, will update with new data: %s", output_filename) def _save_nonempty_mfdatasets(self, 
datasets_to_save, output_filenames, **kwargs): for dataset_to_save, output_filename in zip(datasets_to_save, output_filenames): factors = _extract_factors(dataset_to_save) delayed_res = delayed_to_notempty_netcdf( dataset_to_save, factors, output_filename, **kwargs) yield delayed_res def _adjust_metadata_times(self, ds_info): debug_shift_time = int(os.environ.get("DEBUG_TIME_SHIFT", 0)) if debug_shift_time: ds_info["start_time"] += timedelta(minutes=debug_shift_time) ds_info["end_time"] += timedelta(minutes=debug_shift_time) def _get_tile_data_info(self, data_arrs, creation_time, source_name): # use the first data array as a "representative" for the group ds_info = data_arrs[0].attrs.copy() # we want to use our own creation_time ds_info['creation_time'] = creation_time if source_name is not None: ds_info['source_name'] = source_name self._adjust_metadata_times(ds_info) return ds_info # TODO: Add additional untiled variable support def save_datasets(self, datasets, sector_id=None, source_name=None, tile_count=(1, 1), tile_size=None, lettered_grid=False, num_subtiles=None, use_end_time=False, use_sector_reference=False, template='polar', check_categories=True, extra_global_attrs=None, environment_prefix='DR', compute=True, **kwargs): """Write a series of DataArray objects to multiple NetCDF4 Tile files. Args: datasets (iterable): Series of gridded :class:`~xarray.DataArray` objects with the necessary metadata to be converted to a valid tile product file. sector_id (str): Name of the region or sector that the provided data is on. This name will be written to the NetCDF file and will be used as the sector in the AWIPS client for the 'polar' template. For lettered grids this name should match the name configured in the writer YAML. This is required for some templates (ex. default 'polar' template) but is defined as a keyword argument for better error handling in Satpy. source_name (str): Name of producer of these files (ex. "SSEC"). This name is used to create the output filename for some templates. environment_prefix (str): Prefix of filenames for some templates. For operational real-time data this is usually "OR", "OT" for test data, "IR" for test system real-time data, and "IT" for test system test data. This defaults to "DR" for "Developer Real-time" to avoid anyone accidentally producing files that could be mistaken for the operational system. tile_count (tuple): For numbered tiles only, how many tile rows and tile columns to produce. Default to ``(1, 1)``, a single giant tile. Either ``tile_count``, ``tile_size``, or ``lettered_grid`` should be specified. tile_size (tuple): For numbered tiles only, how many pixels each tile should be. This takes precedence over ``tile_count`` if specified. Either ``tile_count``, ``tile_size``, or ``lettered_grid`` should be specified. lettered_grid (bool): Whether to use a preconfigured grid and label tiles with letters and numbers instead of only numbers. For example, tiles will be named "A01", "A02", "B01", and so on in the first row of data and continue on to "A03", "A04", and "B03" in the default case where ``num_subtiles`` is (2, 2). Letters start in the upper-left corner and will go from A up to Z, if necessary. num_subtiles (tuple): For lettered tiles only, how many rows and columns to split each lettered tile in to. By default 2 rows and 2 columns will be created. For example, the tile for letter "A" will have "A01" and "A02" in the top row and "A03" and "A04" in the second row. 
use_end_time (bool): Instead of using the ``start_time`` for the product filename and time written to the file, use the ``end_time``. This is useful for multi-day composites where the ``end_time`` is a better representation of what data is in the file. use_sector_reference (bool): For lettered tiles only, whether to shift the data locations to align with the preconfigured grid's pixels. By default this is False meaning that the grid's tiles will be shifted to align with the data locations. If True, the data is shifted. At most the data will be shifted by 0.5 pixels. See :mod:`satpy.writers.awips_tiled` for more information. template (str or dict): Name of the template configured in the writer YAML file. This can also be a dictionary with a full template configuration. See the :mod:`satpy.writers.awips_tiled` documentation for more information on templates. Defaults to the 'polar' builtin template. check_categories (bool): Whether category and flag products should be included in the checks for empty or not empty tiles. In some cases (ex. data quality flags) category products may look like all valid data (a non-empty tile) but shouldn't be used to determine the emptiness of the overall tile (good quality versus non-existent). Default is True. Set to False to ignore category (integer dtype or "flag_meanings" defined) when checking for valid data. extra_global_attrs (dict): Additional global attributes to be added to every produced file. These attributes are applied at the end of template rendering and will therefore overwrite template generated values with the same global attribute name. compute (bool): Compute and write the output immediately using dask. Default to ``False``. """ if not isinstance(template, dict): template = self.config['templates'][template] template = AWIPSNetCDFTemplate(template, swap_end_time=use_end_time) area_data_arrs = self._group_by_area(datasets) datasets_to_save = [] output_filenames = [] creation_time = datetime.utcnow() area_tile_data_gen = self._iter_area_tile_info_and_datasets( area_data_arrs, template, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference) for area_def, tile_info, data_arrs in area_tile_data_gen: # TODO: Create Dataset object of all of the sliced-DataArrays (optional) ds_info = self._get_tile_data_info(data_arrs, creation_time, source_name) output_filename = self.get_filename(template, area_def, tile_info, sector_id, environment_prefix=environment_prefix, **ds_info) self.check_tile_exists(output_filename) # TODO: Provide attribute caching for things that likely won't change (functools lrucache) new_ds = template.render(data_arrs, area_def, tile_info, sector_id, creation_time=creation_time, shared_attrs=ds_info, extra_global_attrs=extra_global_attrs) if self.compress: new_ds.encoding['zlib'] = True for var in new_ds.variables.values(): var.encoding['zlib'] = True datasets_to_save.append(new_ds) output_filenames.append(output_filename) if not datasets_to_save: # no tiles produced return [] delayed_gen = self._save_nonempty_mfdatasets(datasets_to_save, output_filenames, check_categories=check_categories, update_existing=True) delayeds = self._delay_netcdf_creation(delayed_gen) if not compute: return delayeds return dask.compute(delayeds) def _delay_netcdf_creation(self, delayed_gen, precompute=True, use_distributed=False): """Workaround random dask and xarray hanging executions. In previous implementations this writer called 'to_dataset' directly in a delayed function. 
This seems to cause random deadlocks where execution would hang indefinitely. """ delayeds = [] if precompute: dataset_iter = self._get_delayed_iter(use_distributed) for dataset_to_save, output_filename, mode in dataset_iter(delayed_gen): delayed_save = dataset_to_save.to_netcdf(output_filename, mode, compute=False) delayeds.append(delayed_save) else: for delayed_result in delayed_gen: delayeds.append(delayed_result) return delayeds @staticmethod def _get_delayed_iter(use_distributed=False): if use_distributed: def dataset_iter(_delayed_gen): from dask.distributed import as_completed, get_client client = get_client() futures = client.compute(list(_delayed_gen)) for _, (dataset_to_save, output_filename, mode) in as_completed(futures, with_results=True): if dataset_to_save is None: continue yield dataset_to_save, output_filename, mode else: def dataset_iter(_delayed_gen): # compute all datasets results = dask.compute(_delayed_gen)[0] for result in results: if result[0] is None: continue yield result return dataset_iter def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'): from PIL import Image, ImageDraw, ImageFont from pkg_resources import resource_filename as get_resource_filename size = (1000, 1000) img = Image.new("L", size, 0) draw = ImageDraw.Draw(img) if ':' in font_path: # load from a python package font_path = get_resource_filename(*font_path.split(':')) font = ImageFont.truetype(font_path, 25) ll_extent = sector_info['lower_left_xy'] ur_extent = sector_info['upper_right_xy'] total_meters_x = ur_extent[0] - ll_extent[0] total_meters_y = ur_extent[1] - ll_extent[1] fcs_x = np.ceil(float(sector_info['resolution'][1]) / num_subtiles[1]) fcs_y = np.ceil(float(sector_info['resolution'][0]) / num_subtiles[0]) total_cells_x = np.ceil(total_meters_x / fcs_x) total_cells_y = np.ceil(total_meters_y / fcs_y) total_cells_x = np.ceil(total_cells_x / num_subtiles[1]) * num_subtiles[1] total_cells_y = np.ceil(total_cells_y / num_subtiles[0]) * num_subtiles[0] # total_alpha_cells_x = int(total_cells_x / num_subtiles[1]) # total_alpha_cells_y = int(total_cells_y / num_subtiles[0]) # "round" the total meters up to the number of alpha cells # total_meters_x = total_cells_x * fcs_x # total_meters_y = total_cells_y * fcs_y # Pixels per tile ppt_x = np.floor(float(size[0]) / total_cells_x) ppt_y = np.floor(float(size[1]) / total_cells_y) half_ppt_x = np.floor(ppt_x / 2.) half_ppt_y = np.floor(ppt_y / 2.) # Meters per pixel meters_ppx = fcs_x / ppt_x meters_ppy = fcs_y / ppt_y for idx, alpha in enumerate(string.ascii_uppercase): for i in range(4): st_x = i % num_subtiles[1] st_y = int(i / num_subtiles[1]) t = "{}{:02d}".format(alpha, i + 1) t_size = font.getsize(t) cell_x = (idx * num_subtiles[1] + st_x) % total_cells_x cell_y = int(idx / (total_cells_x / num_subtiles[1])) * num_subtiles[0] + st_y if (cell_x > total_cells_x) or (cell_y > total_cells_y): continue x = ppt_x * cell_x + half_ppt_x y = ppt_y * cell_y + half_ppt_y # draw box around the tile edge # PIL Documentation: "The second point is just outside the drawn rectangle." # we want to be just inside 0 and just inside the outer edge of the tile draw_rectangle(draw, (x - half_ppt_x, y - half_ppt_y, x + half_ppt_x, y + half_ppt_y), outline=255, fill=75, width=3) draw.text((x - t_size[0] / 2., y - t_size[1] / 2.), t, fill=255, font=font) img.save("test.png") new_extents = ( ll_extent[0], ur_extent[1] - 1001. * meters_ppy, ll_extent[0] + 1001. 
* meters_ppx, ur_extent[1], ) grid_def = AreaDefinition( 'debug_grid', 'debug_grid', 'debug_grid', sector_info['projection'], 1000, 1000, new_extents ) return grid_def, np.array(img) def draw_rectangle(draw, coordinates, outline=None, fill=None, width=1): """Draw simple rectangle in to a numpy array image.""" for i in range(width): rect_start = (coordinates[0] + i, coordinates[1] + i) rect_end = (coordinates[2] - i, coordinates[3] - i) draw.rectangle((rect_start, rect_end), outline=outline, fill=fill) def create_debug_lettered_tiles(**writer_kwargs): """Create tile files with tile identifiers "burned" in to the image data for debugging.""" writer_kwargs['lettered_grid'] = True writer_kwargs['num_subtiles'] = (2, 2) # default, don't use command line argument init_kwargs, save_kwargs = AWIPSTiledWriter.separate_init_kwargs(**writer_kwargs) writer = AWIPSTiledWriter(**init_kwargs) sector_id = save_kwargs['sector_id'] sector_info = writer.awips_sectors[sector_id] area_def, arr = _create_debug_array(sector_info, save_kwargs['num_subtiles']) now = datetime.utcnow() product = xr.DataArray(da.from_array(arr, chunks='auto'), attrs=dict( name='debug_{}'.format(sector_id), platform_name='DEBUG', sensor='TILES', start_time=now, end_time=now, area=area_def, standard_name="toa_bidirectional_reflectance", units='1', valid_min=0, valid_max=255, )) created_files = writer.save_dataset( product, **save_kwargs ) return created_files def main(): """Command line interface mimicing CSPP Polar2Grid.""" import argparse parser = argparse.ArgumentParser(description="Create AWIPS compatible NetCDF tile files") parser.add_argument("--create-debug", action='store_true', help='Create debug NetCDF files to show tile locations in AWIPS') parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0, help='each occurrence increases verbosity 1 level through ' 'ERROR-WARNING-INFO-DEBUG (default INFO)') parser.add_argument('-l', '--log', dest="log_fn", default=None, help="specify the log filename") group_1 = parser.add_argument_group(title="Writer Initialization") group_1.add_argument("--backend-configs", nargs="*", dest="backend_configs", help="alternative backend configuration files") group_1.add_argument("--compress", action="store_true", help="zlib compress each netcdf file") # group_1.add_argument("--fix-awips", action="store_true", # help="modify NetCDF output to work with the old/broken AWIPS NetCDF library") group_2 = parser.add_argument_group(title="Wrtier Save") group_2.add_argument("--tiles", dest="tile_count", nargs=2, type=int, default=[1, 1], help="Number of tiles to produce in Y (rows) and X (cols) direction respectively") group_2.add_argument("--tile-size", dest="tile_size", nargs=2, type=int, default=None, help="Specify how many pixels are in each tile (overrides '--tiles')") # group.add_argument('--tile-offset', nargs=2, default=(0, 0), # help="Start counting tiles from this offset ('row_offset col_offset')") group_2.add_argument("--letters", dest="lettered_grid", action='store_true', help="Create tiles from a static letter-based grid based on the product projection") group_2.add_argument("--letter-subtiles", nargs=2, type=int, default=(2, 2), help="Specify number of subtiles in each lettered tile: \'row col\'") group_2.add_argument("--output-pattern", default=DEFAULT_OUTPUT_PATTERN, help="output filenaming pattern") group_2.add_argument("--source-name", default='SSEC', help="specify processing source name used in attributes and filename (default 'SSEC')") 
group_2.add_argument("--sector-id", required=True, help="specify name for sector/region used in attributes and filename (example 'LCC')") group_2.add_argument("--template", default='polar', help="specify the template name to use (default: polar)") args = parser.parse_args() # Logs are renamed once data the provided start date is known levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] logging.basicConfig(level=levels[min(3, args.verbosity)], filename=args.log_fn) if args.create_debug: writer_kwargs = vars(args) create_debug_lettered_tiles(**writer_kwargs) return else: raise NotImplementedError("Command line interface not implemented yet for AWIPS tiled writer") if __name__ == '__main__': sys.exit(main()) satpy-0.34.0/satpy/writers/cf_writer.py000066400000000000000000001061221420401153000201230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer for netCDF4/CF. Example usage ------------- The CF writer saves datasets in a Scene as `CF-compliant`_ netCDF file. Here is an example with MSG SEVIRI data in HRIT format: >>> from satpy import Scene >>> import glob >>> filenames = glob.glob('data/H*201903011200*') >>> scn = Scene(filenames=filenames, reader='seviri_l1b_hrit') >>> scn.load(['VIS006', 'IR_108']) >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc', exclude_attrs=['raw_metadata']) * You can select the netCDF backend using the ``engine`` keyword argument. If `None` if follows :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. * For datasets with area definition you can exclude lat/lon coordinates by setting ``include_lonlats=False``. * By default non-dimensional coordinates (such as scanline timestamps) are prefixed with the corresponding dataset name. This is because they are likely to be different for each dataset. If a non-dimensional coordinate is identical for all datasets, the prefix can be removed by setting ``pretty=True``. * Some dataset names start with a digit, like AVHRR channels 1, 2, 3a, 3b, 4 and 5. This doesn't comply with CF https://cfconventions.org/Data/cf-conventions/cf-conventions-1.7/build/ch02s03.html. These channels are prefixed with `CHANNEL_` by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`. Setting it to `None` or `''` will skip the prefixing. Grouping ~~~~~~~~ All datasets to be saved must have the same projection coordinates ``x`` and ``y``. If a scene holds datasets with different grids, the CF compliant workaround is to save the datasets to separate files. 
Alternatively, you can save datasets with common grids in separate netCDF groups as follows: >>> scn.load(['VIS006', 'IR_108', 'HRV']) >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108', 'HRV'], filename='seviri_test.nc', exclude_attrs=['raw_metadata'], groups={'visir': ['VIS006', 'IR_108'], 'hrv': ['HRV']}) Note that the resulting file will not be fully CF compliant. Dataset Encoding ~~~~~~~~~~~~~~~~ Dataset encoding can be specified in two ways: 1) Via the ``encoding`` keyword argument of ``save_datasets``: >>> my_encoding = { ... 'my_dataset_1': { ... 'zlib': True, ... 'complevel': 9, ... 'scale_factor': 0.01, ... 'add_offset': 100, ... 'dtype': np.int16 ... }, ... 'my_dataset_2': { ... 'zlib': False ... } ... } >>> scn.save_datasets(writer='cf', filename='encoding_test.nc', encoding=my_encoding) 2) Via the ``encoding`` attribute of the datasets in a scene. For example >>> scn['my_dataset'].encoding = {'zlib': False} >>> scn.save_datasets(writer='cf', filename='encoding_test.nc') See the `xarray encoding documentation`_ for all encoding options. Attribute Encoding ~~~~~~~~~~~~~~~~~~ In the above examples, raw metadata from the HRIT files have been excluded. If you want all attributes to be included, just remove the ``exclude_attrs`` keyword argument. By default, dict-type dataset attributes, such as the raw metadata, are encoded as a string using json. Thus, you can use json to decode them afterwards: >>> import xarray as xr >>> import json >>> # Save scene to nc-file >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc') >>> # Now read data from the nc-file >>> ds = xr.open_dataset('seviri_test.nc') >>> raw_mda = json.loads(ds['IR_108'].attrs['raw_metadata']) >>> print(raw_mda['RadiometricProcessing']['Level15ImageCalibration']['CalSlope']) [0.020865 0.0278287 0.0232411 0.00365867 0.00831811 0.03862197 0.12674432 0.10396091 0.20503568 0.22231115 0.1576069 0.0352385] Alternatively it is possible to flatten dict-type attributes by setting ``flatten_attrs=True``. This is more human readable as it will create a separate nc-attribute for each item in every dictionary. Keys are concatenated with underscore separators. The `CalSlope` attribute can then be accessed as follows: >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc', flatten_attrs=True) >>> ds = xr.open_dataset('seviri_test.nc') >>> print(ds['IR_108'].attrs['raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalSlope']) [0.020865 0.0278287 0.0232411 0.00365867 0.00831811 0.03862197 0.12674432 0.10396091 0.20503568 0.22231115 0.1576069 0.0352385] This is what the corresponding ``ncdump`` output would look like in this case: .. code-block:: none $ ncdump -h test_seviri.nc ... IR_108:raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalOffset = -1.064, ...; IR_108:raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalSlope = 0.021, ...; IR_108:raw_metadata_RadiometricProcessing_MPEFCalFeedback_AbsCalCoeff = 0.021, ...; ... .. _CF-compliant: http://cfconventions.org/ .. 
_xarray encoding documentation: http://xarray.pydata.org/en/stable/user-guide/io.html?highlight=encoding#writing-encoded-data """ import copy import json import logging import warnings from collections import OrderedDict, defaultdict from datetime import datetime from distutils.version import LooseVersion import numpy as np import xarray as xr from dask.base import tokenize from pyresample.geometry import AreaDefinition, SwathDefinition from xarray.coding.times import CFDatetimeCoder from satpy.writers import Writer from satpy.writers.utils import flatten_dict logger = logging.getLogger(__name__) EPOCH = u"seconds since 1970-01-01 00:00:00" # Check availability of either netCDF4 or h5netcdf package try: import netCDF4 except ImportError: netCDF4 = None try: import h5netcdf except ImportError: h5netcdf = None # Ensure that either netCDF4 or h5netcdf is available to avoid silent failure if netCDF4 is None and h5netcdf is None: raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.') # Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), np.dtype('int16'), np.dtype('uint16'), np.dtype('int32'), np.dtype('uint32'), np.dtype('int64'), np.dtype('uint64'), np.dtype('float32'), np.dtype('float64'), np.string_] # Unsigned and int64 isn't CF 1.7 compatible CF_DTYPES = [np.dtype('int8'), np.dtype('int16'), np.dtype('int32'), np.dtype('float32'), np.dtype('float64'), np.string_] CF_VERSION = 'CF-1.7' def create_grid_mapping(area): """Create the grid mapping instance for `area`.""" import pyproj if LooseVersion(pyproj.__version__) < LooseVersion('2.4.1'): # technically 2.2, but important bug fixes in 2.4.1 raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") # let pyproj do the heavily lifting # pyproj 2.0+ required grid_mapping = area.crs.to_cf() return area.area_id, grid_mapping def get_extra_ds(dataset, keys=None): """Get the extra datasets associated to *dataset*.""" ds_collection = {} for ds in dataset.attrs.get('ancillary_variables', []): if keys and ds.name not in keys: keys.append(ds.name) ds_collection.update(get_extra_ds(ds, keys)) ds_collection[dataset.attrs['name']] = dataset return ds_collection def area2lonlat(dataarray): """Convert an area to longitudes and latitudes.""" dataarray = dataarray.copy() area = dataarray.attrs['area'] ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']} chunks = getattr(dataarray.isel(**ignore_dims), 'chunks', None) lons, lats = area.get_lonlats(chunks=chunks) dataarray['longitude'] = xr.DataArray(lons, dims=['y', 'x'], attrs={'name': "longitude", 'standard_name': "longitude", 'units': 'degrees_east'}, name='longitude') dataarray['latitude'] = xr.DataArray(lats, dims=['y', 'x'], attrs={'name': "latitude", 'standard_name': "latitude", 'units': 'degrees_north'}, name='latitude') return dataarray def area2gridmapping(dataarray): """Convert an area to at CF grid mapping.""" dataarray = dataarray.copy() area = dataarray.attrs['area'] gmapping_var_name, attrs = create_grid_mapping(area) dataarray.attrs['grid_mapping'] = gmapping_var_name return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) def area2cf(dataarray, strict=False, got_lonlats=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or strict): 
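        # Swath data carries no CF grid mapping, so explicit longitude/latitude
        # coordinates are added here; ``strict=True`` requests them for gridded
        # areas as well.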
dataarray = area2lonlat(dataarray) if isinstance(dataarray.attrs['area'], AreaDefinition): dataarray, gmapping = area2gridmapping(dataarray) res.append(gmapping) res.append(dataarray) return res def make_time_bounds(start_times, end_times): """Create time bounds for the current *dataarray*.""" start_time = min(start_time for start_time in start_times if start_time is not None) end_time = min(end_time for end_time in end_times if end_time is not None) data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]], dims=['time', 'bnds_1d']) return data def assert_xy_unique(datas): """Check that all datasets share the same projection coordinates x/y.""" unique_x = set() unique_y = set() for dataset in datas.values(): if 'y' in dataset.dims: token_y = tokenize(dataset['y'].data) unique_y.add(token_y) if 'x' in dataset.dims: token_x = tokenize(dataset['x'].data) unique_x.add(token_x) if len(unique_x) > 1 or len(unique_y) > 1: raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' 'Please group them by area or save them in separate files.') def link_coords(datas): """Link dataarrays and coordinates. If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set automatically. """ for da_name, data in datas.items(): declared_coordinates = data.attrs.get('coordinates', []) if isinstance(declared_coordinates, str): declared_coordinates = declared_coordinates.split(' ') for coord in declared_coordinates: if coord not in data.coords: try: dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) except KeyError: warnings.warn('Coordinate "{}" referenced by dataarray {} does not exist, dropping reference.' .format(coord, da_name)) continue # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() data.attrs.pop('coordinates', None) def dataset_is_projection_coords(dataset): """Check if dataset is a projection coords.""" if 'standard_name' in dataset.attrs and dataset.attrs['standard_name'] in ['longitude', 'latitude']: return True return False def has_projection_coords(ds_collection): """Check if collection has a projection coords among data arrays.""" for dataset in ds_collection.values(): if dataset_is_projection_coords(dataset): return True return False def make_alt_coords_unique(datas, pretty=False): """Make non-dimensional coordinates unique among all datasets. Non-dimensional (or alternative) coordinates, such as scanline timestamps, may occur in multiple datasets with the same name and dimension but different values. In order to avoid conflicts, prepend the dataset name to the coordinate name. If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, its name will not be modified. Since all datasets must have the same projection coordinates, this is not applied to latitude and longitude. Args: datas (dict): Dictionary of (dataset name, dataset) pretty (bool): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. 
Returns: Dictionary holding the updated datasets """ # Determine which non-dimensional coordinates are unique tokens = defaultdict(set) for dataset in datas.values(): for coord_name in dataset.coords: if not dataset_is_projection_coords(dataset[coord_name]) and coord_name not in dataset.dims: tokens[coord_name].add(tokenize(dataset[coord_name].data)) coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) # Prepend dataset name, if not unique or no pretty-format desired new_datas = datas.copy() for coord_name, unique in coords_unique.items(): if not pretty or not unique: if pretty: warnings.warn('Cannot pretty-format "{}" coordinates because they are not unique among the ' 'given datasets'.format(coord_name)) for ds_name, dataset in datas.items(): if coord_name in dataset.coords: rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} new_datas[ds_name] = new_datas[ds_name].rename(rename) return new_datas class AttributeEncoder(json.JSONEncoder): """JSON encoder for dataset attributes.""" def default(self, obj): """Return a json-serializable object for *obj*. In order to facilitate decoding, elements in dictionaries, lists/tuples and multi-dimensional arrays are encoded recursively. """ if isinstance(obj, dict): serialized = {} for key, val in obj.items(): serialized[key] = self.default(val) return serialized elif isinstance(obj, (list, tuple, np.ndarray)): return [self.default(item) for item in obj] return self._encode(obj) def _encode(self, obj): """Encode the given object as a json-serializable datatype.""" if isinstance(obj, (bool, np.bool_)): # Bool has to be checked first, because it is a subclass of int return str(obj).lower() elif isinstance(obj, (int, float, str)): return obj elif isinstance(obj, np.integer): return int(obj) elif isinstance(obj, np.floating): return float(obj) elif isinstance(obj, np.void): return tuple(obj) elif isinstance(obj, np.ndarray): return obj.tolist() return str(obj) def _encode_nc(obj): """Try to encode `obj` as a netcdf compatible datatype which most closely resembles the object's nature. Raises: ValueError if no such datatype could be found """ if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): return obj elif isinstance(obj, (float, str, np.integer, np.floating)): return obj elif isinstance(obj, np.ndarray): # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 if is_plain_1d: if obj.dtype in NC4_DTYPES: return obj elif obj.dtype == np.bool_: # Boolean arrays are not supported, convert to array of strings. return [s.lower() for s in obj.astype(str)] return obj.tolist() raise ValueError('Unable to encode') def encode_nc(obj): """Encode the given object as a netcdf compatible datatype.""" try: return obj.to_cf() except AttributeError: return _encode_python_objects(obj) def _encode_python_objects(obj): """Try to find the datatype which most closely resembles the object's nature. If on failure, encode as a string. Plain lists are encoded recursively. """ if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): return [encode_nc(item) for item in obj] try: dump = _encode_nc(obj) except ValueError: try: # Decode byte-strings decoded = obj.decode() except AttributeError: decoded = obj dump = json.dumps(decoded, cls=AttributeEncoder).strip('"') return dump def encode_attrs_nc(attrs): """Encode dataset attributes in a netcdf compatible datatype. 
Args: attrs (dict): Attributes to be encoded Returns: dict: Encoded (and sorted) attributes """ encoded_attrs = [] for key, val in sorted(attrs.items()): if val is not None: encoded_attrs.append((key, encode_nc(val))) return OrderedDict(encoded_attrs) def _set_default_chunks(encoding, dataset): """Update encoding to preserve current dask chunks. Existing user-defined chunks take precedence. """ for var_name, variable in dataset.variables.items(): if variable.chunks: chunks = tuple( np.stack([variable.data.chunksize, variable.shape]).min(axis=0) ) # Chunksize may not exceed shape encoding.setdefault(var_name, {}) encoding[var_name].setdefault('chunksizes', chunks) def _set_default_fill_value(encoding, dataset): """Set default fill values. Avoid _FillValue attribute being added to coordinate variables (https://github.com/pydata/xarray/issues/1865). """ coord_vars = [] for data_array in dataset.values(): coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) for coord_var in coord_vars: encoding.setdefault(coord_var, {}) encoding[coord_var].update({'_FillValue': None}) def _set_default_time_encoding(encoding, dataset): """Set default time encoding. Make sure time coordinates and bounds have the same units. Default is xarray's CF datetime encoding, which can be overridden by user-defined encoding. """ if 'time' in dataset: try: dtnp64 = dataset['time'].data[0] except IndexError: dtnp64 = dataset['time'].data default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']} time_enc.update(encoding.get('time', {})) bounds_enc = {'units': time_enc['units'], 'calendar': time_enc['calendar'], '_FillValue': None} encoding['time'] = time_enc encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ def _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix): """Set Netcdf variable names encoding according to numeric_name_prefix. A lot of channel names in satpy starts with a digit. When writing data with the satpy_cf_nc these channels are prepended with numeric_name_prefix. This ensures this is also done with any matching variables in encoding. """ for _var_name, _variable in dataset.variables.items(): if not numeric_name_prefix or not _var_name.startswith(numeric_name_prefix): continue _orig_var_name = _var_name.replace(numeric_name_prefix, '') if _orig_var_name in encoding: encoding[_var_name] = encoding.pop(_orig_var_name) def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): """Update encoding. Preserve dask chunks, avoid fill values in coordinate variables and make sure that time & time bounds have the same units. 
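    A minimal usage sketch (the dataset and encoding entries are illustrative):

    >>> kwargs = {'encoding': {'my_dataset': {'zlib': True, 'complevel': 9}}}
    >>> encoding, other_kwargs = update_encoding(dataset, kwargs)
    >>> dataset.to_netcdf('out.nc', encoding=encoding, **other_kwargs)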
""" other_to_netcdf_kwargs = to_netcdf_kwargs.copy() encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix) _set_default_chunks(encoding, dataset) _set_default_fill_value(encoding, dataset) _set_default_time_encoding(encoding, dataset) return encoding, other_to_netcdf_kwargs def _handle_dataarray_name(original_name, numeric_name_prefix): name = original_name if name[0].isdigit(): if numeric_name_prefix: name = numeric_name_prefix + original_name else: warnings.warn('Invalid NetCDF dataset name: {} starts with a digit.'.format(name)) return original_name, name def _get_compression(compression): warnings.warn("The default behaviour of the CF writer will soon change to not compress data by default.", FutureWarning) if compression is None: compression = {'zlib': True} else: warnings.warn("The `compression` keyword will soon be deprecated. Please use the `encoding` of the " "DataArrays to tune compression from now on.", FutureWarning) return compression def _set_history(root): _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) if 'history' in root.attrs: if isinstance(root.attrs['history'], list): root.attrs['history'] = ''.join(root.attrs['history']) root.attrs['history'] += '\n' + _history_create else: root.attrs['history'] = _history_create def _get_groups(groups, datasets, root): if groups is None: # Groups are not CF-1.7 compliant if 'Conventions' not in root.attrs: root.attrs['Conventions'] = CF_VERSION # Write all datasets to the file root without creating a group groups_ = {None: datasets} else: # User specified a group assignment using dataset names. Collect the corresponding datasets. groups_ = defaultdict(list) for dataset in datasets: for group_name, group_members in groups.items(): if dataset.attrs['name'] in group_members: groups_[group_name].append(dataset) break return groups_ class CFWriter(Writer): """Writer producing NetCDF/CF compatible datasets.""" @staticmethod def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, compression=None, include_orig_name=True, numeric_name_prefix='CHANNEL_'): """Convert the dataarray to something cf-compatible. 
Args: dataarray (xr.DataArray): The data array to be converted epoch (str): Reference time for encoding of time coordinates flatten_attrs (bool): If True, flatten dict-type attributes exclude_attrs (list): List of dataset attributes to be excluded include_orig_name (bool): Include the original dataset name in the netcdf variable attributes numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ if exclude_attrs is None: exclude_attrs = [] original_name = None new_data = dataarray.copy() if 'name' in new_data.attrs: name = new_data.attrs.pop('name') original_name, name = _handle_dataarray_name(name, numeric_name_prefix) new_data = new_data.rename(name) CFWriter._remove_satpy_attributes(new_data) # Remove area as well as user-defined attributes for key in ['area'] + exclude_attrs: new_data.attrs.pop(key, None) anc = [ds.attrs['name'] for ds in new_data.attrs.get('ancillary_variables', [])] if anc: new_data.attrs['ancillary_variables'] = ' '.join(anc) # TODO: make this a grid mapping or lon/lats # new_data.attrs['area'] = str(new_data.attrs.get('area')) CFWriter._cleanup_attrs(new_data) if compression is not None: new_data.encoding.update(compression) new_data = CFWriter._encode_time(new_data, epoch) new_data = CFWriter._encode_coords(new_data) if 'long_name' not in new_data.attrs and 'standard_name' not in new_data.attrs: new_data.attrs['long_name'] = new_data.name if 'prerequisites' in new_data.attrs: new_data.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in new_data.attrs['prerequisites']] if include_orig_name and numeric_name_prefix and original_name and original_name != name: new_data.attrs['original_name'] = original_name # Flatten dict-type attributes, if desired if flatten_attrs: new_data.attrs = flatten_dict(new_data.attrs) # Encode attributes to netcdf-compatible datatype new_data.attrs = encode_attrs_nc(new_data.attrs) return new_data @staticmethod def _cleanup_attrs(new_data): for key, val in new_data.attrs.copy().items(): if val is None: new_data.attrs.pop(key) if key == 'ancillary_variables' and val == []: new_data.attrs.pop(key) @staticmethod def _encode_coords(new_data): if 'x' in new_data.coords: new_data['x'].attrs['standard_name'] = 'projection_x_coordinate' new_data['x'].attrs['units'] = 'm' if 'y' in new_data.coords: new_data['y'].attrs['standard_name'] = 'projection_y_coordinate' new_data['y'].attrs['units'] = 'm' if 'crs' in new_data.coords: new_data = new_data.drop_vars('crs') return new_data @staticmethod def _encode_time(new_data, epoch): if 'time' in new_data.coords: new_data['time'].encoding['units'] = epoch new_data['time'].attrs['standard_name'] = 'time' new_data['time'].attrs.pop('bounds', None) new_data = CFWriter._add_time_dimension(new_data) return new_data @staticmethod def _add_time_dimension(new_data): if 'time' not in new_data.dims and new_data["time"].size not in new_data.shape: new_data = new_data.expand_dims('time') return new_data @staticmethod def _remove_satpy_attributes(new_data): # Remove _satpy* attributes satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] for satpy_attr in satpy_attrs: new_data.attrs.pop(satpy_attr) new_data.attrs.pop('_last_resampler', None) @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" warnings.warn('CFWriter.update_encoding is deprecated. 
' 'Use satpy.writers.cf_writer.update_encoding instead.', DeprecationWarning) return update_encoding(dataset, to_netcdf_kwargs) def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, compression=None, include_orig_name=True, numeric_name_prefix='CHANNEL_'): """Collect and prepare datasets to be written.""" ds_collection = {} for ds in datasets: ds_collection.update(get_extra_ds(ds)) got_lonlats = has_projection_coords(ds_collection) datas = {} start_times = [] end_times = [] # sort by name, but don't use the name for _, ds in sorted(ds_collection.items()): if ds.dtype not in CF_DTYPES: warnings.warn('Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION)) # we may be adding attributes, coordinates, or modifying the # structure of attributes ds = ds.copy(deep=True) try: new_datasets = area2cf(ds, strict=include_lonlats, got_lonlats=got_lonlats) except KeyError: new_datasets = [ds] for new_ds in new_datasets: start_times.append(new_ds.attrs.get("start_time", None)) end_times.append(new_ds.attrs.get("end_time", None)) new_var = self.da2cf(new_ds, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, compression=compression, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) datas[new_var.name] = new_var # Check and prepare coordinates assert_xy_unique(datas) link_coords(datas) datas = make_alt_coords_unique(datas, pretty=pretty) return datas, start_times, end_times def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, compression=None, include_orig_name=True, numeric_name_prefix='CHANNEL_', **to_netcdf_kwargs): """Save the given datasets in one netCDF file. Note that all datasets (if grouping: in one group) must have the same projection coordinates. Args: datasets (list): Datasets to be saved filename (str): Output file groups (dict): Group datasets according to the given assignment: `{'group_name': ['dataset1', 'dataset2', ...]}`. Group name `None` corresponds to the root of the file, i.e. no group will be created. Warning: The results will not be fully CF compliant! header_attrs: Global attributes to be included engine (str): Module to be used for writing netCDF files. Follows xarray's :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. epoch (str): Reference time for encoding of time coordinates flatten_attrs (bool): If True, flatten dict-type attributes exclude_attrs (list): List of dataset attributes to be excluded include_lonlats (bool): Always include latitude and longitude coordinates, even for datasets with area definition pretty (bool): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. compression (dict): Compression to use on the datasets before saving, for example {'zlib': True, 'complevel': 9}. This is in turn passed the xarray's `to_netcdf` method: http://xarray.pydata.org/en/stable/generated/xarray.Dataset.to_netcdf.html for more possibilities. (This parameter is now being deprecated, please use the DataArrays's `encoding` from now on.) include_orig_name (bool). 
Include the original dataset name as an varaibel attribute in the final netcdf numeric_name_prefix (str): Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. """ logger.info('Saving datasets to NetCDF4/CF.') compression = _get_compression(compression) # Write global attributes to file root (creates the file) filename = filename or self.get_filename(**datasets[0].attrs) root = xr.Dataset({}, attrs={}) if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) root.attrs = encode_attrs_nc(header_attrs) _set_history(root) # Remove satpy-specific kwargs to_netcdf_kwargs = copy.deepcopy(to_netcdf_kwargs) # may contain dictionaries (encoding) satpy_kwargs = ['overlay', 'decorate', 'config_files'] for kwarg in satpy_kwargs: to_netcdf_kwargs.pop(kwarg, None) init_nc_kwargs = to_netcdf_kwargs.copy() init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point init_nc_kwargs.pop('unlimited_dims', None) groups_ = _get_groups(groups, datasets, root) written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] # Write datasets to groups (appending to the file; group=None means no group) for group_name, group_datasets in groups_.items(): # XXX: Should we combine the info of all datasets? datas, start_times, end_times = self._collect_datasets( group_datasets, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, include_lonlats=include_lonlats, pretty=pretty, compression=compression, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) dataset = xr.Dataset(datas) if 'time' in dataset: dataset['time_bnds'] = make_time_bounds(start_times, end_times) dataset['time'].attrs['bounds'] = "time_bnds" dataset['time'].attrs['standard_name'] = "time" else: grp_str = ' of group {}'.format(group_name) if group_name is not None else '' logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str)) encoding, other_to_netcdf_kwargs = update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix) res = dataset.to_netcdf(filename, engine=engine, group=group_name, mode='a', encoding=encoding, **other_to_netcdf_kwargs) written.append(res) return written satpy-0.34.0/satpy/writers/geotiff.py000066400000000000000000000300411420401153000175560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GeoTIFF writer objects for creating GeoTIFF files from `DataArray` objects.""" import logging import numpy as np # make sure we have rasterio even though we don't use it until trollimage # saves the image import rasterio # noqa from satpy.writers import ImageWriter LOG = logging.getLogger(__name__) class GeoTIFFWriter(ImageWriter): """Writer to save GeoTIFF images. 
Basic example from Scene: >>> scn.save_datasets(writer='geotiff') By default the writer will use the :class:`~satpy.writers.Enhancer` class to linear stretch the data (see :doc:`../enhancements`). To get Un-enhanced images ``enhance=False`` can be specified which will write a geotiff with the data type of the dataset. The fill value defaults to the the datasets ``"_FillValue"`` attribute if not ``None`` and no value is passed to ``fill_value`` for integer data. In case of float data if ``fill_value`` is not passed NaN will be used. If a geotiff with a certain datatype is desired for example 32 bit floating point geotiffs: >>> scn.save_datasets(writer='geotiff', dtype=np.float32, enhance=False) To add custom metadata use `tags`: >>> scn.save_dataset(dataset_name, writer='geotiff', ... tags={'offset': 291.8, 'scale': -0.35}) Images are tiled by default. To create striped TIFF files ``tiled=False`` can be specified: >>> scn.save_datasets(writer='geotiff', tiled=False) For performance tips on creating geotiffs quickly and making them smaller see the :ref:`faq`. """ GDAL_OPTIONS = ("tfw", "rpb", "rpctxt", "interleave", "tiled", "blockxsize", "blockysize", "nbits", "compress", "num_threads", "predictor", "discard_lsb", "sparse_ok", "jpeg_quality", "jpegtablesmode", "zlevel", "photometric", "alpha", "profile", "bigtiff", "pixeltype", "copy_src_overviews", # COG driver options (different from GTiff above) "blocksize", "resampling", "quality", "level", "overview_resampling", "warp_resampling", "overview_compress", "overview_quality", "overview_predictor", "tiling_scheme", "zoom_level_strategy", "target_srs", "res", "extent", "aligned_levels", "add_alpha", ) def __init__(self, dtype=None, tags=None, **kwargs): """Init the writer.""" super(GeoTIFFWriter, self).__init__(default_config_filename="writers/geotiff.yaml", **kwargs) self.dtype = self.info.get("dtype") if dtype is None else dtype self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) # GDAL specific settings self.gdal_options = {} for k in self.GDAL_OPTIONS: if k in kwargs or k in self.info: self.gdal_options[k] = kwargs.get(k, self.info[k]) @classmethod def separate_init_kwargs(cls, kwargs): """Separate the init keyword args.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(GeoTIFFWriter, cls).separate_init_kwargs( kwargs) for kw in ['dtype', 'tags']: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def save_image(self, img, filename=None, dtype=None, fill_value=None, compute=True, keep_palette=False, cmap=None, tags=None, overviews=None, overviews_minsize=256, overviews_resampling=None, include_scale_offset=False, scale_offset_tags=None, driver=None, tiled=True, **kwargs): """Save the image to the given ``filename`` in geotiff_ format. Note for faster output and reduced memory usage the ``rasterio`` library must be installed. This writer currently falls back to using ``gdal`` directly, but that will be deprecated in the future. Args: img (xarray.DataArray): Data to save to geotiff. filename (str): Filename to save the image to. Defaults to ``filename`` passed during writer creation. Unlike the creation ``filename`` keyword argument, this filename does not get formatted with data attributes. dtype (numpy.dtype): Numpy data type to save the image as. 
Defaults to 8-bit unsigned integer (``np.uint8``) or the data type of the data to be saved if ``enhance=False``. If the ``dtype`` argument is provided during writer creation then that will be used as the default. fill_value (int or float): Value to use where data values are NaN/null. If this is specified in the writer configuration file that value will be used as the default. compute (bool): Compute dask arrays and save the image immediately. If ``False`` then the return value can be passed to :func:`~satpy.writers.compute_writer_results` to do the computation. This is useful when multiple images may share input calculations where dask can benefit from not repeating them multiple times. Defaults to ``True`` in the writer by itself, but is typically passed as ``False`` by callers where calculations can be combined. keep_palette (bool): Save palette/color table to geotiff. To be used with images that were palettized with the "palettize" enhancement. Setting this to ``True`` will cause the colormap of the image to be written as a "color table" in the output geotiff and the image data values will represent the index values in to that color table. By default, this will use the colormap used in the "palettize" operation. See the ``cmap`` option for other options. This option defaults to ``False`` and palettized images will be converted to RGB/A. cmap (trollimage.colormap.Colormap or None): Colormap to save as a color table in the output geotiff. See ``keep_palette`` for more information. Defaults to the palette of the provided ``img`` object. The colormap's range should be set to match the index range of the palette (ex. `cmap.set_range(0, len(colors))`). tags (dict): Extra metadata to store in geotiff. overviews (list): The reduction factors of the overviews to include in the image, eg:: scn.save_datasets(overviews=[2, 4, 8, 16]) If provided as an empty list, then levels will be computed as powers of two until the last level has less pixels than `overviews_minsize`. Default is to not add overviews. overviews_minsize (int): Minimum number of pixels for the smallest overview size generated when `overviews` is auto-generated. Defaults to 256. overviews_resampling (str): Resampling method to use when generating overviews. This must be the name of an enum value from :class:`rasterio.enums.Resampling` and only takes effect if the `overviews` keyword argument is provided. Common values include `nearest` (default), `bilinear`, `average`, and many others. See the rasterio documentation for more information. scale_offset_tags (Tuple[str, str]): If set, include inclusion of scale and offset in the GeoTIFF headers in the GDALMetaData tag. The value of this argument should be a keyword argument ``(scale_label, offset_label)``, for example, ``("scale", "offset")``, indicating the labels to be used. include_scale_offset (deprecated, bool): Deprecated. Use ``scale_offset_tags=("scale", "offset")`` to include scale and offset tags. tiled (bool): For performance this defaults to ``True``. Pass ``False`` to created striped TIFF files. .. 
_geotiff: http://trac.osgeo.org/geotiff/ """ filename = filename or self.get_filename(**img.data.attrs) gdal_options = self._get_gdal_options(kwargs) if fill_value is None: # fall back to fill_value from configuration file fill_value = self.info.get('fill_value') dtype = dtype if dtype is not None else self.dtype if dtype is None and self.enhancer is not False: dtype = np.uint8 elif dtype is None: dtype = img.data.dtype.type if "alpha" in kwargs: raise ValueError( "Keyword 'alpha' is automatically set based on 'fill_value' " "and should not be specified") if np.issubdtype(dtype, np.floating): if img.mode != "L": raise ValueError("Image must be in 'L' mode for floating " "point geotiff saving") if fill_value is None: LOG.debug("Alpha band not supported for float geotiffs, " "setting fill value to 'NaN'") fill_value = np.nan if keep_palette and cmap is None and img.palette is not None: from satpy.enhancements import create_colormap cmap = create_colormap({'colors': img.palette}) cmap.set_range(0, len(img.palette) - 1) if tags is None: tags = {} tags.update(self.tags) return img.save(filename, fformat='tif', driver=driver, fill_value=fill_value, dtype=dtype, compute=compute, keep_palette=keep_palette, cmap=cmap, tags=tags, include_scale_offset_tags=include_scale_offset, scale_offset_tags=scale_offset_tags, overviews=overviews, overviews_resampling=overviews_resampling, overviews_minsize=overviews_minsize, tiled=tiled, **gdal_options) def _get_gdal_options(self, kwargs): # Update global GDAL options with these specific ones gdal_options = self.gdal_options.copy() for k in kwargs: if k in self.GDAL_OPTIONS: gdal_options[k] = kwargs[k] return gdal_options satpy-0.34.0/satpy/writers/mitiff.py000066400000000000000000001000041420401153000174060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018, 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """MITIFF writer objects for creating MITIFF files from `Dataset` objects.""" import logging import dask import numpy as np from satpy.dataset import DataID, DataQuery from satpy.writers import ImageWriter, get_enhanced_image IMAGEDESCRIPTION = 270 LOG = logging.getLogger(__name__) KELVIN_TO_CELSIUS = -273.15 def _adjust_kwargs(dataset, kwargs): if 'platform_name' not in kwargs: kwargs['platform_name'] = dataset.attrs['platform_name'] if 'name' not in kwargs: kwargs['name'] = dataset.attrs['name'] if 'start_time' not in kwargs: kwargs['start_time'] = dataset.attrs['start_time'] if 'sensor' not in kwargs: kwargs['sensor'] = dataset.attrs['sensor'] # Sensor attrs could be set. MITIFFs needing to handle sensor can only have one sensor # Assume the first value of set as the sensor. 
if isinstance(kwargs['sensor'], set): LOG.warning('Sensor is set, will use the first value: %s', kwargs['sensor']) kwargs['sensor'] = (list(kwargs['sensor']))[0] class MITIFFWriter(ImageWriter): """Writer to produce MITIFF image files.""" def __init__(self, name=None, tags=None, **kwargs): """Initialize reader with tag and other configuration information.""" ImageWriter.__init__(self, name=name, default_config_filename="writers/mitiff.yaml", **kwargs) self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) self.mitiff_config = {} self.translate_channel_name = {} self.channel_order = {} self.palette = False self.sensor = None def save_image(self): """Save dataset as an image array.""" raise NotImplementedError("save_image mitiff is not implemented.") def save_dataset(self, dataset, filename=None, fill_value=None, compute=True, **kwargs): """Save single dataset as mitiff file.""" LOG.debug("Starting in mitiff save_dataset ... ") def _delayed_create(dataset): try: if 'palette' in kwargs: self.palette = kwargs['palette'] _adjust_kwargs(dataset, kwargs) try: self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config'] self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order'] self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern'] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass try: self.translate_channel_name[kwargs['sensor']] = \ dataset.attrs['metadata_requirements']['translate'] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass image_description = self._make_image_description(dataset, **kwargs) gen_filename = filename or self.get_filename(**dataset.attrs) LOG.info("Saving mitiff to: %s ...", gen_filename) self._save_datasets_as_mitiff(dataset, image_description, gen_filename, **kwargs) except (KeyError, ValueError, RuntimeError): raise delayed = dask.delayed(_delayed_create)(dataset) if compute: return delayed.compute() return delayed def save_datasets(self, datasets, filename=None, fill_value=None, compute=True, **kwargs): """Save all datasets to one or more files.""" LOG.debug("Starting in mitiff save_datasets ... ") def _delayed_create(datasets): dataset = datasets[0] try: _adjust_kwargs(dataset, kwargs) try: self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config'] translate = dataset.attrs['metadata_requirements']['translate'] self.translate_channel_name[kwargs['sensor']] = translate self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order'] self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern'] except KeyError: # For some mitiff products this info is needed, for others not. 
# If needed you should know how to fix this pass image_description = self._make_image_description(datasets, **kwargs) LOG.debug("File pattern %s", self.file_pattern) if isinstance(datasets, list): kwargs['start_time'] = dataset.attrs['start_time'] else: kwargs['start_time'] = datasets.attrs['start_time'] gen_filename = filename or self.get_filename(**kwargs) LOG.info("Saving mitiff to: %s ...", gen_filename) self._save_datasets_as_mitiff(datasets, image_description, gen_filename, **kwargs) except (KeyError, ValueError, RuntimeError): raise delayed = dask.delayed(_delayed_create)(datasets) LOG.debug("About to call delayed compute ...") if compute: return delayed.compute() return delayed def _make_channel_list(self, datasets, **kwargs): channels = [] try: if self.channel_order: channels = self._reorder_channels(datasets, **kwargs) elif self.palette: if 'palette_channel_name' in kwargs: channels.append(kwargs['palette_channel_name'].upper()) else: LOG.error("Is palette but can not find palette_channel_name to name the dataset") else: for ch in range(len(datasets)): channels.append(ch + 1) except KeyError: for ch in range(len(datasets)): channels.append(ch + 1) return channels def _reorder_channels(self, datasets, **kwargs): channels = [] for cn in self.channel_order[kwargs['sensor']]: for ch, ds in enumerate(datasets): if isinstance(ds.attrs['prerequisites'][ch], (DataQuery, DataID)): if ds.attrs['prerequisites'][ch]['name'] == cn: channels.append( ds.attrs['prerequisites'][ch]['name']) break else: if ds.attrs['prerequisites'][ch] == cn: channels.append( ds.attrs['prerequisites'][ch]) break return channels def _channel_names(self, channels, cns, **kwargs): _image_description = "" for ch in channels: try: _image_description += str( self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias']) except KeyError: _image_description += str(ch) _image_description += ' ' # Replace last char(space) with \n _image_description = _image_description[:-1] _image_description += '\n' return _image_description def _add_sizes(self, datasets, first_dataset): _image_description = ' Xsize: ' if isinstance(datasets, list): _image_description += str(first_dataset.sizes['x']) + '\n' else: _image_description += str(datasets.sizes['x']) + '\n' _image_description += ' Ysize: ' if isinstance(datasets, list): _image_description += str(first_dataset.sizes['y']) + '\n' else: _image_description += str(datasets.sizes['y']) + '\n' return _image_description def _add_proj4_string(self, datasets, first_dataset): proj4_string = " Proj string: " if isinstance(datasets, list): area = first_dataset.attrs['area'] else: area = datasets.attrs['area'] # Use pyproj's CRS object to get a valid EPSG code if possible # only in newer pyresample versions with pyproj 2.0+ installed if hasattr(area, 'crs') and area.crs.to_epsg() is not None: proj4_string += "+init=EPSG:{}".format(area.crs.to_epsg()) else: proj4_string += area.proj_str x_0 = 0 y_0 = 0 # FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible proj4_string, x_0 = self._convert_epsg_to_proj(proj4_string, x_0) if 'geos' in proj4_string: proj4_string = proj4_string.replace("+sweep=x ", "") if '+a=6378137.0 +b=6356752.31414' in proj4_string: proj4_string = proj4_string.replace("+a=6378137.0 +b=6356752.31414", "+ellps=WGS84") if '+units=m' in proj4_string: proj4_string = proj4_string.replace("+units=m", "+units=km") if not any(datum in proj4_string for datum in ['datum', 'towgs84']): proj4_string += ' +towgs84=0,0,0' if 'units' not in proj4_string: proj4_string += ' +units=km' 
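        # MITIFF expects kilometre units and an explicit datum, hence the proj
        # string adjustments above; the projection centre (x_0/y_0) is appended next.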
proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0) LOG.debug("proj4_string: %s", proj4_string) proj4_string += '\n' return proj4_string def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, y_0): if isinstance(datasets, list): dataset = first_dataset else: dataset = datasets if 'x_0' not in proj4_string: proj4_string += ' +x_0=%.6f' % ( (-dataset.attrs['area'].area_extent[0] + dataset.attrs['area'].pixel_size_x) + x_0) proj4_string += ' +y_0=%.6f' % ( (-dataset.attrs['area'].area_extent[1] + dataset.attrs['area'].pixel_size_y) + y_0) elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string: proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % ( (-dataset.attrs['area'].area_extent[0] + dataset.attrs['area'].pixel_size_x) + x_0)) proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % ( (-dataset.attrs['area'].area_extent[1] + dataset.attrs['area'].pixel_size_y) + y_0)) return proj4_string def _convert_epsg_to_proj(self, proj4_string, x_0): if 'EPSG:32631' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32631", "+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG:32632' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32632", "+proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG:32633' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32633", "+proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG:32634' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32634", "+proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG:32635' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32635", "+proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG' in proj4_string: LOG.warning("EPSG used in proj string but not converted. Please add this in code") return proj4_string, x_0 def _add_pixel_sizes(self, datasets, first_dataset): _image_description = "" if isinstance(datasets, list): _image_description += ' Ax: %.6f' % ( first_dataset.attrs['area'].pixel_size_x / 1000.) _image_description += ' Ay: %.6f' % ( first_dataset.attrs['area'].pixel_size_y / 1000.) else: _image_description += ' Ax: %.6f' % ( datasets.attrs['area'].pixel_size_x / 1000.) _image_description += ' Ay: %.6f' % ( datasets.attrs['area'].pixel_size_y / 1000.) return _image_description def _add_corners(self, datasets, first_dataset): # But this ads up to upper left corner of upper left pixel. # But need to use the center of the pixel. # Therefor use the center of the upper left pixel. _image_description = "" if isinstance(datasets, list): _image_description += ' Bx: %.6f' % ( first_dataset.attrs['area'].area_extent[0] / 1000. + first_dataset.attrs['area'].pixel_size_x / 1000. / 2.) # LL_x _image_description += ' By: %.6f' % ( first_dataset.attrs['area'].area_extent[3] / 1000. - first_dataset.attrs['area'].pixel_size_y / 1000. / 2.) # UR_y else: _image_description += ' Bx: %.6f' % ( datasets.attrs['area'].area_extent[0] / 1000. + datasets.attrs['area'].pixel_size_x / 1000. / 2.) # LL_x _image_description += ' By: %.6f' % ( datasets.attrs['area'].area_extent[3] / 1000. - datasets.attrs['area'].pixel_size_y / 1000. / 2.) 
# UR_y _image_description += '\n' return _image_description def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, decimals): _reverse_offset = reverse_offset _reverse_scale = reverse_scale _decimals = decimals _table_calibration = "" found_calibration = False skip_calibration = False ds_list = datasets if not isinstance(datasets, list) and 'bands' not in datasets.sizes: ds_list = [datasets] for i, ds in enumerate(ds_list): if ('prerequisites' in ds.attrs and isinstance(ds.attrs['prerequisites'], list) and len(ds.attrs['prerequisites']) >= i + 1 and isinstance(ds.attrs['prerequisites'][i], (DataQuery, DataID))): if ds.attrs['prerequisites'][i].get('name') == str(ch): if ds.attrs['prerequisites'][i].get('calibration') == 'RADIANCE': raise NotImplementedError( "Mitiff radiance calibration not implemented.") # _table_calibration += ', Radiance, ' # _table_calibration += '[W/m²/µm/sr]' # _decimals = 8 elif ds.attrs['prerequisites'][i].get('calibration') == 'brightness_temperature': found_calibration = True _table_calibration += ', BT, ' _table_calibration += u'\u00B0' # '\u2103' _table_calibration += u'[C]' _reverse_offset = 255. _reverse_scale = -1. _decimals = 2 elif ds.attrs['prerequisites'][i].get('calibration') == 'reflectance': found_calibration = True _table_calibration += ', Reflectance(Albedo), ' _table_calibration += '[%]' _decimals = 2 else: LOG.warning("Unknown calib type. Must be Radiance, Reflectance or BT.") break else: continue else: _table_calibration = "" skip_calibration = True break if not found_calibration: _table_calibration = "" skip_calibration = True # How to format string by passing the format # http://stackoverflow.com/questions/1598579/rounding-decimals-with-new-python-format-function return skip_calibration, _table_calibration, _reverse_offset, _reverse_scale, _decimals def _add_palette_info(self, datasets, palette_unit, palette_description, **kwargs): # mitiff key word for palette interpretion _palette = '\n COLOR INFO:\n' # mitiff info for the unit of the interpretion _palette += ' {}\n'.format(palette_unit) # The length of the palette description as needed by mitiff in DIANA _palette += ' {}\n'.format(len(palette_description)) for desc in palette_description: _palette += ' {}\n'.format(desc) return _palette def _add_calibration(self, channels, cns, datasets, **kwargs): _table_calibration = "" skip_calibration = False for ch in channels: palette = False # Make calibration. if palette: raise NotImplementedError("Mitiff palette saving is not implemented.") else: _table_calibration += 'Table_calibration: ' try: _table_calibration += str( self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias']) except KeyError: _table_calibration += str(ch) _reverse_offset = 0. _reverse_scale = 1. 
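                # These defaults suit reflectance-like channels; brightness
                # temperature channels invert offset/scale in _add_calibration_datasets.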
_decimals = 2 skip_calibration, __table_calibration, _reverse_offset, _reverse_scale, _decimals = \ self._add_calibration_datasets(ch, datasets, _reverse_offset, _reverse_scale, _decimals) _table_calibration += __table_calibration if not skip_calibration: _table_calibration += ', 8, [ ' for val in range(0, 256): # Comma separated list of values _table_calibration += '{0:.{1}f} '.format((float(self.mitiff_config[ kwargs['sensor']][cns.get(ch, ch)]['min-val']) + ((_reverse_offset + _reverse_scale * val) * (float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['max-val']) - float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['min-val']))) / 255.), _decimals) # _table_calibration += '0.00000000 ' _table_calibration += ']\n\n' else: _table_calibration = "" return _table_calibration def _make_image_description(self, datasets, **kwargs): r"""Generate image description for mitiff. Satellite: NOAA 18 Date and Time: 06:58 31/05-2016 SatDir: 0 Channels: 6 In this file: 1-VIS0.63 2-VIS0.86 3(3B)-IR3.7 4-IR10.8 5-IR11.5 6(3A)-VIS1.6 Xsize: 4720 Ysize: 5544 Map projection: Stereographic Proj string: +proj=stere +lon_0=0 +lat_0=90 +lat_ts=60 +ellps=WGS84 +towgs84=0,0,0 +units=km +x_0=2526000.000000 +y_0=5806000.000000 TrueLat: 60 N GridRot: 0 Xunit:1000 m Yunit: 1000 m NPX: 0.000000 NPY: 0.000000 Ax: 1.000000 Ay: 1.000000 Bx: -2526.000000 By: -262.000000 Satellite: Date and Time: SatDir: 0 Channels: In this file: Xsize: Ysize: Map projection: Stereographic Proj string: TrueLat: 60 N GridRot: 0 Xunit:1000 m Yunit: 1000 m NPX: 0.000000 NPY: 0.000000 Ax: Ay: Bx: By: if palette image write special palette if normal channel write table calibration: Table_calibration: , , [], , []\n\n """ translate_platform_name = {'metop01': 'Metop-B', 'metop02': 'Metop-A', 'metop03': 'Metop-C', 'noaa15': 'NOAA-15', 'noaa16': 'NOAA-16', 'noaa17': 'NOAA-17', 'noaa18': 'NOAA-18', 'noaa19': 'NOAA-19'} first_dataset = datasets if isinstance(datasets, list): LOG.debug("Datasets is a list of dataset") first_dataset = datasets[0] _platform_name = self._get_platform_name(first_dataset, translate_platform_name, kwargs) _image_description = '' _image_description.encode('utf-8') _image_description += ' Satellite: ' if _platform_name is not None: _image_description += _platform_name _image_description += '\n' _image_description += ' Date and Time: ' # Select earliest start_time first = True earliest = 0 for dataset in datasets: if first: earliest = dataset.attrs['start_time'] else: if dataset.attrs['start_time'] < earliest: earliest = dataset.attrs['start_time'] first = False LOG.debug("earliest start_time: %s", earliest) _image_description += earliest.strftime("%H:%M %d/%m-%Y\n") _image_description += ' SatDir: 0\n' _image_description += ' Channels: ' _image_description += self._get_dataset_len(datasets) _image_description += ' In this file: ' channels = self._make_channel_list(datasets, **kwargs) try: cns = self.translate_channel_name.get(kwargs['sensor'], {}) except KeyError: pass _image_description += self._channel_names(channels, cns, **kwargs) _image_description += self._add_sizes(datasets, first_dataset) _image_description += ' Map projection: Stereographic\n' _image_description += self._add_proj4_string(datasets, first_dataset) _image_description += ' TrueLat: 60N\n' _image_description += ' GridRot: 0\n' _image_description += ' Xunit:1000 m Yunit: 1000 m\n' _image_description += ' NPX: %.6f' % (0) _image_description += ' NPY: %.6f' % (0) + '\n' _image_description += self._add_pixel_sizes(datasets, first_dataset) 
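        # The corner values (Bx/By) added below refer to the centre of the
        # upper-left pixel, not its outer edge.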
_image_description += self._add_corners(datasets, first_dataset) if isinstance(datasets, list): LOG.debug("Area extent: %s", first_dataset.attrs['area'].area_extent) else: LOG.debug("Area extent: %s", datasets.attrs['area'].area_extent) if self.palette: LOG.debug("Doing palette image") _image_description += self._add_palette_info(datasets, **kwargs) else: _image_description += self._add_calibration(channels, cns, datasets, **kwargs) return _image_description def _get_dataset_len(self, datasets): if isinstance(datasets, list): LOG.debug("len datasets: %s", len(datasets)) dataset_len = str(len(datasets)) elif 'bands' in datasets.sizes: LOG.debug("len datasets: %s", datasets.sizes['bands']) dataset_len = str(datasets.sizes['bands']) elif len(datasets.sizes) == 2: LOG.debug("len datasets: 1") dataset_len = '1' else: dataset_len = "" return dataset_len def _get_platform_name(self, first_dataset, translate_platform_name, kwargs): if 'platform_name' in first_dataset.attrs: _platform_name = translate_platform_name.get( first_dataset.attrs['platform_name'], first_dataset.attrs['platform_name']) elif 'platform_name' in kwargs: _platform_name = translate_platform_name.get( kwargs['platform_name'], kwargs['platform_name']) else: _platform_name = None return _platform_name def _calibrate_data(self, dataset, calibration, min_val, max_val): reverse_offset = 0. reverse_scale = 1. if calibration == 'brightness_temperature': # If data is brightness temperature, the data must be inverted. reverse_offset = 255. reverse_scale = -1. dataset.data += KELVIN_TO_CELSIUS # Need to possible translate channels names from satpy to mitiff _data = reverse_offset + reverse_scale * ((dataset.data - float(min_val)) / (float(max_val) - float(min_val))) * 255. return _data.clip(0, 255) def _save_as_palette(self, tif, datasets, **kwargs): # MITIFF palette has only one data channel if len(datasets.dims) == 2: LOG.debug("Palette ok with only 2 dimensions. ie only x and y") # 3 = Palette color. In this model, a color is described with a single component. # The value of the component is used as an index into the red, green and blue curves # in the ColorMap field to retrieve an RGB triplet that defines the color. When # PhotometricInterpretation=3 is used, ColorMap must be present and SamplesPerPixel must be 1. tif.SetField('PHOTOMETRIC', 3) # As write_image can not save tiff image as palette, this has to be done basicly # ie. all needed tags needs to be set. 
tif.SetField('IMAGEWIDTH', datasets.sizes['x']) tif.SetField('IMAGELENGTH', datasets.sizes['y']) tif.SetField('BITSPERSAMPLE', 8) tif.SetField('COMPRESSION', tif.get_tag_define('deflate')) if 'palette_color_map' in kwargs: tif.SetField('COLORMAP', kwargs['palette_color_map']) else: LOG.ERROR("In a mitiff palette image a color map must be provided: palette_color_map is missing.") data_type = np.uint8 # Looks like we need to pass the data to writeencodedstrip as ctypes cont_data = np.ascontiguousarray(datasets.data, data_type) tif.WriteEncodedStrip(0, cont_data.ctypes.data, datasets.sizes['x'] * datasets.sizes['y']) tif.WriteDirectory() def _save_as_enhanced(self, tif, datasets, **kwargs): """Save datasets as an enhanced RGB image.""" img = get_enhanced_image(datasets.squeeze(), enhance=self.enhancer) if 'bands' in img.data.sizes and 'bands' not in datasets.sizes: LOG.debug("Datasets without 'bands' become image with 'bands' due to enhancement.") LOG.debug("Needs to regenerate mitiff image description") image_description = self._make_image_description(img.data, **kwargs) tif.SetField(IMAGEDESCRIPTION, (image_description).encode('utf-8')) for band in img.data['bands']: chn = img.data.sel(bands=band) data = chn.values.clip(0, 1) * 254. + 1 data = data.clip(0, 255) tif.write_image(data.astype(np.uint8), compression='deflate') def _save_datasets_as_mitiff(self, datasets, image_description, gen_filename, **kwargs): """Put all together and save as a tiff file. Include the special tags making it a mitiff file. """ from libtiff import TIFF tif = TIFF.open(gen_filename, mode='wb') tif.SetField(IMAGEDESCRIPTION, (image_description).encode('utf-8')) cns = self.translate_channel_name.get(kwargs['sensor'], {}) if isinstance(datasets, list): LOG.debug("Saving datasets as list") for _cn in self.channel_order[kwargs['sensor']]: for dataset in datasets: if dataset.attrs['name'] == _cn: # Need to possible translate channels names from satpy to mitiff cn = cns.get(dataset.attrs['name'], dataset.attrs['name']) data = self._calibrate_data(dataset, dataset.attrs['calibration'], self.mitiff_config[kwargs['sensor']][cn]['min-val'], self.mitiff_config[kwargs['sensor']][cn]['max-val']) tif.write_image(data.astype(np.uint8), compression='deflate') break elif 'dataset' in datasets.attrs['name']: self._save_single_dataset(datasets, cns, tif, kwargs) elif self.palette: LOG.debug("Saving dataset as palette.") self._save_as_palette(tif, datasets, **kwargs) else: LOG.debug("Saving datasets as enhanced image") self._save_as_enhanced(tif, datasets, **kwargs) del tif def _save_single_dataset(self, datasets, cns, tif, kwargs): LOG.debug("Saving %s as a dataset.", datasets.attrs['name']) if len(datasets.dims) == 2 and (all('bands' not in i for i in datasets.dims)): # Special case with only one channel ie. no bands # Need to possible translate channels names from satpy to mitiff # Note the last index is a tuple index. cn = cns.get(datasets.attrs['prerequisites'][0]['name'], datasets.attrs['prerequisites'][0]['name']) data = self._calibrate_data(datasets, datasets.attrs['prerequisites'][0].get('calibration'), self.mitiff_config[kwargs['sensor']][cn]['min-val'], self.mitiff_config[kwargs['sensor']][cn]['max-val']) tif.write_image(data.astype(np.uint8), compression='deflate') else: for _cn_i, _cn in enumerate(self.channel_order[kwargs['sensor']]): for band in datasets['bands']: if band == _cn: chn = datasets.sel(bands=band) # Need to possible translate channels names from satpy to mitiff # Note the last index is a tuple index. 
cn = cns.get(chn.attrs['prerequisites'][_cn_i]['name'], chn.attrs['prerequisites'][_cn_i]['name']) data = self._calibrate_data(chn, chn.attrs['prerequisites'][_cn_i].get('calibration'), self.mitiff_config[kwargs['sensor']][cn]['min-val'], self.mitiff_config[kwargs['sensor']][cn]['max-val']) tif.write_image(data.astype(np.uint8), compression='deflate') break satpy-0.34.0/satpy/writers/ninjogeotiff.py000066400000000000000000000472271420401153000206320ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer for GeoTIFF images with tags for the NinJo visualization tool. The next version of NinJo (release expected spring 2022) will be able to read standard GeoTIFF images, with required metadata encoded as a set of XML tags in the GDALMetadata TIFF tag. Each of the XML tags must be prepended with ``'NINJO_'``. For NinJo delivery, these GeoTIFF files supersede the old NinJoTIFF format. The :class:`NinJoGeoTIFFWriter` therefore supersedes the old Satpy NinJoTIFF writer and the pyninjotiff package. The reference documentation for valid NinJo tags and their meaning is contained in `NinJoPedia`_. Since this page is not in the public web, there is a (possibly outdated) `mirror`_. .. _NinJoPedia: https://ninjopedia.com/tiki-index.php?page=adm_SatelliteServer_SatelliteImportFormats_en .. _mirror: https://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html There are some user-facing differences between the old NinJoTIFF writer and the new NinJoGeoTIFF writer. Most notably, keyword arguments that correspond to tags directly passed by the user are now identical, including case, to how they will be written to the GDALMetaData and interpreted by NinJo. That means some keyword arguments have changed, such as summarised in this table: .. list-table:: Migrating to NinJoGeoTIFF, keyword arguments for the writer :header-rows: 1 * - ninjotiff (old) - ninjogeotiff (new) - Notes * - ``chan_id`` - ``ChannelID`` - mandatory * - ``data_cat`` - ``DataType`` - mandatory * - ``physic_unit`` - ``PhysicUnit`` - mandatory * - ``physic_val`` - ``PhysicValue`` - mandatory * - ``sat_id`` - ``SatelliteNameID`` - mandatory * - ``data_source`` - ``DataSource`` - optional Moreover, two keyword arguments are no longer supported because their functionality has become redundant. This applies to ``ch_min_measurement_unit`` and ``ch_max_measurement_unit``. Instead, pass those values in source units to the :func:`~satpy.enhancements.stretch` enhancement with the ``min_stretch`` and ``max_stretch`` arguments. """ import copy import datetime import logging import numpy as np from .geotiff import GeoTIFFWriter logger = logging.getLogger(__name__) class NinJoGeoTIFFWriter(GeoTIFFWriter): """Writer for GeoTIFFs with NinJo tags. This writer is experimental. API may be subject to change. 
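A minimal usage sketch, given an already resampled Scene ``scn``; the tag
values below are placeholders and have to be adapted to the receiving
NinJo installation::

    scn.save_dataset(
        "IR_108", writer="ninjogeotiff", filename="ir108.tif",
        ChannelID=900015, DataType="GORN",
        PhysicUnit="C", PhysicValue="Temperature",
        SatelliteNameID=6400014)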
For information, see module docstring and documentation for :meth:`~NinJoGeoTIFFWriter.save_image`. """ def save_image( self, image, filename=None, fill_value=None, compute=True, keep_palette=False, cmap=None, overviews=None, overviews_minsize=256, overviews_resampling=None, tags=None, config_files=None, *, ChannelID, DataType, PhysicUnit, PhysicValue, SatelliteNameID, **kwargs): """Save image along with NinJo tags. Save image along with NinJo tags. Interface as for GeoTIFF, except NinJo expects some additional tags. Those tags will be prepended with ``ninjo_`` and added as GDALMetaData. Writing such images requires trollimage 1.16 or newer. Importing such images with NinJo requires NinJo 7 or newer. Args: image (:class:`~trollimage.xrimage.XRImage`): Image to save. filename (str): Where to save the file. fill_value (int): Which pixel value is fill value? compute (bool): To compute or not to compute, that is the question. keep_palette (bool): As for parent GeoTIFF :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. cmap (:class:`trollimage.colormap.Colormap`): As for parent :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. overviews (list): As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. overviews_minsize (int): As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. overviews_resampling (str): As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. tags (dict): Extra (not NinJo) tags to add to GDAL MetaData config_files (Any): Not directly used by this writer, supported for compatibility with other writers. Remaining keyword arguments are either passed as GDAL options, if contained in ``self.GDAL_OPTIONS``, or they are passed to :class:`NinJoTagGenerator`, which will include them as NinJo tags in GDALMetadata. Supported tags are defined in ``NinJoTagGenerator.optional_tags``. The meaning of those (and other) tags are defined in the NinJo documentation (see module documentation for a link to NinJoPedia). The following tags are mandatory and must be provided as keyword arguments: ChannelID (int) NinJo Channel ID DataType (int) NinJo Data Type SatelliteNameID (int) NinJo Satellite ID PhysicUnit (str) NinJo label for unit (example: "C"). If PhysicValue is set to "Temperature", PhysicUnit is set to "C", but data attributes incidate the data have unit "K", then the writer will adapt the header ``ninjo_AxisIntercept`` such that data are interpreted in units of "C". PhysicValue (str) NinJo label for quantity (example: "temperature") """ dataset = image.data # filename not passed on to writer by Scene.save_dataset, but I need # it! 
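# Resolve the filename first, then split the remaining keyword arguments:
# recognised GDAL creation options are passed on to the GeoTIFF writer,
# everything else is handed to NinJoTagGenerator and ends up as a NinJo
# tag in GDALMetadata.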
filename = filename or self.get_filename(**dataset.attrs) gdal_opts = {} ntg_opts = {} for (k, v) in kwargs.items(): if k in self.GDAL_OPTIONS: gdal_opts[k] = v else: ntg_opts[k] = v ntg = NinJoTagGenerator( image, fill_value=fill_value, filename=filename, ChannelID=ChannelID, DataType=DataType, PhysicUnit=PhysicUnit, PhysicValue=PhysicValue, SatelliteNameID=SatelliteNameID, **ntg_opts) ninjo_tags = {f"ninjo_{k:s}": v for (k, v) in ntg.get_all_tags().items()} image = self._fix_units(image, PhysicValue, PhysicUnit) return super().save_image( image, filename=filename, fill_value=fill_value, compute=compute, keep_palette=keep_palette, cmap=cmap, overviews=overviews, overviews_minsize=overviews_minsize, overviews_resampling=overviews_resampling, tags={**(tags or {}), **ninjo_tags}, scale_offset_tags=None if image.mode.startswith("RGB") else ("ninjo_Gradient", "ninjo_AxisIntercept"), **gdal_opts) def _fix_units(self, image, quantity, unit): """Adapt units between °C and K. This will return a new XRImage, to make sure the old data and enhancement history aren't touched. """ data_units = image.data.attrs.get("units") if (quantity.lower() == "temperature" and unit == "C" and data_units == "K"): logger.debug("Adding offset for K → °C conversion") new_attrs = copy.deepcopy(image.data.attrs) im2 = type(image)(image.data.copy()) im2.data.attrs = new_attrs # this scale/offset has to be applied before anything else im2.data.attrs["enhancement_history"].insert(0, {"scale": 1, "offset": 273.15}) return im2 if unit != data_units and unit.lower() != "n/a": logger.warning( f"Writing {unit!s} to ninjogeotiff headers, but " f"data attributes have unit {data_units!s}. " "No conversion applied.") return image class NinJoTagGenerator: """Class to collect NinJo tags. This class is used by :class:`NinJoGeoTIFFWriter` to collect NinJo tags. Most end-users will not need to create instances of this class directly. Tags are gathered from three sources: - Fixed tags, contained in the attribute ``fixed_tags``. The value of those tags is hardcoded and never changes. - Tags passed by the user, contained in the attribute ``passed_tags``. Those tags must be passed by the user as arguments to the writer, which will pass them on when instantiating this class. - Tags calculated from data and metadata. Those tags are defined in the attribute ``dynamic_tags``. They are either calculated from image data, from image metadata, or from arguments passed by the user to the writer. Some tags are mandatory (defined in ``mandatory_tags``). All tags that are not mandatory are optional. By default, optional tags are generated if and only if the required information is available. 
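For example, passing ``DataSource="EUMETCAST"`` to the writer includes it
as an optional tag as-is, whereas a dynamic tag such as ``CentralMeridian``
is calculated from the area definition when the projection provides one
(the example value is only illustrative).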
""" # tags that never change fixed_tags = { "Magic": "NINJO", "HeaderVersion": 2, "XMinimum": 1, "YMinimum": 1} # tags that must be passed directly by the user passed_tags = {"ChannelID", "DataType", "PhysicUnit", "SatelliteNameID", "PhysicValue"} # tags that can be calculated dynamically from (meta)data dynamic_tags = { "CentralMeridian": "central_meridian", "ColorDepth": "color_depth", "CreationDateID": "creation_date_id", "DateID": "date_id", "EarthRadiusLarge": "earth_radius_large", "EarthRadiusSmall": "earth_radius_small", "FileName": "filename", "MaxGrayValue": "max_gray_value", "MinGrayValue": "min_gray_value", "Projection": "projection", "ReferenceLatitude1": "ref_lat_1", "TransparentPixel": "transparent_pixel", "XMaximum": "xmaximum", "YMaximum": "ymaximum" } # mandatory tags according to documentation mandatory_tags = {"SatelliteNameID", "DateID", "CreationDateID", "ChannelID", "HeaderVersion", "DataType", "SatelliteNumber", "ColorDepth", "XMinimum", "XMaximum", "YMinimum", "YMaximum", "Projection", "PhysicValue", "PhysicUnit", "MinGrayValue", "MaxGrayValue", "Gradient", "AxisIntercept", "TransparentPixel"} # optional tags are added on best effort or if passed by user optional_tags = {"DataSource", "MeridianWest", "MeridianEast", "EarthRadiusLarge", "EarthRadiusSmall", "GeodeticDate", "ReferenceLatitude1", "ReferenceLatitude2", "CentralMeridian", "ColorTable", "Description", "OverflightDirection", "GeoLatitude", "GeoLongitude", "Altitude", "AOSAzimuth", "LOSAzimuth", "MaxElevation", "OverFlightTime", "IsBlackLinesCorrection", "IsAtmosphereCorrected", "IsCalibrated", "IsNormalized", "OriginalHeader", "IsValueTableAvailable", "ValueTableFloatField"} # tags that are added later in other ways postponed_tags = {"AxisIntercept", "Gradient"} def __init__(self, image, fill_value, filename, **kwargs): """Initialise tag generator. Args: image (:class:`trollimage.xrimage.XRImage`): XRImage for which NinJo tags should be calculated. fill_value (int): Fill value corresponding to image. filename (str): Filename to be written. **kwargs: Any additional tags to be included as-is. """ self.image = image self.dataset = image.data self.fill_value = fill_value self.filename = filename self.args = kwargs self.tag_names = (self.fixed_tags.keys() | self.passed_tags | self.dynamic_tags.keys() | (self.args.keys() & self.optional_tags)) if self.args.keys() - self.tag_names: raise ValueError("The following tags were not recognised: " + " ".join(self.args.keys() - self.tag_names)) def get_all_tags(self): """Get a dictionary with all tags for NinJo.""" tags = {} for tag in self.tag_names: try: tags[tag] = self.get_tag(tag) except (AttributeError, KeyError) as e: if tag in self.mandatory_tags: raise logger.debug( f"Unable to obtain value for optional NinJo tag {tag:s}. " f"This is probably expected. 
The reason is: {e.args[0]}") return tags def get_tag(self, tag): """Get value for NinJo tag.""" if tag in self.fixed_tags: return self.fixed_tags[tag] if tag in self.passed_tags: return self.args[tag] if tag in self.dynamic_tags: return getattr(self, f"get_{self.dynamic_tags[tag]:s}")() if tag in self.optional_tags and tag in self.args: return self.args[tag] if tag in self.postponed_tags: raise ValueError(f"Tag {tag!s} is added later by the GeoTIFF writer.") if tag in self.optional_tags: raise ValueError( f"Optional tag {tag!s} must be supplied by user if user wants to " "request the value, but wasn't.") raise ValueError(f"Unknown tag: {tag!s}") def get_central_meridian(self): """Calculate central meridian.""" pams = self.dataset.attrs["area"].crs.coordinate_operation.params lon_0 = {p.name: p.value for p in pams}["Longitude of natural origin"] return lon_0 def get_color_depth(self): """Return the color depth.""" if self.image.mode in ("L", "P"): return 8 if self.image.mode in ("LA", "PA"): return 16 if self.image.mode == "RGB": return 24 if self.image.mode == "RGBA": return 32 raise ValueError( f"Unsupported image mode: {self.image.mode:s}") # Set unix epoch here explicitly, because datetime.timestamp() is # apparently not supported on Windows. _epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc) def get_creation_date_id(self): """Calculate the creation date ID. That's seconds since UNIX Epoch for the time the image is created. """ delta = datetime.datetime.now(tz=datetime.timezone.utc) - self._epoch return int(delta.total_seconds()) def get_date_id(self): """Calculate the date ID. That's seconds since UNIX Epoch for the time corresponding to the satellite image start of measurement time. """ tm = self.dataset.attrs["start_time"] delta = tm.replace(tzinfo=datetime.timezone.utc) - self._epoch return int(delta.total_seconds()) def get_earth_radius_large(self): """Return the Earth semi-major axis in metre.""" return self.dataset.attrs["area"].crs.ellipsoid.semi_major_metre def get_earth_radius_small(self): """Return the Earth semi-minor axis in metre.""" return self.dataset.attrs["area"].crs.ellipsoid.semi_minor_metre def get_filename(self): """Return the filename.""" return self.filename def get_min_gray_value(self): """Calculate minimum gray value.""" return self.image._scale_to_dtype( self.dataset.min(), np.uint8, self.fill_value).astype(np.uint8) def get_max_gray_value(self): """Calculate maximum gray value.""" return self.image._scale_to_dtype( self.dataset.max(), np.uint8, self.fill_value).astype(np.uint8) def get_projection(self): """Get NinJo projection string. From the documentation, valid values are: - NPOL/SPOL: polar-sterographic North/South - PLAT: „Plate Carrée“, equirectangular projection - MERC: Mercator projection Derived from AreaDefinition. """ if self.dataset.attrs["area"].crs.coordinate_system.name == "ellipsoidal": # For lat/lon coordinates, we say it's PLAT return "PLAT" name = self.dataset.attrs["area"].crs.coordinate_operation.method_name if "Equidistant Cylindrical" in name: return "PLAT" if "Mercator" in name: return "MERC" if "Stereographic" in name: if self.get_ref_lat_1() >= 0: return "NPOL" return "SPOL" raise ValueError( "Unknown mapping from area " f"'{self.dataset.attrs['area'].description}' with CRS coordinate " f"operation name {name:s} to NinJo projection. NinJo understands only " "equidistant cylindrical, mercator, or stereographic projections.") def get_ref_lat_1(self): """Get reference latitude one. Derived from area definition. 
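The CRS coordinate operation parameters are searched for a standard
parallel or, failing that, the latitude of natural origin.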
""" pams = {p.name: p.value for p in self.dataset.attrs["area"].crs.coordinate_operation.params} for label in ["Latitude of standard parallel", "Latitude of natural origin", "Latitude of 1st standard parallel"]: if label in pams: return pams[label] raise ValueError( "Could not find reference latitude for area " f"{self.dataset.attrs['area'].description}") def get_transparent_pixel(self): """Get the transparent pixel value, also known as the fill value. When the no fill value is defined (value `None`), such as for RGBA or LA images, returns -1, in accordance with the file format specification. """ if self.fill_value is None: return -1 return self.fill_value def get_xmaximum(self): """Get the maximum value of x, i.e. the meridional extent of the image in pixels.""" return self.dataset.sizes["x"] def get_ymaximum(self): """Get the maximum value of y, i.e. the zonal extent of the image in pixels.""" return self.dataset.sizes["y"] def get_meridian_east(self): """Get the easternmost longitude of the area. Currently not implemented. In pyninjotiff it was implemented but the answer was incorrect. """ raise NotImplementedError() def get_meridian_west(self): """Get the westernmost longitude of the area. Currently not implemented. In pyninjotiff it was implemented but the answer was incorrect. """ raise NotImplementedError() def get_ref_lat_2(self): """Get reference latitude two. This is not implemented and never was correctly implemented in pyninjotiff either. It doesn't appear to be used by NinJo. """ raise NotImplementedError("Second reference latitude not implemented.") satpy-0.34.0/satpy/writers/ninjotiff.py000066400000000000000000000205701420401153000201270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer for TIFF images compatible with the NinJo visualization tool (NinjoTIFFs). NinjoTIFFs can be color images or monochromatic. For monochromatic images, the physical units and scale and offsets to retrieve the physical values are provided. Metadata is also recorded in the file. In order to write ninjotiff files, some metadata needs to be provided to the writer. 
Here is an example on how to write a color image:: chn = "airmass" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="airmass.tif", writer='ninjotiff', sat_id=6300014, chan_id=6500015, data_cat='GPRN', data_source='EUMCAST', nbits=8) Here is an example on how to write a color image:: chn = "IR_108" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="msg.tif", writer='ninjotiff', sat_id=6300014, chan_id=900015, data_cat='GORN', data_source='EUMCAST', physic_unit='K', nbits=8) The metadata to provide to the writer can also be stored in a configuration file (see pyninjotiff), so that the previous example can be rewritten as:: chn = "IR_108" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="msg.tif", writer='ninjotiff', # ninjo product name to look for in .cfg file ninjo_product_name="IR_108", # custom configuration file for ninjo tiff products # if not specified PPP_CONFIG_DIR is used as config file directory ninjo_product_file="/config_dir/ninjotiff_products.cfg") .. _ninjotiff: http://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html """ import logging import numpy as np import pyninjotiff.ninjotiff as nt import xarray as xr from trollimage.xrimage import invert_scale_offset from satpy.writers import ImageWriter logger = logging.getLogger(__name__) def convert_units(dataset, in_unit, out_unit): """Convert units of *dataset*. Convert dataset units for the benefit of writing NinJoTIFF. The main background here is that NinJoTIFF would like brightness temperatures in °C, but satellite data files are in K. For simplicity of implementation, this function can only convert from K to °C. This function will convert input data from K to °C and write the new unit in the ``"units"`` attribute. When output and input units are equal, it returns the input dataset. Args: dataset (xarray DataArray): Dataarray for which to convert the units. in_unit (str): Unit for input data. out_unit (str): Unit for output data. Returns: dataset, possibly with new units. """ if in_unit == out_unit: return dataset if in_unit.lower() in {"k", "kelvin"} and out_unit.lower() in {"c", "celsius"}: logger.debug("Converting temperature units from K to °C") with xr.set_options(keep_attrs=True): new_dataset = dataset - 273.15 new_dataset.attrs["units"] = out_unit return new_dataset # Other units not implemented. Before Satpy 0.16.1 there was a # non-working implementation based on pint here. 
raise NotImplementedError( "NinJoTIFF unit conversion only implemented between K and C, not " f"between {in_unit!s} and {out_unit!s}") class NinjoTIFFWriter(ImageWriter): """Writer for NinjoTiff files.""" def __init__(self, tags=None, **kwargs): """Inititalize the writer.""" ImageWriter.__init__( self, default_config_filename="writers/ninjotiff.yaml", **kwargs ) self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) def save_image(self, img, filename=None, compute=True, **kwargs): # floating_point=False, """Save the image to the given *filename* in ninjotiff_ format. .. _ninjotiff: http://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html """ filename = filename or self.get_filename(**img.data.attrs) if img.mode.startswith("L") and ( "ch_min_measurement_unit" not in kwargs or "ch_max_measurement_unit" not in kwargs ): try: scale, offset = img.get_scaling_from_history() scale, offset = invert_scale_offset(scale, offset) except ValueError as err: logger.warning(str(err)) else: try: # Here we know that the data if the image is scaled between 0 and 1 dmin = offset dmax = scale + offset ch_min_measurement_unit, ch_max_measurement_unit = np.minimum(dmin, dmax), np.maximum(dmin, dmax) kwargs["ch_min_measurement_unit"] = ch_min_measurement_unit kwargs["ch_max_measurement_unit"] = ch_max_measurement_unit except KeyError: raise NotImplementedError( "Don't know how to handle non-scale/offset-based enhancements yet." ) if img.mode.startswith("P"): img.data = img.data.astype(np.uint8) return nt.save(img, filename, data_is_scaled_01=True, compute=compute, **kwargs) def save_dataset( self, dataset, filename=None, fill_value=None, compute=True, convert_temperature_units=True, **kwargs ): """Save a dataset to ninjotiff format. This calls `save_image` in turn, but first preforms some unit conversion if necessary and desired. Unit conversion can be suppressed by passing ``convert_temperature_units=False``. """ nunits = kwargs.get("physic_unit", None) if nunits is None: try: options = nt.get_product_config( kwargs["ninjo_product_name"], True, kwargs["ninjo_product_file"] ) nunits = options["physic_unit"] except KeyError: pass if nunits is not None: try: units = dataset.attrs["units"] except KeyError: logger.warning( "Saving to physical ninjo file without units defined in dataset!" ) else: if convert_temperature_units: dataset = convert_units(dataset, units, nunits) else: logger.debug("Omitting unit conversion") return super(NinjoTIFFWriter, self).save_dataset( dataset, filename=filename, compute=compute, fill_value=fill_value, **kwargs ) satpy-0.34.0/satpy/writers/simple_image.py000066400000000000000000000054121420401153000205720ustar00rootroot00000000000000#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Generic PIL/Pillow image format writer.""" import logging from satpy.writers import ImageWriter LOG = logging.getLogger(__name__) class PillowWriter(ImageWriter): """Generic PIL image format writer.""" def __init__(self, **kwargs): """Initialize image writer plugin.""" ImageWriter.__init__( self, default_config_filename="writers/simple_image.yaml", **kwargs) def save_image(self, img, filename=None, compute=True, **kwargs): """Save Image object to a given ``filename``. Args: img (trollimage.xrimage.XRImage): Image object to save to disk. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. compute (bool): If `True` (default), compute and save the dataset. If `False` return either a `dask.delayed.Delayed` object or tuple of (source, target). See the return values below for more information. **kwargs: Keyword arguments to pass to the images `save` method. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a `dask.delayed.Delayed` object or running `dask.array.store`. If `compute` is `False` then the returned value is either a `dask.delayed.Delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to `dask.array.store`. If target is provided the the caller is responsible for calling `target.close()` if the target has this method. """ filename = filename or self.get_filename(**img.data.attrs) LOG.debug("Saving to image: %s", filename) return img.save(filename, compute=compute, **kwargs) satpy-0.34.0/satpy/writers/utils.py000066400000000000000000000022471420401153000173020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer utilities.""" def flatten_dict(d, parent_key='', sep='_'): """Flatten a nested dictionary. 
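For example, ``flatten_dict({'a': {'b': 1}, 'c': 2})`` returns
``{'a_b': 1, 'c': 2}``.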
Based on https://stackoverflow.com/a/6027615/5703449 """ items = [] for k, v in d.items(): new_key = parent_key + sep + k if parent_key else k if isinstance(v, dict): items.extend(flatten_dict(v, parent_key=new_key, sep=sep).items()) else: items.append((new_key, v)) return dict(items) satpy-0.34.0/setup.cfg000066400000000000000000000011251420401153000145440ustar00rootroot00000000000000[options] setup_requires = setuptools_scm setuptools_scm_git_archive [bdist_rpm] requires=h5py pyresample python2-numexpr pyhdf xarray dask h5netcdf release=1 doc_files = doc/Makefile doc/source/*.rst doc/examples/*.py [bdist_wheel] universal=1 [flake8] max-line-length = 120 exclude = satpy/readers/li_l2.py satpy/readers/scatsat1_l2b.py satpy/version.py satpy/tests/features per-file-ignores = satpy/tests/*/conftest.py:F401 doc/source/doi_role.py:D103 satpy/tests/features/steps/*.py:F811 [coverage:run] relative_files = True omit = satpy/version.py satpy-0.34.0/setup.py000066400000000000000000000141211420401153000144350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Setup file for satpy.""" import os.path from glob import glob from setuptools import find_packages, setup try: # HACK: https://github.com/pypa/setuptools_scm/issues/190#issuecomment-351181286 # Stop setuptools_scm from including all repository files import setuptools_scm.integration setuptools_scm.integration.find_files = lambda _: [] except ImportError: pass requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.11.0', 'trollsift', 'trollimage >1.10.1', 'pykdtree', 'pyyaml', 'xarray >=0.10.1, !=0.13.0', 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', 'pooch'] test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', 'pylibtiff', 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck', 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml'] extras_require = { # Readers: 'avhrr_l1b_gaclac': ['pygac >= 1.3.0'], 'modis_l1b': ['pyhdf', 'python-geotiepoints >= 1.1.7'], 'geocat': ['pyhdf'], 'acspo': ['netCDF4 >= 1.1.8'], 'clavrx': ['netCDF4 >= 1.1.8'], 'viirs_l1b': ['netCDF4 >= 1.1.8'], 'viirs_sdr': ['h5py >= 2.7.0'], 'viirs_compact': ['h5py >= 2.7.0'], 'omps_edr': ['h5py >= 2.7.0'], 'amsr2_l1b': ['h5py >= 2.7.0'], 'hrpt': ['pyorbital >= 1.3.1', 'pygac', 'python-geotiepoints >= 1.1.7'], 'hrit_msg': ['pytroll-schedule'], 'msi_safe': ['rioxarray', "bottleneck", "python-geotiepoints"], 'nc_nwcsaf_msg': ['netCDF4 >= 1.1.8'], 'sar_c': ['python-geotiepoints >= 1.1.7', 'rasterio', 'rioxarray', 'defusedxml'], 'abi_l1b': ['h5netcdf'], 'seviri_l1b_hrit': ['pyorbital >= 1.3.1'], 'seviri_l1b_native': ['pyorbital >= 1.3.1'], 'seviri_l1b_nc': ['pyorbital >= 1.3.1', 'netCDF4 >= 1.1.8'], 'seviri_l2_bufr': ['eccodes-python'], 'seviri_l2_grib': ['eccodes-python'], 'hsaf_grib': ['pygrib'], # Writers: 'cf': ['h5netcdf >= 0.7.3'], 'awips_tiled': ['netCDF4 >= 1.1.8'], 
'geotiff': ['rasterio', 'trollimage[geotiff]'], 'mitiff': ['pylibtiff'], 'ninjo': ['pyninjotiff', 'pint'], # Composites/Modifiers: 'rayleigh': ['pyspectral >= 0.10.1'], 'angles': ['pyorbital >= 1.3.1'], # MultiScene: 'animations': ['imageio'], # Documentation: 'doc': ['sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-apidoc'], # Other 'geoviews': ['geoviews'], 'overlays': ['pycoast', 'pydecorate'], 'tests': test_requires, } all_extras = [] for extra_deps in extras_require.values(): all_extras.extend(extra_deps) extras_require['all'] = list(set(all_extras)) def _config_data_files(base_dirs, extensions=(".cfg", )): """Find all subdirectory configuration files. Searches each base directory relative to this setup.py file and finds all files ending in the extensions provided. :param base_dirs: iterable of relative base directories to search :param extensions: iterable of file extensions to include (with '.' prefix) :returns: list of 2-element tuples compatible with `setuptools.setup` """ data_files = [] pkg_root = os.path.realpath(os.path.dirname(__file__)) + "/" for base_dir in base_dirs: new_data_files = [] for ext in extensions: configs = glob(os.path.join(pkg_root, base_dir, "*" + ext)) configs = [c.replace(pkg_root, "") for c in configs] new_data_files.extend(configs) data_files.append((base_dir, new_data_files)) return data_files entry_points = { 'console_scripts': [ 'satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd', ], } NAME = 'satpy' with open('README.rst', 'r') as readme: README = readme.read() setup(name=NAME, description='Python package for earth-observing satellite data processing', long_description=README, author='The Pytroll Team', author_email='pytroll@googlegroups.com', classifiers=["Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "License :: OSI Approved :: GNU General Public License v3 " + "or later (GPLv3+)", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering"], url="https://github.com/pytroll/satpy", packages=find_packages(), # Always use forward '/', even on Windows # See https://setuptools.readthedocs.io/en/latest/userguide/datafiles.html#data-files-support package_data={'satpy': ['etc/geo_image.cfg', 'etc/areas.yaml', 'etc/satpy.cfg', 'etc/himawari-8.cfg', 'etc/eps_avhrrl1b_6.5.xml', 'etc/readers/*.yaml', 'etc/writers/*.yaml', 'etc/composites/*.yaml', 'etc/enhancements/*.cfg', 'etc/enhancements/*.yaml', 'tests/etc/readers/*.yaml', 'tests/etc/composites/*.yaml', 'tests/etc/writers/*.yaml', ]}, zip_safe=False, install_requires=requires, python_requires='>=3.7', extras_require=extras_require, entry_points=entry_points, ) satpy-0.34.0/utils/000077500000000000000000000000001420401153000140645ustar00rootroot00000000000000satpy-0.34.0/utils/convert_to_ninjotiff.py000066400000000000000000000065271420401153000207000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Simple command line too that converts an image into a NinJo Tiff file. NinJo Tiff metadata can be passed as command line input or through a config file (an example is given in the ninjo-cmd.yaml file in the etc directory). The area matching the input image shall be defined in the areas configuration file (located in $PPP_CONFIG_DIR). """ import argparse import os import yaml from satpy import Scene from satpy.pyresample import get_area_def from satpy.utils import debug_on try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader # type: ignore debug_on() parser = argparse.ArgumentParser(description='Turn an image into a NinjoTiff.') parser.add_argument('--cfg', dest='cfg', action="store", help="YAML configuration as an alternative to the command line input for NinJo metadata.") parser.add_argument('--input_dir', dest='input_dir', action="store", help="Directory with input data, that must contain a timestamp in the filename.") parser.add_argument('--chan_id', dest='chan_id', action="store", help="Channel ID", default="9999") parser.add_argument('--sat_id', dest='sat_id', action="store", help="Satellite ID", default="8888") parser.add_argument('--data_cat', dest='data_cat', action="store", help="Category of data (one of GORN, GPRN, PORN)", default="GORN") parser.add_argument('--area', dest='areadef', action="store", help="Area name, the definition must exist in your areas configuration file", default="nrEURO1km_NPOL_COALeqc") parser.add_argument('--ph_unit', dest='ph_unit', action="store", help="Physical unit", default="CELSIUS") parser.add_argument('--data_src', dest='data_src', action="store", help="Data source", default="EUMETCAST") args = parser.parse_args() if (args.input_dir is not None): os.chdir(args.input_dir) cfg = vars(args) if (args.cfg is not None): with open(args.cfg, 'r') as ymlfile: cfg = yaml.load(ymlfile, Loader=UnsafeLoader) narea = get_area_def(args.areadef) global_data = Scene(reader="generic_image") global_data.load(['image']) global_data['image'].info['area'] = narea fname = global_data['image'].info['filename'] ofname = fname[:-3] + "tif" # global_data.save_dataset('image', filename="out.png", writer="simple_image") global_data.save_dataset('image', filename=ofname, writer="ninjotiff", sat_id=cfg['sat_id'], chan_id=cfg['chan_id'], data_cat=cfg['data_cat'], data_source=cfg['data_src'], physic_unit=cfg['ph_unit']) satpy-0.34.0/utils/coord2area_def.py000066400000000000000000000130661420401153000173030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Convert human coordinates (lon and lat) to an area definition. Here is a usage example. 
python coord2area_def.py france stere 42.0 51.5 -5.5 8.0 1.5 The arguments are "name proj min_lat max_lat min_lon max_lon resolution(km)". The command above yelds the following result. ### +proj=stere +lat_0=46.75 +lon_0=1.25 +ellps=WGS84 france: description: france projection: proj: stere ellps: WGS84 lat_0: 46.75 lon_0: 1.25 shape: height: 703 width: 746 area_extent: lower_left_xy: [-559750.381098, -505020.675776] upper_right_xy: [559750.381098, 549517.351948] The first commented line is just a sum-up. The value of "description" can be changed to any descriptive text. Such a custom yaml configuration can be profitably saved in a local areas.yaml configuration file that won't be overridden by future updates of SatPy package. For that purpose the local processing script may have suitable lines as reported below. # set PPP_CONFIG_DIR for custom composites import os os.environ['PPP_CONFIG_DIR'] = '/my_local_path/for_satpy_configuration' As a further functionality this script may give a quick display of the defined area, provided the path for the GSHHG library is supplied via the "-s" option and the modules PyCoast, Pillow and AggDraw have been installed. python coord2area_def.py france stere 42.0 51.5 -5.5 8.0 1.5 -s /path/for/gshhs/library The command above would first print the seen area definition and then launch a casual representation of the area relying on the information about borders involved. """ import argparse import sys from pyproj import Proj if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("name", help="The name of the area.") parser.add_argument("proj", help="The projection to use. Use proj.4 names, like 'stere', 'merc'...") parser.add_argument("min_lat", help="The the latitude of the bottom of the area", type=float) parser.add_argument("max_lat", help="The the latitude of the top of the area", type=float) parser.add_argument("min_lon", help="The the longitude of the left of the area", type=float) parser.add_argument("max_lon", help="The the longitude of the right of the area", type=float) parser.add_argument("resolution", help="The resolution of the area (in km)", type=float) parser.add_argument("-s", "--shapes", help="Show a preview of the area using the coastlines in this directory") args = parser.parse_args() name = args.name proj = args.proj left = args.min_lon right = args.max_lon up = args.min_lat down = args.max_lat res = args.resolution * 1000 lat_0 = (up + down) / 2 lon_0 = (right + left) / 2 p = Proj(proj=proj, lat_0=lat_0, lon_0=lon_0, ellps="WGS84") left_ex1, up_ex1 = p(left, up) right_ex1, up_ex2 = p(right, up) left_ex2, down_ex1 = p(left, down) right_ex2, down_ex2 = p(right, down) left_ex3, dummy = p(left, lat_0) right_ex3, dummy = p(right, lat_0) area_extent = (min(left_ex1, left_ex2, left_ex3), min(up_ex1, up_ex2), max(right_ex1, right_ex2, right_ex3), max(down_ex1, down_ex2)) xsize = int(round((area_extent[2] - area_extent[0]) / res)) ysize = int(round((area_extent[3] - area_extent[1]) / res)) proj4_string = "+" + \ " +".join(("proj=" + proj + ",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) print('### ' + proj4_string) print() print(name + ":") print(" description: " + name) print(" projection:") print(" proj: " + proj) print(" ellps: WGS84") print(" lat_0: " + str(lat_0)) print(" lon_0: " + str(lon_0)) print(" shape:") print(" height: " + str(ysize)) print(" width: " + str(xsize)) print(" area_extent:") print(" lower_left_xy: [%f, %f]" % (area_extent[0], area_extent[1])) print(" upper_right_xy: 
[%f, %f]" % (area_extent[2], area_extent[3])) if args.shapes is None: sys.exit(0) from PIL import Image from pycoast import ContourWriterAGG img = Image.new('RGB', (xsize, ysize)) area_def = (proj4_string, area_extent) cw = ContourWriterAGG(args.shapes) cw.add_coastlines(img, (proj4_string, area_extent), resolution='l', width=0.5) cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline='white', outline_opacity=175, width=1.0, minor_outline='white', minor_outline_opacity=175, minor_width=0.2, minor_is_tick=False) img.show() satpy-0.34.0/utils/fetch_avhrr_calcoeffs.py000066400000000000000000000112451420401153000207410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Fetch avhrr calibration coefficients.""" import datetime as dt import os.path import sys import h5py import urllib2 BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \ "AVHRR/Op_Cal_AVHRR/" URLS = { "Metop-B": {"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt", "ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"}, "Metop-A": {"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt", "ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"}, "NOAA-16": {"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"}, "NOAA-17": {"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt", "ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"}, "NOAA-18": {"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"}, "NOAA-19": {"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"} } def get_page(url): """Retrieve the given page.""" return urllib2.urlopen(url).read() def get_coeffs(page): """Parse coefficients from the page.""" coeffs = {} coeffs['datetime'] = [] coeffs['slope1'] = [] coeffs['intercept1'] = [] coeffs['slope2'] = [] coeffs['intercept2'] = [] slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \ None, None, None, None date_idx = 0 for row in page.lower().split('\n'): row = row.split() if len(row) == 0: continue if row[0] == 'update': # Get the column indices from the header line slope1_idx = row.index('slope_lo') intercept1_idx = row.index('int_lo') slope2_idx = row.index('slope_hi') intercept2_idx = row.index('int_hi') continue if slope1_idx is None: continue # In some cases the fields are connected, skip those rows if max([slope1_idx, intercept1_idx, slope2_idx, intercept2_idx]) >= len(row): continue try: dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y") except ValueError: continue coeffs['datetime'].append([dat.year, dat.month, dat.day]) coeffs['slope1'].append(float(row[slope1_idx])) coeffs['intercept1'].append(float(row[intercept1_idx])) coeffs['slope2'].append(float(row[slope2_idx])) 
coeffs['intercept2'].append(float(row[intercept2_idx])) return coeffs def get_all_coeffs(): """Get all available calibration coefficients for the satellites.""" coeffs = {} for platform in URLS: if platform not in coeffs: coeffs[platform] = {} for chan in URLS[platform].keys(): url = URLS[platform][chan] print(url) page = get_page(url) coeffs[platform][chan] = get_coeffs(page) return coeffs def save_coeffs(coeffs, out_dir=''): """Save calibration coefficients to HDF5 files.""" for platform in coeffs.keys(): fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform) fid = h5py.File(fname, 'w') for chan in coeffs[platform].keys(): fid.create_group(chan) fid[chan]['datetime'] = coeffs[platform][chan]['datetime'] fid[chan]['slope1'] = coeffs[platform][chan]['slope1'] fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1'] fid[chan]['slope2'] = coeffs[platform][chan]['slope2'] fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2'] fid.close() print("Calibration coefficients saved for %s" % platform) def main(): """Create calibration coefficient files for AVHRR.""" out_dir = sys.argv[1] coeffs = get_all_coeffs() save_coeffs(coeffs, out_dir=out_dir) if __name__ == "__main__": main()
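# Example invocation (the output directory is hypothetical):
#   python fetch_avhrr_calcoeffs.py /path/to/output_dir
# This writes one "<platform>_calibration_data.h5" file per satellite.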