==> satpy-0.20.0/.codebeatignore <==
satpy/version.py
versioneer.py

==> satpy-0.20.0/.git_archival.txt <==
ref-names: HEAD -> master, tag: v0.20.0

==> satpy-0.20.0/.gitattributes <==
.git_archival.txt export-subst

==> satpy-0.20.0/.github/CODEOWNERS <==
* @djhoese @mraspaud
satpy/readers/seviri_l1b_hrit.py @sfinkens @mraspaud
doc/source/api/satpy.composites.rst @pnuu @djhoese @mraspaud
doc/source/composites.rst @pnuu @djhoese @mraspaud
satpy/composites/abi.py @djhoese
satpy/composites/cloud_products.py @adybbroe @mraspaud
satpy/composites/crefl_utils.py @djhoese @mraspaud @adybbroe
satpy/composites/sar.py @mraspaud
satpy/demo/__init__.py @djhoese
satpy/demo/google_cloud_platform.py @djhoese
satpy/etc/composites/abi.yaml @djhoese
satpy/etc/composites/avhrr-3.yaml @adybbroe @pnuu
satpy/etc/composites/msi.yaml @mraspaud
satpy/etc/composites/msu-gs.yaml @mraspaud
satpy/etc/composites/olci.yaml @mraspaud
satpy/etc/composites/sar-c.yaml @mraspaud
satpy/etc/composites/sar.yaml @mraspaud
satpy/etc/composites/seviri.yaml @mraspaud @pnuu @adybbroe
satpy/etc/composites/slstr.yaml @mraspaud
satpy/etc/composites/viirs.yaml @djhoese @mraspaud @adybbroe
satpy/etc/composites/visir.yaml @djhoese @mraspaud @adybbroe @pnuu
satpy/etc/readers/abi_l1b.yaml @djhoese
satpy/etc/readers/abi_l1b_scmi.yaml @djhoese
satpy/etc/readers/acspo.yaml @djhoese
satpy/etc/readers/ahi_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/ahi_hsd.yaml @sfinkens @djhoese @mraspaud
satpy/etc/readers/avhrr_l1b_aapp.yaml @pnuu @mraspaud @adybbroe
satpy/etc/readers/avhrr_l1b_eps.yaml @pnuu @mraspaud @adybbroe
satpy/etc/readers/avhrr_l1b_gaclac.yaml @mraspaud @sfinkens
satpy/etc/readers/avhrr_l1b_hrpt.yaml @mraspaud
satpy/etc/readers/clavrx.yaml @djhoese
satpy/etc/readers/electrol_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/fci_l1c_fdhsi.yaml @mraspaud
satpy/etc/readers/geocat.yaml @djhoese
satpy/etc/readers/goes-imager_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/goes-imager_nc.yaml @sfinkens @mraspaud
satpy/etc/readers/iasi_l2.yaml @pnuu
satpy/etc/readers/jami_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/li_l2.yaml @sjoro
satpy/etc/readers/maia.yaml @adybbroe
satpy/etc/readers/msi_safe.yaml @mraspaud
satpy/etc/readers/mtsat2-imager_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/nucaps.yaml @djhoese
satpy/etc/readers/nwcsaf-geo.yaml @adybbroe @pnuu
satpy/etc/readers/nwcsaf-pps_nc.yaml @adybbroe @mraspaud
satpy/etc/readers/olci_l1b.yaml @mraspaud
satpy/etc/readers/olci_l2.yaml @mraspaud
satpy/etc/readers/omps_edr.yaml @djhoese
satpy/etc/readers/sar-c_safe.yaml @mraspaud
satpy/etc/readers/seviri_l1b_hrit.yaml @sfinkens @sjoro @mraspaud
satpy/etc/readers/seviri_l1b_native.yaml @sfinkens @sjoro @mraspaud
satpy/etc/readers/seviri_l1b_nc.yaml @sjoro @sfinkens
satpy/etc/readers/slstr_l1b.yaml @mraspaud
satpy/etc/readers/viirs_compact.yaml @mraspaud
satpy/etc/readers/viirs_edr_active_fires.yaml @adybbroe @djhoese
satpy/etc/readers/viirs_edr_flood.yaml @djhoese
satpy/etc/readers/viirs_l1b.yaml @djhoese
satpy/etc/readers/virr_l1b.yaml @djhoese @adybbroe
satpy/etc/writers/cf.yaml @mraspaud
satpy/etc/writers/ninjotiff.yaml @mraspaud
satpy/etc/writers/scmi.yaml @djhoese
satpy/readers/aapp_l1b.py @pnuu @mraspaud @adybbroe
satpy/readers/abi_l1b.py @djhoese
satpy/readers/acspo.py @djhoese
satpy/readers/ahi_hsd.py @sfinkens @djhoese @mraspaud
satpy/readers/avhrr_l1b_gaclac.py @mraspaud @sfinkens
satpy/readers/clavrx.py @djhoese
satpy/readers/electrol_hrit.py @sfinkens @mraspaud
satpy/readers/eps_l1b.py @mraspaud @pnuu @adybbroe
satpy/readers/eum_base.py @sjoro @sfinkens @adybbroe
satpy/readers/fci_l1c_fdhsi.py @mraspaud
satpy/readers/geocat.py @djhoese
satpy/readers/goes_imager_hrit.py @sfinkens @mraspaud
satpy/readers/goes_imager_nc.py @sfinkens @mraspaud
satpy/readers/hrit_base.py @sfinkens @sjoro @mraspaud
satpy/readers/hrit_jma.py @sfinkens @mraspaud
satpy/readers/hrpt.py @mraspaud
satpy/readers/iasi_l2.py @pnuu
satpy/readers/li_l2.py @sjoro
satpy/readers/maia.py @adybbroe
satpy/readers/msi_safe.py @mraspaud
satpy/readers/nucaps.py @djhoese
satpy/readers/nwcsaf_nc.py @adybbroe @mraspaud
satpy/readers/olci_nc.py @mraspaud
satpy/readers/omps_edr.py @djhoese
satpy/readers/sar_c_safe.py @mraspaud
satpy/readers/scmi.py @djhoese
satpy/readers/seviri_base.py @sfinkens @sjoro @mraspaud @adybbroe
satpy/readers/seviri_l1b_hrit.py @sfinkens @sjoro @mraspaud
satpy/readers/seviri_l1b_native.py @sjoro @sfinkens @mraspaud
satpy/readers/seviri_l1b_native_hdr.py @sjoro @sfinkens @adybbroe
satpy/readers/seviri_l1b_nc.py @sjoro @sfinkens @mraspaud
satpy/readers/slstr_l1b.py @mraspaud
satpy/readers/viirs_compact.py @mraspaud
satpy/readers/viirs_edr_active_fires.py @adybbroe @djhoese
satpy/readers/viirs_edr_flood.py @djhoese
satpy/readers/viirs_l1b.py @djhoese
satpy/readers/xmlformat.py @mraspaud
satpy/resample.py @pnuu @djhoese @mraspaud
satpy/writers/cf_writer.py @mraspaud
satpy/writers/scmi.py @djhoese
utils/coord2area_def.py @mraspaud @adybbroe
utils/fetch_avhrr_calcoeffs.py @pnuu

==> satpy-0.20.0/.github/ISSUE_TEMPLATE/bug_report.md <==
---
name: Bug report
about: Create a report to help us improve
---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
```python
# Your code here
```

**Expected behavior**
A clear and concise description of what you expected to happen.

**Actual results**
Text output of actual results or error messages including full tracebacks if applicable.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Environment Info:**
 - OS: [e.g. OSX, Windows, Linux]
 - Satpy Version: [e.g. 0.9.0]
 - PyResample Version:
 - Readers and writers dependencies (when relevant): [run `from satpy.config import check_satpy; check_satpy()`]

**Additional context**
Add any other context about the problem here.

==> satpy-0.20.0/.github/ISSUE_TEMPLATE/feature_request.md <==
---
name: Feature request
about: Suggest an idea for this project
---

## Feature Request

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe any changes to existing user workflow**
Are there any backwards compatibility concerns? Changes to the build process? Additional dependencies?

**Additional context**
Have you considered any alternative solutions or is there anything else that would help describe your request.

==> satpy-0.20.0/.github/PULL_REQUEST_TEMPLATE.md <==
 - [ ] Closes #xxxx
 - [ ] Tests added and test suite added to parent suite
 - [ ] Tests passed
 - [ ] Passes ``flake8 satpy``
 - [ ] Fully documented
 - [ ] Add your name to `AUTHORS.md` if not there already

==> satpy-0.20.0/.gitignore <==
### PYTHON IGNORES ###
*.py[cod]

# C extensions
*.so

# Packages
*.egg
*.egg-info
dist
build
doc/build
eggs
*.eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64

# Installer logs
pip-log.txt

# Unit test / coverage reports
.coverage
.tox
nosetests.xml
htmlcov

#Translations
*.mo

#Mr Developer
.mr.developer.cfg

### C IGNORES ###
# Object files
*.o

# Libraries
*.lib
*.a

# Shared objects (inc. Windows DLLs)
*.dll
*.so
*.so.*
*.dylib

# Executables
*.exe
*.out
*.app

# Others
*~

# PyCharm Settings
.idea

# VSCode Settings
.vscode

# vi / vim swp files
*.swp
.DS_STORE

# setuptools_scm files
# this should be generated automatically when installed
satpy/version.py

==> satpy-0.20.0/.pre-commit-config.yaml <==
exclude: '^$'
fail_fast: false
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.2.3
    hooks:
      - id: flake8
        additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear]

==> satpy-0.20.0/.readthedocs.yml <==
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

version: 2

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: doc/source/conf.py

# Optionally build your docs in additional formats such as PDF and ePub
formats: all

conda:
  environment: doc/rtd_environment.yml

==> satpy-0.20.0/.stickler.yml <==
linters:
  flake8:
    python: 3
    config: setup.cfg

==> satpy-0.20.0/.travis.yml <==
language: python
env:
  global:
    # Set defaults to avoid repeating in most cases
    - PYTHON_VERSION=$TRAVIS_PYTHON_VERSION
    - NUMPY_VERSION=1.17
    - MAIN_CMD='python setup.py'
    - CONDA_DEPENDENCIES='xarray dask distributed toolz Cython sphinx cartopy pillow matplotlib scipy pyyaml pyproj=2.4.1 pyresample coveralls coverage codecov behave netcdf4 h5py h5netcdf gdal rasterio imageio pyhdf mock libtiff geoviews zarr six python-eccodes'
    - PIP_DEPENDENCIES='trollsift trollimage pyspectral pyorbital libtiff'
    - SETUP_XVFB=False
    - EVENT_TYPE='push pull_request'
    - SETUP_CMD='test'
    - CONDA_CHANNELS='conda-forge'
    - CONDA_CHANNEL_PRIORITY='strict'
matrix:
  include:
    - env: PYTHON_VERSION=3.8
      os: linux
    - env: PYTHON_VERSION=3.8
      os: osx
      language: generic
    - env: PYTHON_VERSION=3.7
      os: linux
    - env: PYTHON_VERSION=3.7
      os: osx
      language: generic
install:
  - git clone --depth 1 git://github.com/astropy/ci-helpers.git
  - source ci-helpers/travis/setup_conda.sh
script:
  - coverage run --source=satpy setup.py test
  - coverage run -a --source=satpy -m
    behave satpy/tests/features --tags=-download
after_success:
  - if [[ $PYTHON_VERSION == 3.8 ]]; then coveralls; codecov; fi
deploy:
  - provider: pypi
    user: dhoese
    password:
      secure: frK+0k1STeTM7SizRseP0qdTfOVz9ZMIra+3qEytPdxCLceXAH8LxPU16zj5rdNQxasF1hZ6rAd952fly+ypw2TEf5r2WnStrt7G5QlyE7VB6XGSDpIUxKF1FYccLvYs0/R6Y35MTEPqdM51PM5yEBjoY5b4tA3RF3fDq11cqc/SiWr6DgSLB1WJZULOdtCzBbfGbm5LyJ7yeNbISASSAwVvZTGWw7kJDgi0W5zxwEX82N5tBGbfKIu59qmxyj8FxmcrUwKZ4P3rQNg1kN1utzAB+PSf3GAVvbZfWJQuAKwMqpZgaV9lX0V7eUd/AxPobzEk9WyoNBMIdrSPej5BKWTDiYvaeRTOsggoUCSQJJA/SITEvkJgLWXoKKX2OWrM8RBUO4MoZJpPGXN42PRtMJkV2sx6ZigkpJlHdn39SsIRZX31zsfv8bBhclb70bt1Ts0fDd0rVdZAI6gMI+sgUePwEUn+XbWrvI0sMfDX3QsXDMV393RHgaIPxd+lRqUlYsNOxjsWpsbsvX55ePLxYHsNrv11KKyL/iGjGotVeVUO5D78qvfd4JrsUnMalQyZfW8NTEKa5Ebcs7gYJTwYEOTCQU12BkHOv1zFkjZG5RdGwkEvG3pykLhx+qDyYEd7pKB3TvhzLPqZPSrPxirwcoc0UzCc6ocYdzpqVuViFuk=
    distributions: sdist
    skip_existing: true
    on:
      tags: true
      repo: pytroll/satpy
notifications:
  slack:
    rooms:
      - pytroll:96mNSYSI1dBjGyzVXkBT6qFt#github

==> satpy-0.20.0/AUTHORS.md <==
# Project Contributors

The following people have made contributions to this project:

- [Trygve Aspenes (TAlonglong)](https://github.com/TAlonglong)
- [Talfan Barnie (TalfanBarnie)](https://github.com/TalfanBarnie)
- [Suyash Behera (Suyash458)](https://github.com/Suyash458)
- [Andrew Brooks (howff)](https://github.com/howff)
- Guido della Bruna - meteoswiss
- [Eric Bruning (deeplycloudy)](https://github.com/deeplycloudy)
- [Lorenzo Clementi (loreclem)](https://github.com/loreclem)
- [Colin Duff (ColinDuff)](https://github.com/ColinDuff)
- [Radar, Satellite and Nowcasting Division (meteoswiss-mdr)](https://github.com/meteoswiss-mdr)
- [Rohan Daruwala (rdaruwala)](https://github.com/rdaruwala)
- [Adam Dybbroe (adybbroe)](https://github.com/adybbroe)
- [Ulrik Egede (egede)](https://github.com/egede)
- [Joleen Feltz (joleenf)](https://github.com/joleenf)
- [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens)
- [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY)
- [Nina Håkansson (ninahakansson)](https://github.com/ninahakansson)
- [Ulrich Hamann](https://github.com/)
- [Gerrit Holl (gerritholl)](https://github.com/gerritholl)
- [David Hoese (djhoese)](https://github.com/djhoese)
- [Marc Honnorat (honnorat)](https://github.com/honnorat)
- [Mikhail Itkin (mitkin)](https://github.com/mitkin)
- [Tommy Jasmin (tommyjasmin)](https://github.com/tommyjasmin)
- [Johannes Johansson (JohannesSMHI)](https://github.com/JohannesSMHI)
- [Sauli Joro (sjoro)](https://github.com/sjoro)
- [Janne Kotro (jkotro)](https://github.com/jkotro)
- [Ralph Kuehn (ralphk11)](https://github.com/ralphk11)
- [Panu Lahtinen (pnuu)](https://github.com/pnuu)
- [Thomas Leppelt (m4sth0)](https://github.com/m4sth0)
- [Andrea Meraner (ameraner)](https://github.com/ameraner)
- [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer)
- [Oana Nicola](https://github.com/)
- [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl)
- [Tom Parker (tparker-usgs)](https://github.com/tparker-usgs)
- [Christian Peters (peters77)](https://github.com/peters77)
- [Ghislain Picard (ghislainp)](https://github.com/ghislainp)
- [Simon R.
  Proud (simonrp84)](https://github.com/simonrp84)
- [Lars Ørum Rasmussen (loerum)](https://github.com/loerum)
- [Martin Raspaud (mraspaud)](https://github.com/mraspaud)
- [William Roberts (wroberts4)](https://github.com/wroberts4)
- [Pascale Roquet (roquetp)](https://github.com/roquetp)
- [Kristian Rune Larsen](https://github.com/)
- [RutgerK (RutgerK)](https://github.com/RutgerK)
- Marco Sassi - meteoswiss
- [Stefan Scheiblauer (StefanSnippetCoder)](https://github.com/StefanSnippetCoder)
- [Ronald Scheirer](https://github.com/)
- [Hauke Schulz (observingClouds)](https://github.com/observingClouds)
- [Eysteinn Sigurðsson (eysteinn)](https://github.com/eysteinn)
- [Dario Stelitano (bornagain1981)](https://github.com/bornagain1981)
- [Matias Takala (elfsprite)](https://github.com/elfsprite)
- [hazbottles (hazbottles)](https://github.com/hazbottles)
- [oananicola (oananicola)](https://github.com/oananicola)
- [praerien (praerien)](https://github.com/praerien)
- [Xin Zhang (zxdawn)](https://github.com/zxdawn)
- [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600)

==> satpy-0.20.0/CHANGELOG.md <==
## Version v0.20.0 (2020/02/25)

### Issues Closed

* [Issue 1077](https://github.com/pytroll/satpy/issues/1077) - Tropomi l2 reader needs to handle more filenames ([PR 1078](https://github.com/pytroll/satpy/pull/1078))
* [Issue 1076](https://github.com/pytroll/satpy/issues/1076) - Metop level 2 EUMETCAST BUFR reader ([PR 1079](https://github.com/pytroll/satpy/pull/1079))
* [Issue 1004](https://github.com/pytroll/satpy/issues/1004) - Computing the lons and lats of metop granules from the eps_l1b reader is painfully slow ([PR 1063](https://github.com/pytroll/satpy/pull/1063))
* [Issue 1002](https://github.com/pytroll/satpy/issues/1002) - Resampling of long passes of metop l1b eps data gives strange results
* [Issue 928](https://github.com/pytroll/satpy/issues/928) - Satpy Writer 'geotiff' exists but could not be loaded
* [Issue 924](https://github.com/pytroll/satpy/issues/924) - eps_l1b reader does not accept more than 1 veadr element ([PR 1063](https://github.com/pytroll/satpy/pull/1063))
* [Issue 809](https://github.com/pytroll/satpy/issues/809) - Update avhrr_l1b_aapp reader ([PR 811](https://github.com/pytroll/satpy/pull/811))
* [Issue 112](https://github.com/pytroll/satpy/issues/112) - Python 2 Cruft ([PR 1047](https://github.com/pytroll/satpy/pull/1047))

In this release 8 issues were closed.
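For errors like the missing 'geotiff' writer in Issue 928 above, the readers and writers that can actually be imported in a given environment can be listed directly. A minimal diagnostic sketch, assuming only that satpy is installed:

```python
# List the readers and writers importable in this environment; entries
# missing from these lists usually point to missing optional dependencies
# (e.g. rasterio for the 'geotiff' writer).
from satpy import available_readers, available_writers

print("Readers:", sorted(available_readers()))
print("Writers:", sorted(available_writers()))
```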
### Pull Requests Merged

#### Bugs fixed

* [PR 1084](https://github.com/pytroll/satpy/pull/1084) - Add latitude_bounds and longitude_bounds to tropomi_l2
* [PR 1078](https://github.com/pytroll/satpy/pull/1078) - Tropomi l2 reader to handle more types of products ([1077](https://github.com/pytroll/satpy/issues/1077))
* [PR 1072](https://github.com/pytroll/satpy/pull/1072) - Fix the omerc-bb area to use a sphere as ellps
* [PR 1066](https://github.com/pytroll/satpy/pull/1066) - Rename natural_color_sun to natural_color in generic VIS/IR RGB recipes
* [PR 1063](https://github.com/pytroll/satpy/pull/1063) - Fix eps infinite loop ([924](https://github.com/pytroll/satpy/issues/924), [1004](https://github.com/pytroll/satpy/issues/1004))
* [PR 1058](https://github.com/pytroll/satpy/pull/1058) - Work around changes in xarray 0.15
* [PR 1057](https://github.com/pytroll/satpy/pull/1057) - lowercase the sensor name
* [PR 1055](https://github.com/pytroll/satpy/pull/1055) - Fix sst standard name
* [PR 1049](https://github.com/pytroll/satpy/pull/1049) - Fix handling of paths with forward slashes on Windows
* [PR 1048](https://github.com/pytroll/satpy/pull/1048) - Fix AMI L1b reader incorrectly grouping files
* [PR 1045](https://github.com/pytroll/satpy/pull/1045) - Update hrpt.py for new pygac syntax
* [PR 1043](https://github.com/pytroll/satpy/pull/1043) - Update seviri icare reader that handles differing dataset versions
* [PR 1042](https://github.com/pytroll/satpy/pull/1042) - Replace a unicode hyphen in the glm_l2 reader
* [PR 1041](https://github.com/pytroll/satpy/pull/1041) - Unify Dataset attribute naming in SEVIRI L2 BUFR-reader

#### Features added

* [PR 1082](https://github.com/pytroll/satpy/pull/1082) - Update SLSTR composites
* [PR 1079](https://github.com/pytroll/satpy/pull/1079) - Metop level 2 EUMETCAST BUFR reader ([1076](https://github.com/pytroll/satpy/issues/1076))
* [PR 1067](https://github.com/pytroll/satpy/pull/1067) - Add GOES-17 support to the 'geocat' reader
* [PR 1065](https://github.com/pytroll/satpy/pull/1065) - Add AHI airmass, ash, dust, fog, and night_microphysics RGBs
* [PR 1064](https://github.com/pytroll/satpy/pull/1064) - Adjust default blending in DayNightCompositor
* [PR 1061](https://github.com/pytroll/satpy/pull/1061) - Add support for NUCAPS Science EDRs
* [PR 1052](https://github.com/pytroll/satpy/pull/1052) - Delegate dask delays to pyninjotiff
* [PR 1047](https://github.com/pytroll/satpy/pull/1047) - Remove deprecated abstractproperty usage ([112](https://github.com/pytroll/satpy/issues/112))
* [PR 1020](https://github.com/pytroll/satpy/pull/1020) - Feature Sentinel-3 Level-2 SST
* [PR 988](https://github.com/pytroll/satpy/pull/988) - Remove py27 tests and switch to py38
* [PR 964](https://github.com/pytroll/satpy/pull/964) - Update SEVIRI L2 BUFR reader to handle BUFR products from EUMETSAT Data Centre
* [PR 839](https://github.com/pytroll/satpy/pull/839) - Add support of colorbar
* [PR 811](https://github.com/pytroll/satpy/pull/811) - Daskify and test avhrr_l1b_aapp reader ([809](https://github.com/pytroll/satpy/issues/809))

#### Documentation changes

* [PR 1068](https://github.com/pytroll/satpy/pull/1068) - Fix a typo in writer 'filename' documentation
* [PR 1056](https://github.com/pytroll/satpy/pull/1056) - Fix name of natural_color composite in quickstart

#### Backwards incompatible changes

* [PR 1066](https://github.com/pytroll/satpy/pull/1066) - Rename natural_color_sun to natural_color in generic VIS/IR RGB recipes
* [PR 988](https://github.com/pytroll/satpy/pull/988) - Remove py27 tests and switch to py38

In this release 31 pull requests were closed.

## Version 0.19.1 (2020/01/10)

### Issues Closed

* [Issue 1030](https://github.com/pytroll/satpy/issues/1030) - Geostationary padding results in wrong area definition for AHI mesoscale sectors. ([PR 1037](https://github.com/pytroll/satpy/pull/1037))
* [Issue 1029](https://github.com/pytroll/satpy/issues/1029) - NetCDF (CF) writer doesn't include semi_minor_axis/semi_major_axis for new versions of pyproj ([PR 1040](https://github.com/pytroll/satpy/pull/1040))
* [Issue 1023](https://github.com/pytroll/satpy/issues/1023) - RTD "Edit on Github" broken in "latest" documentation

In this release 3 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1040](https://github.com/pytroll/satpy/pull/1040) - Fix geostationary axis handling in CF writer ([1029](https://github.com/pytroll/satpy/issues/1029))
* [PR 1037](https://github.com/pytroll/satpy/pull/1037) - Fix segment handling for non-FLDK sectors in the AHI HSD reader ([1030](https://github.com/pytroll/satpy/issues/1030))
* [PR 1036](https://github.com/pytroll/satpy/pull/1036) - Fix ABI L1b/L2 time dimension causing issues with newer xarray
* [PR 1034](https://github.com/pytroll/satpy/pull/1034) - Fix AMI geolocation being off by 1 pixel
* [PR 1033](https://github.com/pytroll/satpy/pull/1033) - Fix avhrr_l1b_aapp reader not including standard_name metadata
* [PR 1031](https://github.com/pytroll/satpy/pull/1031) - Fix tropomi_l2 reader not using y and x dimension names

#### Features added

* [PR 1035](https://github.com/pytroll/satpy/pull/1035) - Add additional Sentinel 3 OLCI 2 datasets
* [PR 1027](https://github.com/pytroll/satpy/pull/1027) - Update SCMI writer and VIIRS EDR Flood reader to work for pre-tiled data

#### Documentation changes

* [PR 1032](https://github.com/pytroll/satpy/pull/1032) - Add documentation about y and x dimensions for custom readers

In this release 9 pull requests were closed.

## Version 0.19.0 (2019/12/30)

### Issues Closed

* [Issue 996](https://github.com/pytroll/satpy/issues/996) - In the sar-c_safe reader, add platform_name to the attribute. ([PR 998](https://github.com/pytroll/satpy/pull/998))
* [Issue 991](https://github.com/pytroll/satpy/issues/991) - Secondary file name patterns aren't used if the first doesn't match
* [Issue 975](https://github.com/pytroll/satpy/issues/975) - Add HRV navigation to `seviri_l1b_native`-reader ([PR 985](https://github.com/pytroll/satpy/pull/985))
* [Issue 972](https://github.com/pytroll/satpy/issues/972) - MTG-FCI-FDHSI reader is slow, apparently not actually dask-aware ([PR 981](https://github.com/pytroll/satpy/pull/981))
* [Issue 970](https://github.com/pytroll/satpy/issues/970) - Pad all geostationary L1 data to full disk area ([PR 977](https://github.com/pytroll/satpy/pull/977))
* [Issue 960](https://github.com/pytroll/satpy/issues/960) - Factorize area def computation in jma_hrit ([PR 978](https://github.com/pytroll/satpy/pull/978))
* [Issue 957](https://github.com/pytroll/satpy/issues/957) - Rayleigh correction in bands l2 of the ABI sensor
* [Issue 954](https://github.com/pytroll/satpy/issues/954) - Mask composites using cloud products ([PR 982](https://github.com/pytroll/satpy/pull/982))
* [Issue 949](https://github.com/pytroll/satpy/issues/949) - Make a common function for geostationnary area_extent computation ([PR 952](https://github.com/pytroll/satpy/pull/952))
* [Issue 807](https://github.com/pytroll/satpy/issues/807) - Add a MIMIC-TPW2 reader ([PR 858](https://github.com/pytroll/satpy/pull/858))
* [Issue 782](https://github.com/pytroll/satpy/issues/782) - Update custom reader documentation to mention coordinates and available datasets ([PR 1019](https://github.com/pytroll/satpy/pull/1019))
* [Issue 486](https://github.com/pytroll/satpy/issues/486) - Add GMS series satellite data reader

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1021](https://github.com/pytroll/satpy/pull/1021) - Fix padding of segmented geostationary images
* [PR 1010](https://github.com/pytroll/satpy/pull/1010) - Fix missing part in ahi_hrit file pattern
* [PR 1007](https://github.com/pytroll/satpy/pull/1007) - Fix `ahi_hrit` expected segments
* [PR 1006](https://github.com/pytroll/satpy/pull/1006) - Rename standard_name for various readers to be consistent
* [PR 993](https://github.com/pytroll/satpy/pull/993) - Fix VIIRS EDR Flood file patterns not working for AOI files ([243](https://github.com/ssec/polar2grid/issues/243))
* [PR 989](https://github.com/pytroll/satpy/pull/989) - Fix generation of solar and satellite angles when lon/lats are invalid
* [PR 976](https://github.com/pytroll/satpy/pull/976) - CF Writer Improvements
* [PR 974](https://github.com/pytroll/satpy/pull/974) - Fix available_composite_names including night_background static images ([239](https://github.com/ssec/polar2grid/issues/239))
* [PR 969](https://github.com/pytroll/satpy/pull/969) - Fix HDF4 handling of scalar attributes
* [PR 966](https://github.com/pytroll/satpy/pull/966) - Add the fire temperature products to AHI
* [PR 931](https://github.com/pytroll/satpy/pull/931) - Update coord2area_def.py

#### Features added

* [PR 1012](https://github.com/pytroll/satpy/pull/1012) - Implement a small cviirs speedup
* [PR 1011](https://github.com/pytroll/satpy/pull/1011) - Provide only dask arrays to pyspectral's nir reflectance computation
* [PR 1009](https://github.com/pytroll/satpy/pull/1009) - Add support for SEVIRI data from icare
* [PR 1005](https://github.com/pytroll/satpy/pull/1005) - Remove unused reader xslice/yslice keyword arguments
* [PR 1003](https://github.com/pytroll/satpy/pull/1003) - Update copyright header in readers. Add and fix docstrings.
* [PR 998](https://github.com/pytroll/satpy/pull/998) - Add platform name to attributes of sar_c_safe reader ([996](https://github.com/pytroll/satpy/issues/996))
* [PR 997](https://github.com/pytroll/satpy/pull/997) - Add check if prerequisites is used
* [PR 994](https://github.com/pytroll/satpy/pull/994) - Add LAC support to the avhrr-gac-lac reader
* [PR 992](https://github.com/pytroll/satpy/pull/992) - Add hrv_clouds, hrv_fog and natural_with_night_fog composites to seviri.yaml
* [PR 987](https://github.com/pytroll/satpy/pull/987) - scene.aggregate will now handle a SwathDefinition
* [PR 985](https://github.com/pytroll/satpy/pull/985) - Add HRV full disk navigation for `seviri_l1b_native`-reader ([975](https://github.com/pytroll/satpy/issues/975))
* [PR 984](https://github.com/pytroll/satpy/pull/984) - Add on-the-fly decompression to the AHI HSD reader
* [PR 982](https://github.com/pytroll/satpy/pull/982) - Add simple masking compositor ([954](https://github.com/pytroll/satpy/issues/954))
* [PR 981](https://github.com/pytroll/satpy/pull/981) - Optionally cache small data variables and file handles ([972](https://github.com/pytroll/satpy/issues/972))
* [PR 980](https://github.com/pytroll/satpy/pull/980) - Read the meta_data dictionary from pygac
* [PR 978](https://github.com/pytroll/satpy/pull/978) - Factorize area computation in hrit_jma ([960](https://github.com/pytroll/satpy/issues/960))
* [PR 977](https://github.com/pytroll/satpy/pull/977) - Add a YAMLReader to pad segmented geo data ([970](https://github.com/pytroll/satpy/issues/970))
* [PR 976](https://github.com/pytroll/satpy/pull/976) - CF Writer Improvements
* [PR 966](https://github.com/pytroll/satpy/pull/966) - Add the fire temperature products to AHI
* [PR 962](https://github.com/pytroll/satpy/pull/962) - add support for meteo file in OLCI L1B reader
* [PR 961](https://github.com/pytroll/satpy/pull/961) - Fix default radius_of_influence for lon/lat AreaDefintions
* [PR 952](https://github.com/pytroll/satpy/pull/952) - Adds a common function for geostationary projection / area definition calculations ([949](https://github.com/pytroll/satpy/issues/949))
* [PR 920](https://github.com/pytroll/satpy/pull/920) - Transverse Mercator section added in cf writer
* [PR 908](https://github.com/pytroll/satpy/pull/908) - Add interface to pyresample gradient resampler
* [PR 858](https://github.com/pytroll/satpy/pull/858) - Mimic TPW Reader ([807](https://github.com/pytroll/satpy/issues/807))
* [PR 854](https://github.com/pytroll/satpy/pull/854) - Add GOES-R GLM L2 Gridded product reader and small ABI L1b changes

#### Documentation changes

* [PR 1025](https://github.com/pytroll/satpy/pull/1025) - Switch to configuration file for readthedocs
* [PR 1019](https://github.com/pytroll/satpy/pull/1019) - Add more information about creating custom readers ([782](https://github.com/pytroll/satpy/issues/782))
* [PR 1018](https://github.com/pytroll/satpy/pull/1018) - Add information to Quickstart on basics of getting measurement values and navigation
* [PR 1008](https://github.com/pytroll/satpy/pull/1008) - Add documentation for combine_metadata function
* [PR 1003](https://github.com/pytroll/satpy/pull/1003) - Update copyright header in readers. Add and fix docstrings.
* [PR 1001](https://github.com/pytroll/satpy/pull/1001) - Get travis badge from master branch
* [PR 999](https://github.com/pytroll/satpy/pull/999) - Add FCI L1C reader short and long name metadata
* [PR 968](https://github.com/pytroll/satpy/pull/968) - Add information about multi-threaded compression with geotiff creation

In this release 45 pull requests were closed.

## Version 0.18.1 (2019/11/07)

### Pull Requests Merged

#### Bugs fixed

* [PR 959](https://github.com/pytroll/satpy/pull/959) - Fix `grid` argument handling in overlaying

In this release 1 pull request was closed.

## Version 0.18.0 (2019/11/06)

### Issues Closed

* [Issue 944](https://github.com/pytroll/satpy/issues/944) - Multiple errors when processing OLCI data. ([PR 945](https://github.com/pytroll/satpy/pull/945))
* [Issue 940](https://github.com/pytroll/satpy/issues/940) - Loading of DNB data from VIIRS compact SDR is slow ([PR 941](https://github.com/pytroll/satpy/pull/941))
* [Issue 922](https://github.com/pytroll/satpy/issues/922) - Clarify orbital_parameters metadata ([PR 950](https://github.com/pytroll/satpy/pull/950))
* [Issue 888](https://github.com/pytroll/satpy/issues/888) - Unintended/wrong behaviour of getitem method in HDF5FileHandler? ([PR 886](https://github.com/pytroll/satpy/pull/886))
* [Issue 737](https://github.com/pytroll/satpy/issues/737) - Add reader for GEO-KOMPSAT AMI ([PR 911](https://github.com/pytroll/satpy/pull/911))

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 953](https://github.com/pytroll/satpy/pull/953) - Encode header attributes in CF writer
* [PR 945](https://github.com/pytroll/satpy/pull/945) - Fix bug in OLCI reader that caused multiple error messages to print ([944](https://github.com/pytroll/satpy/issues/944))
* [PR 942](https://github.com/pytroll/satpy/pull/942) - Fix VIIRS EDR Active Fires not assigning a _FillValue to confidence_pct
* [PR 939](https://github.com/pytroll/satpy/pull/939) - Fix MERSI-2 natural_color composite using the wrong band for sharpening
* [PR 938](https://github.com/pytroll/satpy/pull/938) - Fix MultiScene.save_animation to work with new dask.distributed versions
* [PR 914](https://github.com/pytroll/satpy/pull/914) - Cleaning up and adding MERSI-2 RGB composites

#### Features added

* [PR 955](https://github.com/pytroll/satpy/pull/955) - Code clean-up for SEVIRI L2 BUFR-reader
* [PR 953](https://github.com/pytroll/satpy/pull/953) - Encode header attributes in CF writer
* [PR 948](https://github.com/pytroll/satpy/pull/948) - Add the possibility to include scale and offset in geotiffs
* [PR 947](https://github.com/pytroll/satpy/pull/947) - Feature mitiff palette
* [PR 941](https://github.com/pytroll/satpy/pull/941) - Speed up cviirs tiepoint interpolation ([940](https://github.com/pytroll/satpy/issues/940))
* [PR 935](https://github.com/pytroll/satpy/pull/935) - Adapt avhrr_l1b_gaclac to recent pygac changes
* [PR 934](https://github.com/pytroll/satpy/pull/934) - Update add_overlay to make use of the full pycoast capabilities
* [PR 911](https://github.com/pytroll/satpy/pull/911) - Add GK-2A AMI L1B Reader ([737](https://github.com/pytroll/satpy/issues/737))
* [PR 886](https://github.com/pytroll/satpy/pull/886) - Reader for NWCSAF/MSG 2013 format ([888](https://github.com/pytroll/satpy/issues/888))
* [PR 769](https://github.com/pytroll/satpy/pull/769) - Added initial version of an MSG BUFR reader and TOZ product yaml file
* [PR 586](https://github.com/pytroll/satpy/pull/586) - Update handling of reading colormaps from files in enhancements

#### Documentation changes

* [PR 950](https://github.com/pytroll/satpy/pull/950) - Clarify documentation of orbital_parameters metadata ([922](https://github.com/pytroll/satpy/issues/922))
* [PR 943](https://github.com/pytroll/satpy/pull/943) - Fix sphinx docs generation after setuptools_scm migration

In this release 19 pull requests were closed.

## Version 0.17.1 (2019/10/08)

### Issues Closed

* [Issue 918](https://github.com/pytroll/satpy/issues/918) - satpy 0.17 does not work with pyresample 1.11 ([PR 927](https://github.com/pytroll/satpy/pull/927))
* [Issue 902](https://github.com/pytroll/satpy/issues/902) - background compositor with colorized ir_clouds and static image problem ([PR 917](https://github.com/pytroll/satpy/pull/917))
* [Issue 853](https://github.com/pytroll/satpy/issues/853) - scene.available_composite_names() return a composite even the dependency is not fullfilled ([PR 921](https://github.com/pytroll/satpy/pull/921))
* [Issue 830](https://github.com/pytroll/satpy/issues/830) - generic_image reader doesn't read area from .yaml file? ([PR 925](https://github.com/pytroll/satpy/pull/925))

In this release 4 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 925](https://github.com/pytroll/satpy/pull/925) - Fix area handling in StaticImageCompositor ([830](https://github.com/pytroll/satpy/issues/830))
* [PR 923](https://github.com/pytroll/satpy/pull/923) - Make the olci l2 mask a bool array instead of floats
* [PR 921](https://github.com/pytroll/satpy/pull/921) - Fix Scene.available_composite_names showing unavailable composites ([853](https://github.com/pytroll/satpy/issues/853))
* [PR 917](https://github.com/pytroll/satpy/pull/917) - Fix BackgroundCompositor not retaining input metadata ([902](https://github.com/pytroll/satpy/issues/902))

#### Features added

* [PR 927](https://github.com/pytroll/satpy/pull/927) - Fix resampler imports ([918](https://github.com/pytroll/satpy/issues/918))

#### Backwards incompatible changes

* [PR 921](https://github.com/pytroll/satpy/pull/921) - Fix Scene.available_composite_names showing unavailable composites ([853](https://github.com/pytroll/satpy/issues/853))

In this release 6 pull requests were closed.

## Version 0.17.0 (2019/10/01)

### Issues Closed

* [Issue 896](https://github.com/pytroll/satpy/issues/896) - Satpy built-in composite for dust RGB (MSG/SEVIRI data) does not generate expected color pattern
* [Issue 893](https://github.com/pytroll/satpy/issues/893) - Resampling data read with generic image reader corrupts data
* [Issue 876](https://github.com/pytroll/satpy/issues/876) - Update reader configuration with human-readable long names ([PR 887](https://github.com/pytroll/satpy/pull/887))
* [Issue 865](https://github.com/pytroll/satpy/issues/865) - Himawari-8 B13 image is negative?
* [Issue 863](https://github.com/pytroll/satpy/issues/863) - Record what the values from MODIS cloud mask represent
* [Issue 852](https://github.com/pytroll/satpy/issues/852) - No module named geotiepoints.modisinterpolator
* [Issue 851](https://github.com/pytroll/satpy/issues/851) - Scene(reader, filenames = [radiance, geoloc]) expects filenames to be in a specific format
* [Issue 850](https://github.com/pytroll/satpy/issues/850) - group_files function returns only one dictionary ([PR 855](https://github.com/pytroll/satpy/pull/855))
* [Issue 848](https://github.com/pytroll/satpy/issues/848) - FCI composites not loadable ([PR 849](https://github.com/pytroll/satpy/pull/849))
* [Issue 846](https://github.com/pytroll/satpy/issues/846) - Segmentation fault calculating overlay projection with MTG
* [Issue 762](https://github.com/pytroll/satpy/issues/762) - Add x and y coordinates to all loaded gridded DataArrays
* [Issue 735](https://github.com/pytroll/satpy/issues/735) - Bilinear interpolation doesn't work with `StackedAreaDefinitions`
* [Issue 678](https://github.com/pytroll/satpy/issues/678) - Consider using setuptools-scm instead of versioneer ([PR 856](https://github.com/pytroll/satpy/pull/856))
* [Issue 617](https://github.com/pytroll/satpy/issues/617) - Update 'generic_image' reader to use rasterio for area creation ([PR 847](https://github.com/pytroll/satpy/pull/847))
* [Issue 603](https://github.com/pytroll/satpy/issues/603) - Support FY-4A hdf data ([PR 751](https://github.com/pytroll/satpy/pull/751))

In this release 15 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 915](https://github.com/pytroll/satpy/pull/915) - Fix CRS object being recreated when adding CRS coordinate
* [PR 905](https://github.com/pytroll/satpy/pull/905) - Fix ABI L2 reader not scaling and masking data
* [PR 901](https://github.com/pytroll/satpy/pull/901) - Fix compact viirs angle interpolation at the poles
* [PR 891](https://github.com/pytroll/satpy/pull/891) - Fix HDF4 reading utility using dtype classes instead of instances
* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral
* [PR 889](https://github.com/pytroll/satpy/pull/889) - Fix the ninjotiff writer to provide correct scale and offset
* [PR 884](https://github.com/pytroll/satpy/pull/884) - Update mersi2_l1b sensor name to mersi-2 to match pyspectral
* [PR 882](https://github.com/pytroll/satpy/pull/882) - Bug in mitiff writer; calibration information is not written in the imagedescription
* [PR 877](https://github.com/pytroll/satpy/pull/877) - Fix standard_name and units for T4/T13 in viirs_edr_active_fires reader
* [PR 875](https://github.com/pytroll/satpy/pull/875) - Fix error in hncc_dnb composite test
* [PR 871](https://github.com/pytroll/satpy/pull/871) - Fix FY-4 naming to follow WMO Oscar naming
* [PR 869](https://github.com/pytroll/satpy/pull/869) - Fix the nwcsaf-nc reader to drop scale and offset once data is scaled
* [PR 867](https://github.com/pytroll/satpy/pull/867) - Fix attribute datatypes in CF Writer
* [PR 837](https://github.com/pytroll/satpy/pull/837) - Fix Satpy tests to work with new versions of pyresample
* [PR 790](https://github.com/pytroll/satpy/pull/790) - Modify the SLSTR file pattern to support stripe and frame products

#### Features added

* [PR 910](https://github.com/pytroll/satpy/pull/910) - Add near real-time and reprocessed file patterns to TROPOMI L1b reader
* [PR 907](https://github.com/pytroll/satpy/pull/907) - Handle bad orbit coefficients in SEVIRI HRIT header
* [PR 906](https://github.com/pytroll/satpy/pull/906) - Avoid xarray 0.13.0
* [PR 903](https://github.com/pytroll/satpy/pull/903) - Fix HRV area definition tests
* [PR 898](https://github.com/pytroll/satpy/pull/898) - Add night lights compositor and SEVIRI day/night composite
* [PR 897](https://github.com/pytroll/satpy/pull/897) - Cache slicing arrays in bilinear resampler
* [PR 895](https://github.com/pytroll/satpy/pull/895) - Add the possibility to pad the HRV in the seviri hrit reader
* [PR 892](https://github.com/pytroll/satpy/pull/892) - Update coefficients for FY-3B VIRR reflectance calibration
* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral
* [PR 881](https://github.com/pytroll/satpy/pull/881) - Make it possible to reverse a built-in colormap in enhancements
* [PR 880](https://github.com/pytroll/satpy/pull/880) - Replace Numpy files with zarr for resampling LUT caching
* [PR 874](https://github.com/pytroll/satpy/pull/874) - Hardcoding of mersi2 l1b reader valid_range for channel 24 and 25 as these are wrong in the HDF data
* [PR 873](https://github.com/pytroll/satpy/pull/873) - Add mersi2 level 1b ears data file names to the reader
* [PR 872](https://github.com/pytroll/satpy/pull/872) - Fix ABI L1B coordinates to be equivalent at all resolutions
* [PR 856](https://github.com/pytroll/satpy/pull/856) - Switch to setuptools_scm for automatic version numbers from git tags ([678](https://github.com/pytroll/satpy/issues/678))
* [PR 849](https://github.com/pytroll/satpy/pull/849) - Make composites available to FCI FDHSI L1C ([848](https://github.com/pytroll/satpy/issues/848))
* [PR 847](https://github.com/pytroll/satpy/pull/847) - Update 'generic_image' reader to use rasterio for area creation ([617](https://github.com/pytroll/satpy/issues/617))
* [PR 767](https://github.com/pytroll/satpy/pull/767) - Add a reader for NOAA GOES-R ABI L2+ products (abi_l2_nc)
* [PR 751](https://github.com/pytroll/satpy/pull/751) - Add a reader for FY-4A AGRI level 1 data ([603](https://github.com/pytroll/satpy/issues/603))
* [PR 672](https://github.com/pytroll/satpy/pull/672) - Add CIMSS True Color (Natural Color) RGB recipes

#### Documentation changes

* [PR 916](https://github.com/pytroll/satpy/pull/916) - Update orbit coefficient docstrings in seviri_l1b_hrit
* [PR 887](https://github.com/pytroll/satpy/pull/887) - Add more reader metadata like long_name and description ([876](https://github.com/pytroll/satpy/issues/876))
* [PR 878](https://github.com/pytroll/satpy/pull/878) - Add Suyash458 to AUTHORS.md

#### Backwards incompatible changes

* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral

In this release 39 pull requests were closed.

## Version 0.16.1 (2019/07/04)

### Issues Closed

* [Issue 835](https://github.com/pytroll/satpy/issues/835) - modis_l2 reader is not working properly.
* [Issue 829](https://github.com/pytroll/satpy/issues/829) - Citing satpy ([PR 833](https://github.com/pytroll/satpy/pull/833))
* [Issue 826](https://github.com/pytroll/satpy/issues/826) - SEVIRI channels loaded from netcdf in Scene object appear to have wrong names and calibration ([PR 827](https://github.com/pytroll/satpy/pull/827))
* [Issue 823](https://github.com/pytroll/satpy/issues/823) - Netcdf produced with the satpy CF writer don't pass cf-checker ([PR 825](https://github.com/pytroll/satpy/pull/825))
* [Issue 398](https://github.com/pytroll/satpy/issues/398) - Add AUTHORS file to replace individual copyright authors

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 843](https://github.com/pytroll/satpy/pull/843) - Remove Invalid Metadata From ACSPO Reader
* [PR 841](https://github.com/pytroll/satpy/pull/841) - Temporarily remove longitude/latitude 2D xarray coordinates
* [PR 838](https://github.com/pytroll/satpy/pull/838) - Fix 'abi_l1b' reader keeping _Unsigned attribute
* [PR 836](https://github.com/pytroll/satpy/pull/836) - Fix composites not being recorded with desired resolution in deptree
* [PR 831](https://github.com/pytroll/satpy/pull/831) - Fix EWA resampling tests not properly testing caching
* [PR 828](https://github.com/pytroll/satpy/pull/828) - Fix delayed generation of composites and composite resolution
* [PR 827](https://github.com/pytroll/satpy/pull/827) - Corrected nc_key for channels WV_062, WV_073, IR_087 ([826](https://github.com/pytroll/satpy/issues/826))
* [PR 825](https://github.com/pytroll/satpy/pull/825) - Fix the cf writer for better CF compliance ([823](https://github.com/pytroll/satpy/issues/823))

#### Features added

* [PR 842](https://github.com/pytroll/satpy/pull/842) - Fix cviirs reader to be more dask-friendly
* [PR 832](https://github.com/pytroll/satpy/pull/832) - Add pre-commit configuration

#### Documentation changes

* [PR 813](https://github.com/pytroll/satpy/pull/813) - Add some documentation to modis readers similar to hrit

#### Backwards incompatible changes

* [PR 844](https://github.com/pytroll/satpy/pull/844) - Change default CF writer engine to follow xarray defaults

In this release 12 pull requests were closed.

## Version 0.16.0 (2019/06/18)

### Issues Closed

* [Issue 795](https://github.com/pytroll/satpy/issues/795) - Composites delayed in the presence of non-dimensional coordinates ([PR 796](https://github.com/pytroll/satpy/pull/796))
* [Issue 753](https://github.com/pytroll/satpy/issues/753) - seviri l1b netcdf reader needs to be updated due to EUM fixing Attribute Issue ([PR 791](https://github.com/pytroll/satpy/pull/791))
* [Issue 734](https://github.com/pytroll/satpy/issues/734) - Add a compositor that can use static images ([PR 804](https://github.com/pytroll/satpy/pull/804))
* [Issue 670](https://github.com/pytroll/satpy/issues/670) - Refine Satellite Position
* [Issue 640](https://github.com/pytroll/satpy/issues/640) - question: save geotiff without modifying pixel value
* [Issue 625](https://github.com/pytroll/satpy/issues/625) - Fix inconsistency between save_dataset and save_datasets ([PR 803](https://github.com/pytroll/satpy/pull/803))
* [Issue 460](https://github.com/pytroll/satpy/issues/460) - Creating day/night composites ([PR 804](https://github.com/pytroll/satpy/pull/804))

In this release 7 issues were closed.
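As a pointer for the save_dataset/save_datasets inconsistency tracked in Issue 625 above, a minimal sketch of the two entry points; the reader name, input paths and output filenames are placeholders:

```python
# Minimal sketch of the two save entry points; reader and paths are
# placeholders for real input files.
from satpy import Scene

scn = Scene(reader="seviri_l1b_hrit", filenames=["/path/to/hrit/segment1", "/path/to/hrit/segment2"])
scn.load(["overview"])
# Save a single dataset; the writer is chosen from the file extension.
scn.save_dataset("overview", filename="overview.png")
# Save all loaded datasets with an explicit writer.
scn.save_datasets(writer="geotiff")
```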
### Pull Requests Merged

#### Bugs fixed

* [PR 805](https://github.com/pytroll/satpy/pull/805) - Fix 3a3b transition in the aapp l1b reader
* [PR 803](https://github.com/pytroll/satpy/pull/803) - Fix save_datasets always using geotiff writer regardless of filename ([625](https://github.com/pytroll/satpy/issues/625), [625](https://github.com/pytroll/satpy/issues/625))
* [PR 802](https://github.com/pytroll/satpy/pull/802) - Factorize and improve modis reader's interpolation
* [PR 800](https://github.com/pytroll/satpy/pull/800) - Fix 'virr_l1b' reader when slope attribute is 0
* [PR 796](https://github.com/pytroll/satpy/pull/796) - Drop non-dimensional coordinates in Compositor ([795](https://github.com/pytroll/satpy/issues/795), [795](https://github.com/pytroll/satpy/issues/795))
* [PR 792](https://github.com/pytroll/satpy/pull/792) - Bug mitiff writer when only one channel is to be written with calibration information
* [PR 791](https://github.com/pytroll/satpy/pull/791) - Fix handling of file attributes in seviri_l1b_nc reader ([753](https://github.com/pytroll/satpy/issues/753))

#### Features added

* [PR 821](https://github.com/pytroll/satpy/pull/821) - Remove warning about unused kwargs in YAML reader
* [PR 820](https://github.com/pytroll/satpy/pull/820) - Add support for NWCSAF GEO v2018, retain support for v2016
* [PR 818](https://github.com/pytroll/satpy/pull/818) - Add TLEs to dataset attributes in avhrr_l1b_gaclac
* [PR 816](https://github.com/pytroll/satpy/pull/816) - Add grouping parameters for the 'viirs_sdr' reader
* [PR 814](https://github.com/pytroll/satpy/pull/814) - Reader for Hydrology SAF precipitation products
* [PR 806](https://github.com/pytroll/satpy/pull/806) - Add flag_meanings and flag_values to 'viirs_edr_active_fires' categories
* [PR 805](https://github.com/pytroll/satpy/pull/805) - Fix 3a3b transition in the aapp l1b reader
* [PR 804](https://github.com/pytroll/satpy/pull/804) - Add compositor for adding an image as a background ([734](https://github.com/pytroll/satpy/issues/734), [460](https://github.com/pytroll/satpy/issues/460))
* [PR 794](https://github.com/pytroll/satpy/pull/794) - Add 'orbital_parameters' metadata to all geostationary satellite readers
* [PR 788](https://github.com/pytroll/satpy/pull/788) - Add new 'crs' coordinate variable when pyproj 2.0+ is installed
* [PR 779](https://github.com/pytroll/satpy/pull/779) - Add TROPOMI L2 reader (tropomi_l2)
* [PR 736](https://github.com/pytroll/satpy/pull/736) - CF Writer: Attribute encoding, groups and non-dimensional coordinates. Plus: Raw SEVIRI HRIT metadata
* [PR 687](https://github.com/pytroll/satpy/pull/687) - Add Vaisala GLD360-reader.

#### Documentation changes

* [PR 797](https://github.com/pytroll/satpy/pull/797) - Sort AUTHORS.md file by last name

#### Backwards incompatible changes

* [PR 822](https://github.com/pytroll/satpy/pull/822) - Deprecate old reader names so that they are no longer recognized ([598](https://github.com/pytroll/satpy/issues/598))
* [PR 815](https://github.com/pytroll/satpy/pull/815) - Remove legacy GDAL-based geotiff writer support

In this release 23 pull requests were closed.
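For the day/night and background compositing work above (Issues 460 and 734, PR 804), a rough sketch of driving a compositor by hand; `day_rgb` and `night_ir` stand in for already-loaded, co-located DataArrays, and the zenith-angle limits shown are assumptions, so treat this as an illustration rather than the exact API of any one release:

```python
# Rough sketch of blending a day layer into a night layer with
# DayNightCompositor.  day_rgb and night_ir are placeholders for
# already-loaded, co-located DataArrays with area metadata; the
# lim_low/lim_high solar-zenith limits are assumed defaults.
from satpy.composites import DayNightCompositor

compositor = DayNightCompositor("day_night", lim_low=85.0, lim_high=88.0)
blended = compositor([day_rgb, night_ir])
```

In normal use these recipes live in the composites YAML files and are loaded by name through `Scene.load`, so direct instantiation like this is mostly useful for experimentation.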
## Version 0.15.2 (2019/05/22)

### Issues Closed

* [Issue 785](https://github.com/pytroll/satpy/issues/785) - Loading cache for resampling scene fails with numpy 1.16.3 ([PR 787](https://github.com/pytroll/satpy/pull/787))
* [Issue 777](https://github.com/pytroll/satpy/issues/777) - Log warning and error messages are not printed to console ([PR 778](https://github.com/pytroll/satpy/pull/778))
* [Issue 776](https://github.com/pytroll/satpy/issues/776) - africa projection yields CRSError when saving dataset ([PR 780](https://github.com/pytroll/satpy/pull/780))
* [Issue 774](https://github.com/pytroll/satpy/issues/774) - ABI Level 1b long_name when reflectances and brightness temperatures are calculated
* [Issue 766](https://github.com/pytroll/satpy/issues/766) - MODIS l1b reader seems to switch latitude and longitude for 500m data ([PR 781](https://github.com/pytroll/satpy/pull/781))
* [Issue 742](https://github.com/pytroll/satpy/issues/742) - GOES16/17 netcdf reader fails with rasterio installed
* [Issue 649](https://github.com/pytroll/satpy/issues/649) - Make MTG-I reader work ([PR 755](https://github.com/pytroll/satpy/pull/755))
* [Issue 466](https://github.com/pytroll/satpy/issues/466) - Fix deprecation warnings with xarray, dask, and numpy
* [Issue 449](https://github.com/pytroll/satpy/issues/449) - Adding coastlines to single channel not working

In this release 9 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 787](https://github.com/pytroll/satpy/pull/787) - Loading resample cache with numpy 1.16.3 ([785](https://github.com/pytroll/satpy/issues/785))
* [PR 781](https://github.com/pytroll/satpy/pull/781) - Fix longitude/latitude being swapped in modis readers ([766](https://github.com/pytroll/satpy/issues/766))
* [PR 780](https://github.com/pytroll/satpy/pull/780) - Fix builtin areas to be compatible with rasterio ([776](https://github.com/pytroll/satpy/issues/776))
* [PR 778](https://github.com/pytroll/satpy/pull/778) - Fix NullHandler not allowing warning/error logs to be printed to console ([777](https://github.com/pytroll/satpy/issues/777))
* [PR 775](https://github.com/pytroll/satpy/pull/775) - Fix 'abi_l1b' reader not updating long_name when calibrating
* [PR 770](https://github.com/pytroll/satpy/pull/770) - Fix typo for mersi2/abi/ahi using bidirection instead of bidirectional
* [PR 763](https://github.com/pytroll/satpy/pull/763) - Fix AVHRR tests importing external mock on Python 3
* [PR 760](https://github.com/pytroll/satpy/pull/760) - Avoid leaking file objects in NetCDF4FileHandler

#### Features added

* [PR 759](https://github.com/pytroll/satpy/pull/759) - Fix the avhrr_l1b_gaclac to support angles, units and avhrr variants
* [PR 755](https://github.com/pytroll/satpy/pull/755) - Update MTG FCI FDHSI L1C reader for latest data format ([649](https://github.com/pytroll/satpy/issues/649))
* [PR 470](https://github.com/pytroll/satpy/pull/470) - Switched `xarray.unfuncs` to `numpy`

#### Documentation changes

* [PR 773](https://github.com/pytroll/satpy/pull/773) - Improve Scene.show documentation
* [PR 771](https://github.com/pytroll/satpy/pull/771) - Update pull request template to include AUTHORS and flake8 changes

In this release 13 pull requests were closed.

## Version 0.15.1 (2019/05/10)

### Pull Requests Merged

#### Bugs fixed

* [PR 761](https://github.com/pytroll/satpy/pull/761) - Fix mersi2_l1b reader setting sensor as a set object

In this release 1 pull request was closed.
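Related to the resampling-cache fix above (Issue 785/PR 787), a minimal sketch of resampling with an on-disk cache; the reader, channel, area name and cache directory are placeholders:

```python
# Sketch of nearest-neighbour resampling with the lookup tables cached on
# disk so repeated runs over the same grids are faster; reader, channel,
# area and cache directory are placeholders.
from satpy import Scene

scn = Scene(reader="avhrr_l1b_aapp", filenames=["/path/to/aapp/file"])
scn.load(["4"])
local_scn = scn.resample("euro4", resampler="nearest", cache_dir="/tmp/satpy_cache")
local_scn.save_datasets()
```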
## Version 0.15.0 (2019/05/10)

### Issues Closed

* [Issue 758](https://github.com/pytroll/satpy/issues/758) - RuntimeError with NetCDF4FileHandler
* [Issue 730](https://github.com/pytroll/satpy/issues/730) - Rewrite introduction paragraph in documentation ([PR 747](https://github.com/pytroll/satpy/pull/747))
* [Issue 725](https://github.com/pytroll/satpy/issues/725) - Update 'viirs_edr_active_fires' reader to read newest algorithm output ([PR 733](https://github.com/pytroll/satpy/pull/733))
* [Issue 706](https://github.com/pytroll/satpy/issues/706) - Add reader for FY3D MERSI2 L1B data ([PR 740](https://github.com/pytroll/satpy/pull/740))
* [Issue 434](https://github.com/pytroll/satpy/issues/434) - Allow readers to filter the available datasets configured in YAML ([PR 739](https://github.com/pytroll/satpy/pull/739))

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 757](https://github.com/pytroll/satpy/pull/757) - Fix MODIS L1B and L2 readers not reading geolocation properly
* [PR 754](https://github.com/pytroll/satpy/pull/754) - Fix optional modifier dependencies being unloaded for delayed composites
* [PR 750](https://github.com/pytroll/satpy/pull/750) - Add missing warnings import to geotiff writer

#### Features added

* [PR 752](https://github.com/pytroll/satpy/pull/752) - Add scanline timestamps to seviri_l1b_hrit
* [PR 740](https://github.com/pytroll/satpy/pull/740) - Add FY-3D MERSI-2 L1B Reader (mersi2_l1b) ([706](https://github.com/pytroll/satpy/issues/706))
* [PR 739](https://github.com/pytroll/satpy/pull/739) - Refactor available datasets logic to be more flexible ([434](https://github.com/pytroll/satpy/issues/434))
* [PR 738](https://github.com/pytroll/satpy/pull/738) - Remove unused area slice-based filtering in the base reader
* [PR 733](https://github.com/pytroll/satpy/pull/733) - Update VIIRS EDR Active Fires ([725](https://github.com/pytroll/satpy/issues/725))
* [PR 728](https://github.com/pytroll/satpy/pull/728) - Add VIIRS Fire Temperature rgb
* [PR 711](https://github.com/pytroll/satpy/pull/711) - Replace usage of deprecated get_proj_coords_dask
* [PR 611](https://github.com/pytroll/satpy/pull/611) - Add MODIS L2 reader
* [PR 580](https://github.com/pytroll/satpy/pull/580) - Allow colormaps to be saved with geotiff writer
* [PR 532](https://github.com/pytroll/satpy/pull/532) - Add enhancement for VIIRS flood reader

#### Documentation changes

* [PR 747](https://github.com/pytroll/satpy/pull/747) - Update index page introduction ([730](https://github.com/pytroll/satpy/issues/730))

In this release 14 pull requests were closed.

## Version 0.14.2 (2019/04/25)

### Issues Closed

* [Issue 679](https://github.com/pytroll/satpy/issues/679) - Cannot save a multiscene animation - imagio:ffmpeg warning

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 731](https://github.com/pytroll/satpy/pull/731) - Fix viirs sdr reader to allow ivcdb files in the sdr directory
* [PR 726](https://github.com/pytroll/satpy/pull/726) - Bugfixes in the Electro-L reader ([](https://groups.google.com/forum//issues/))

#### Features added

* [PR 729](https://github.com/pytroll/satpy/pull/729) - Add "extras" checks to check_satpy utility function

#### Documentation changes

* [PR 724](https://github.com/pytroll/satpy/pull/724) - Add codeowners

In this release 4 pull requests were closed.
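PR 729 above extends the `check_satpy` diagnostic that the bug report template earlier in this archive asks reporters to run; a minimal sketch:

```python
# Print which readers, writers and optional "extras" are usable in the
# current environment, along with any missing dependencies.
from satpy.config import check_satpy

check_satpy()
```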
## Version 0.14.1 (2019/04/12)

### Issues Closed

* [Issue 716](https://github.com/pytroll/satpy/issues/716) - Reading the EUMETSAT compact viirs format returns wrong platform name (J01 instead of NOAA-20) ([PR 717](https://github.com/pytroll/satpy/pull/717))
* [Issue 710](https://github.com/pytroll/satpy/issues/710) - Question (maybe a bug): Why does RBG array exported with scn.save_dataset contain values greater than 255 ?

In this release 2 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 721](https://github.com/pytroll/satpy/pull/721) - Consistent platform id attribute across NAT + HRIT SEVIRI readers
* [PR 719](https://github.com/pytroll/satpy/pull/719) - Fix VIIRS 'night_fog' RGB composite recipe to use M12 instead of M14
* [PR 718](https://github.com/pytroll/satpy/pull/718) - Fix 'seviri_l1b_hrit' reader's area creation for pyproj 2.0+
* [PR 717](https://github.com/pytroll/satpy/pull/717) - Fix 'viirs_compact' and 'viirs_l1b' readers to return WMO/Oscar platform name ([716](https://github.com/pytroll/satpy/issues/716))
* [PR 715](https://github.com/pytroll/satpy/pull/715) - Fix hurricane florence demo download to only include M1 files
* [PR 712](https://github.com/pytroll/satpy/pull/712) - Fix 'mitiff' writer not clipping enhanced data before scaling to 8 bit values
* [PR 709](https://github.com/pytroll/satpy/pull/709) - Fix datetime64 use in 'seviri_l1b_hrit' reader for numpy < 1.15
* [PR 708](https://github.com/pytroll/satpy/pull/708) - Fix 'seviri_0deg' and 'seviri_iodc' builtin areas (areas.yaml) not matching reader areas

#### Documentation changes

* [PR 713](https://github.com/pytroll/satpy/pull/713) - Add links to source from API documentation

In this release 9 pull requests were closed.

## Version 0.14.0 (2019/04/09)

### Issues Closed

* [Issue 698](https://github.com/pytroll/satpy/issues/698) - Read WKT geotiff
* [Issue 692](https://github.com/pytroll/satpy/issues/692) - sdr_viirs_l1b reader fails in 0.13, recent master, Works with version 0.12.0 ([PR 693](https://github.com/pytroll/satpy/pull/693))
* [Issue 683](https://github.com/pytroll/satpy/issues/683) - Question: Change image size when saving with satpy.save_dataset ([PR 691](https://github.com/pytroll/satpy/pull/691))
* [Issue 681](https://github.com/pytroll/satpy/issues/681) - incorrect data offset in HSD files ([PR 689](https://github.com/pytroll/satpy/pull/689))
* [Issue 666](https://github.com/pytroll/satpy/issues/666) - Add drawing of lat lon graticules when saving dataset ([PR 668](https://github.com/pytroll/satpy/pull/668))
* [Issue 646](https://github.com/pytroll/satpy/issues/646) - Add 'demo' subpackage for accessing example data ([PR 686](https://github.com/pytroll/satpy/pull/686))
* [Issue 528](https://github.com/pytroll/satpy/issues/528) - Support dask version of PySpectral ([PR 529](https://github.com/pytroll/satpy/pull/529))
* [Issue 511](https://github.com/pytroll/satpy/issues/511) - Add/update documentation about composites and compositors ([PR 705](https://github.com/pytroll/satpy/pull/705))

In this release 8 issues were closed.
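For the 'demo' subpackage from Issue 646 above, a rough sketch of fetching example ABI data; the helper name follows PR 686, and downloading needs network access plus the gcsfs package:

```python
# Rough sketch of pulling ABI demo data and opening it as a Scene; the
# helper downloads files (to the current directory by default) and
# returns their paths.
from satpy import Scene
from satpy.demo import get_us_midlatitude_cyclone_abi

filenames = get_us_midlatitude_cyclone_abi()
scn = Scene(reader="abi_l1b", filenames=filenames)
scn.load(["true_color"])
```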
### Pull Requests Merged

#### Bugs fixed

* [PR 700](https://github.com/pytroll/satpy/pull/700) - Mask out invalid values in the precipitation probability product
* [PR 693](https://github.com/pytroll/satpy/pull/693) - Fix VIIRS SDR reading of visible channels at nighttime ([692](https://github.com/pytroll/satpy/issues/692))
* [PR 689](https://github.com/pytroll/satpy/pull/689) - Fix Himawari HSD reader's incorrect header information ([681](https://github.com/pytroll/satpy/issues/681))
* [PR 688](https://github.com/pytroll/satpy/pull/688) - Fix offset correction in seviri_l1b_hrit
* [PR 685](https://github.com/pytroll/satpy/pull/685) - Fix bug in Scene.resample causing AssertionError
* [PR 677](https://github.com/pytroll/satpy/pull/677) - Fix MultiScene save_animation when distributed isn't installed
* [PR 675](https://github.com/pytroll/satpy/pull/675) - Do not pass `filter_parameters` to the filehandler creation

#### Features added

* [PR 691](https://github.com/pytroll/satpy/pull/691) - Add Scene.aggregate method (python 3 only) ([683](https://github.com/pytroll/satpy/issues/683)) (see the sketch after this section)
* [PR 686](https://github.com/pytroll/satpy/pull/686) - Add demo subpackage to simplify test data download ([646](https://github.com/pytroll/satpy/issues/646))
* [PR 676](https://github.com/pytroll/satpy/pull/676) - Add night_fog composite for MODIS
* [PR 674](https://github.com/pytroll/satpy/pull/674) - Use platform ID to choose the right reader for AVHRR GAC data
* [PR 671](https://github.com/pytroll/satpy/pull/671) - Add satellite position to dataset attributes (seviri_l1b_hrit)
* [PR 669](https://github.com/pytroll/satpy/pull/669) - Add ocean-color composite for VIIRS and MODIS
* [PR 668](https://github.com/pytroll/satpy/pull/668) - Add grid/graticules to the add_overlay function ([666](https://github.com/pytroll/satpy/issues/666))
* [PR 665](https://github.com/pytroll/satpy/pull/665) - Add reader for VIIRS Active Fires
* [PR 645](https://github.com/pytroll/satpy/pull/645) - Add reader for the SAR OCN L2 wind product in SAFE format
* [PR 565](https://github.com/pytroll/satpy/pull/565) - Add reader for FY-3 VIRR (virr_l1b)
* [PR 529](https://github.com/pytroll/satpy/pull/529) - Add dask support to NIRReflectance modifier ([528](https://github.com/pytroll/satpy/issues/528))

#### Documentation changes

* [PR 707](https://github.com/pytroll/satpy/pull/707) - Add ABI Meso demo data case and clean up documentation
* [PR 705](https://github.com/pytroll/satpy/pull/705) - Document composites ([511](https://github.com/pytroll/satpy/issues/511))
* [PR 701](https://github.com/pytroll/satpy/pull/701) - Clarify release instructions
* [PR 699](https://github.com/pytroll/satpy/pull/699) - Rename SatPy to Satpy throughout documentation
* [PR 673](https://github.com/pytroll/satpy/pull/673) - Add information about GDAL_CACHEMAX to FAQ

In this release 23 pull requests were closed.
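PRs 686 and 691 above add the demo subpackage and `Scene.aggregate`. A minimal sketch of how the two combine; the function name and keyword arguments reflect my reading of the 0.14.0-era API, and the band `C01` is only an illustrative ABI channel:

```python
from satpy import Scene
from satpy.demo import get_us_midlatitude_cyclone_abi

# Download the GOES-16 ABI demo granules (needs the gcsfs package).
filenames = get_us_midlatitude_cyclone_abi()
scn = Scene(filenames=filenames, reader='abi_l1b')
scn.load(['C01'])

# Average 2x2 blocks of pixels, halving the resolution of each dataset
# (Scene.aggregate is python 3 only in this release).
low_res = scn.aggregate(x=2, y=2, func='mean')
low_res.save_dataset('C01', filename='C01_half_res.tif')
```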
## Version 0.13.0 (2019/03/18)

### Issues Closed

* [Issue 641](https://github.com/pytroll/satpy/issues/641) - After pip upgrade to satpy 0.12 and pyproj 2.0.1 got pyproj.exceptions.CRSError
* [Issue 626](https://github.com/pytroll/satpy/issues/626) - Issue loading MODIS Aqua data ([PR 648](https://github.com/pytroll/satpy/pull/648))
* [Issue 620](https://github.com/pytroll/satpy/issues/620) - Add FAQ about controlling number of threads for pykdtree and blas ([PR 621](https://github.com/pytroll/satpy/pull/621))
* [Issue 521](https://github.com/pytroll/satpy/issues/521) - Interactively set the calibration mode when creating the Scene object ([PR 543](https://github.com/pytroll/satpy/pull/543))

In this release 4 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 664](https://github.com/pytroll/satpy/pull/664) - Fix Scene.crop with RGBs and multidimensional data
* [PR 662](https://github.com/pytroll/satpy/pull/662) - Fix masked resampling when dataset dtype is integer
* [PR 661](https://github.com/pytroll/satpy/pull/661) - Fix CTTH composite not to mark invalid data as cloud-free
* [PR 660](https://github.com/pytroll/satpy/pull/660) - Fix seviri_l1b_hrit prologue/epilogue readers
* [PR 655](https://github.com/pytroll/satpy/pull/655) - Fix yaml load to be compatible with pyyaml 5.1
* [PR 652](https://github.com/pytroll/satpy/pull/652) - Fix resampling of ancillary variables that are also first class datasets
* [PR 648](https://github.com/pytroll/satpy/pull/648) - Add wrapped line support for metadata in modis_l1b reader ([626](https://github.com/pytroll/satpy/issues/626))
* [PR 644](https://github.com/pytroll/satpy/pull/644) - Fix the MODIS overview composite not to sun-normalize the IR channel
* [PR 633](https://github.com/pytroll/satpy/pull/633) - Fix VIIRS HNCC composite passing xarray objects to dask
* [PR 632](https://github.com/pytroll/satpy/pull/632) - Fix start and end times when missing in the CF writer

#### Features added

* [PR 647](https://github.com/pytroll/satpy/pull/647) - Switch python-hdf4 dependencies to pyhdf
* [PR 643](https://github.com/pytroll/satpy/pull/643) - In cira_stretch, clip values less than or equal to 0 to avoid NaNs and -inf
* [PR 642](https://github.com/pytroll/satpy/pull/642) - Fix PPS v2018 CPP products
* [PR 638](https://github.com/pytroll/satpy/pull/638) - Add processing-mode and disposition-mode to the avhrr-l1b-eps file name
* [PR 636](https://github.com/pytroll/satpy/pull/636) - Facilitate selection of calibration coefficients in seviri_l1b_hrit
* [PR 635](https://github.com/pytroll/satpy/pull/635) - Add local caching of slicing for data reduction
* [PR 627](https://github.com/pytroll/satpy/pull/627) - Add DNB satellite angles (DNB_SENZ, DNB_SENA) to VIIRS SDR reader
* [PR 557](https://github.com/pytroll/satpy/pull/557) - Improve the SAR-C reading and Ice composite
* [PR 543](https://github.com/pytroll/satpy/pull/543) - Calibration mode can now be passed via a keyword argument ([521](https://github.com/pytroll/satpy/issues/521))
* [PR 538](https://github.com/pytroll/satpy/pull/538) - Support CLASS packed viirs files in viirs_sdr reader

#### Documentation changes

* [PR 659](https://github.com/pytroll/satpy/pull/659) - DOC: Refer to PyTroll coding guidelines
* [PR 653](https://github.com/pytroll/satpy/pull/653) - DOC: Fix small typos in documentation
* [PR 651](https://github.com/pytroll/satpy/pull/651) - Rename changelog for releases before 0.9.0
* [PR 621](https://github.com/pytroll/satpy/pull/621) - Add FAQ items on number of workers and threads ([620](https://github.com/pytroll/satpy/issues/620))

In this release 24 pull requests were closed.

## Version 0.12.0 (2019/02/15)

### Issues Closed

* [Issue 601](https://github.com/pytroll/satpy/issues/601) - MultiScene 'save_animation' fails if "datasets=" isn't provided ([PR 602](https://github.com/pytroll/satpy/pull/602))
* [Issue 310](https://github.com/pytroll/satpy/issues/310) - Create MultiScene from list of files ([PR 576](https://github.com/pytroll/satpy/pull/576)) (see the sketch below)

In this release 2 issues were closed.
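Issues 310 and 601 above both concern the MultiScene workflow addressed by PRs 576 and 602 in the list that follows. A minimal sketch under the assumption of a set of ABI L1B granules on disk; the glob pattern and channel name are illustrative only, and saving the mp4 requires imageio with ffmpeg:

```python
from glob import glob

from satpy.multiscene import MultiScene

# Build one Scene per time step from a flat list of files (PR 576)
# and animate them; '{name}' and '{start_time}' are filled in from
# each frame's metadata.
files = glob('/data/abi/OR_ABI-L1b-RadC*.nc')  # hypothetical path
mscn = MultiScene.from_files(files, reader='abi_l1b')
mscn.load(['C02'])
mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2)
```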
### Pull Requests Merged

#### Bugs fixed

* [PR 616](https://github.com/pytroll/satpy/pull/616) - Fix geotiff writer being unimportable if gdal isn't installed
* [PR 615](https://github.com/pytroll/satpy/pull/615) - Fix confusing error in abi_l1b reader when file fails to open
* [PR 607](https://github.com/pytroll/satpy/pull/607) - Fix VIIRS 'histogram_dnb' compositor not returning new data
* [PR 605](https://github.com/pytroll/satpy/pull/605) - Fix enhancements using dask delayed on internal functions
* [PR 602](https://github.com/pytroll/satpy/pull/602) - Fix MultiScene save_animation not using dataset IDs correctly ([601](https://github.com/pytroll/satpy/issues/601))
* [PR 600](https://github.com/pytroll/satpy/pull/600) - Fix resample reduce_data bug introduced in #582

#### Features added

* [PR 614](https://github.com/pytroll/satpy/pull/614) - Support for reduced resolution OLCI data
* [PR 613](https://github.com/pytroll/satpy/pull/613) - Add 'crop' and 'save_datasets' to MultiScene
* [PR 609](https://github.com/pytroll/satpy/pull/609) - Add ability to use dask distributed when generating animation videos
* [PR 582](https://github.com/pytroll/satpy/pull/582) - Add 'reduce_data' keyword argument to disable cropping before resampling
* [PR 576](https://github.com/pytroll/satpy/pull/576) - Add group_files and from_files utility functions for creating Scenes from multiple files ([310](https://github.com/pytroll/satpy/issues/310))
* [PR 567](https://github.com/pytroll/satpy/pull/567) - Add utility functions for generating GeoViews plots ([541](https://github.com/pytroll/satpy/issues/541))

In this release 12 pull requests were closed.

## Version 0.11.2 (2019/01/28)

### Issues Closed

* [Issue 584](https://github.com/pytroll/satpy/issues/584) - DayNightCompositor does not work with e.g. overview_sun as the day part ([PR 593](https://github.com/pytroll/satpy/pull/593))
* [Issue 577](https://github.com/pytroll/satpy/issues/577) - Creation of composites using `sunz_corrected` modifier fails with VIIRS SDR data
* [Issue 569](https://github.com/pytroll/satpy/issues/569) - Cannot show or save ABI true color image (RuntimeWarning: invalid value encountered in log)
* [Issue 531](https://github.com/pytroll/satpy/issues/531) - Mask space pixels in AHI HSD reader ([PR 592](https://github.com/pytroll/satpy/pull/592))
* [Issue 106](https://github.com/pytroll/satpy/issues/106) - Warnings

In this release 5 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 594](https://github.com/pytroll/satpy/pull/594) - Fix VIIRS L1B reader not using standard 'y' and 'x' dimension names
* [PR 593](https://github.com/pytroll/satpy/pull/593) - Fix sunz_corrected modifier adding unnecessary x and y coordinates ([587](https://github.com/pytroll/satpy/issues/587), [584](https://github.com/pytroll/satpy/issues/584))
* [PR 592](https://github.com/pytroll/satpy/pull/592) - Fix masking of AHI HSD space pixels ([531](https://github.com/pytroll/satpy/issues/531))
* [PR 589](https://github.com/pytroll/satpy/pull/589) - Fix dask not importing sharedict automatically in dask 1.1+
* [PR 588](https://github.com/pytroll/satpy/pull/588) - Fix start_time type in seviri_l1b_nc reader
* [PR 585](https://github.com/pytroll/satpy/pull/585) - Fix geotiff writer not using fill_value from writer YAML config
* [PR 572](https://github.com/pytroll/satpy/pull/572) - Fix VIIRS SDR masking and distracting colors in composites
* [PR 570](https://github.com/pytroll/satpy/pull/570) - Fix CF epoch for xarray compat
* [PR 563](https://github.com/pytroll/satpy/pull/563) - Fix StopIteration and python 3.7 compatibility issue in MultiScene
* [PR 554](https://github.com/pytroll/satpy/pull/554) - Fix AreaDefinition usage to work with newer versions of pyresample

#### Features added

* [PR 561](https://github.com/pytroll/satpy/pull/561) - Add AHI HRIT B07 files for high resolution night data

#### Documentation changes

* [PR 590](https://github.com/pytroll/satpy/pull/590) - Add FAQ page to docs
* [PR 575](https://github.com/pytroll/satpy/pull/575) - Add page for data download resources
* [PR 574](https://github.com/pytroll/satpy/pull/574) - Add code of conduct

In this release 14 pull requests were closed.

## Version 0.11.1 (2018/12/27)

### Pull Requests Merged

#### Bugs fixed

* [PR 560](https://github.com/pytroll/satpy/pull/560) - Fix available_composite_ids including inline comp dependencies (see the sketch below)

In this release 1 pull request was closed.
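PR 560 above fixes composite-ID introspection. For context, a minimal sketch of how users typically list what a loaded Scene can generate; the file pattern is hypothetical and any reader works the same way:

```python
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob('/data/hrit/*201812211200*'),  # hypothetical
            reader='seviri_l1b_hrit')

# Names of file-provided datasets and of the composites that could be
# generated from them (inline composite dependencies included).
print(scn.available_dataset_names())
print(scn.available_composite_names())
```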
## Version 0.11.0 (2018/12/21)

### Issues Closed

* [Issue 555](https://github.com/pytroll/satpy/issues/555) - GOES-16 geolocation seems off when saving as TIFF
* [Issue 552](https://github.com/pytroll/satpy/issues/552) - GOES composites failing ([PR 553](https://github.com/pytroll/satpy/pull/553))
* [Issue 534](https://github.com/pytroll/satpy/issues/534) - Support GOES-15 in netcdf format from Eumetcast (`nc_goes` reader) ([PR 530](https://github.com/pytroll/satpy/pull/530))
* [Issue 527](https://github.com/pytroll/satpy/issues/527) - [SEP] Reader naming conventions ([PR 546](https://github.com/pytroll/satpy/pull/546))
* [Issue 518](https://github.com/pytroll/satpy/issues/518) - Make bilinear interpolation dask/xarray friendly ([PR 519](https://github.com/pytroll/satpy/pull/519))
* [Issue 467](https://github.com/pytroll/satpy/issues/467) - Flake8-ify all of satpy ([PR 515](https://github.com/pytroll/satpy/pull/515))
* [Issue 459](https://github.com/pytroll/satpy/issues/459) - How to colorize images
* [Issue 449](https://github.com/pytroll/satpy/issues/449) - Adding coastlines to single channel not working ([PR 551](https://github.com/pytroll/satpy/pull/551))
* [Issue 337](https://github.com/pytroll/satpy/issues/337) - Plot true color by using VIIRS SDR
* [Issue 333](https://github.com/pytroll/satpy/issues/333) - `available_readers` to detail unavailable items
* [Issue 263](https://github.com/pytroll/satpy/issues/263) - How to get the available dataset names from the reader
* [Issue 147](https://github.com/pytroll/satpy/issues/147) - SEVIRI HRIT reading: more user-friendly warning when no EPI/PRO files are present ([PR 452](https://github.com/pytroll/satpy/pull/452))

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 556](https://github.com/pytroll/satpy/pull/556) - Fix turning off enhancements in writers for float data
* [PR 553](https://github.com/pytroll/satpy/pull/553) - Fix DifferenceCompositor and other compositors when areas are incompatible ([552](https://github.com/pytroll/satpy/issues/552))
* [PR 550](https://github.com/pytroll/satpy/pull/550) - Fix AHI HRIT file patterns so the area's ID is correct
* [PR 548](https://github.com/pytroll/satpy/pull/548) - Fix ratio sharpening compositors when the ratio is negative
* [PR 547](https://github.com/pytroll/satpy/pull/547) - Fix EWA resampling for new versions of pyresample
* [PR 542](https://github.com/pytroll/satpy/pull/542) - Fix palette application for PPS 2018 products
* [PR 508](https://github.com/pytroll/satpy/pull/508) - Fix the cf_writer to accept single-valued time coordinate variable

#### Features added

* [PR 558](https://github.com/pytroll/satpy/pull/558) - Make counts available in ahi_hsd
* [PR 551](https://github.com/pytroll/satpy/pull/551) - Fix image overlays for single band data (requires trollimage 1.6+) ([449](https://github.com/pytroll/satpy/issues/449))
* [PR 549](https://github.com/pytroll/satpy/pull/549) - Fix NWCSAF PPS CT palette from v2018 to be backwards compatible
* [PR 546](https://github.com/pytroll/satpy/pull/546) - Rename readers to meet new reader naming scheme ([527](https://github.com/pytroll/satpy/issues/527))
* [PR 545](https://github.com/pytroll/satpy/pull/545) - Add configurable parameters to solar zenith correctors
* [PR 530](https://github.com/pytroll/satpy/pull/530) - Add reader for GOES-15 netcdf Eumetsat format ([534](https://github.com/pytroll/satpy/issues/534))
* [PR 519](https://github.com/pytroll/satpy/pull/519) - Add xarray/dask bilinear resampling ([518](https://github.com/pytroll/satpy/issues/518))
* [PR 507](https://github.com/pytroll/satpy/pull/507) - Change default enhancement for reflectance data to gamma 1.5
* [PR 452](https://github.com/pytroll/satpy/pull/452) - Improve handling of missing file requirements in readers ([147](https://github.com/pytroll/satpy/issues/147))

#### Documentation changes

* [PR 533](https://github.com/pytroll/satpy/pull/533) - Fix copy/paste error in readers table for viirs_l1b
* [PR 515](https://github.com/pytroll/satpy/pull/515) - Fix all flake8 errors in satpy package code ([467](https://github.com/pytroll/satpy/issues/467))

#### Backwards incompatible changes

* [PR 546](https://github.com/pytroll/satpy/pull/546) - Rename readers to meet new reader naming scheme ([527](https://github.com/pytroll/satpy/issues/527))
* [PR 507](https://github.com/pytroll/satpy/pull/507) - Change default enhancement for reflectance data to gamma 1.5

In this release 20 pull requests were closed.

## Version 0.10.0 (2018/11/23)

### Issues Closed

* [Issue 491](https://github.com/pytroll/satpy/issues/491) - Area definition of incomplete SEVIRI images
* [Issue 487](https://github.com/pytroll/satpy/issues/487) - Resampling a user-defined Scene
* [Issue 465](https://github.com/pytroll/satpy/issues/465) - Native resampler fails with 3D DataArrays ([PR 468](https://github.com/pytroll/satpy/pull/468))
* [Issue 464](https://github.com/pytroll/satpy/issues/464) - Drawing coastlines/borders with save_datasets ([PR 469](https://github.com/pytroll/satpy/pull/469))
* [Issue 453](https://github.com/pytroll/satpy/issues/453) - Review subclasses of BaseFileHandler ([PR 455](https://github.com/pytroll/satpy/pull/455))
* [Issue 450](https://github.com/pytroll/satpy/issues/450) - Allow readers to accept pathlib.Path instances ([PR 451](https://github.com/pytroll/satpy/pull/451)) (see the sketch below)
* [Issue 445](https://github.com/pytroll/satpy/issues/445) - Readthedocs builds are failing
* [Issue 439](https://github.com/pytroll/satpy/issues/439) - KeyError when creating true_color for ABI
* [Issue 417](https://github.com/pytroll/satpy/issues/417) - Add custom string formatter for lower/upper support
* [Issue 414](https://github.com/pytroll/satpy/issues/414) - Inconsistent units of geostationary radiances ([PR 490](https://github.com/pytroll/satpy/pull/490))
* [Issue 405](https://github.com/pytroll/satpy/issues/405) - Angle interpolation for MODIS data missing ([PR 430](https://github.com/pytroll/satpy/pull/430))
* [Issue 397](https://github.com/pytroll/satpy/issues/397) - Add README to setup.py description ([PR 443](https://github.com/pytroll/satpy/pull/443))
* [Issue 369](https://github.com/pytroll/satpy/issues/369) - Mitiff writer is broken ([PR 480](https://github.com/pytroll/satpy/pull/480))

In this release 13 issues were closed.
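Issue 450 above, resolved by PR 451 in the list that follows, lets readers accept `pathlib.Path` objects. A minimal sketch; the data directory and reader are illustrative:

```python
from pathlib import Path

from satpy import Scene

# Path objects can be passed directly instead of plain strings
# (hypothetical data directory).
data_dir = Path('/data/viirs_sdr')
scn = Scene(filenames=sorted(data_dir.glob('*.h5')), reader='viirs_sdr')
scn.load(['M12'])
```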
### Pull Requests Merged

#### Bugs fixed

* [PR 510](https://github.com/pytroll/satpy/pull/510) - Make sure a discrete data type is preserved through resampling
* [PR 506](https://github.com/pytroll/satpy/pull/506) - Remove dependency on nc_nwcsaf_msg
* [PR 504](https://github.com/pytroll/satpy/pull/504) - Change unnecessary warning messages to debug
* [PR 496](https://github.com/pytroll/satpy/pull/496) - Add more descriptive names to AHI readers' AreaDefinition names
* [PR 492](https://github.com/pytroll/satpy/pull/492) - Fix thinned MODIS reading in 'hdfeos_l1b' reader
* [PR 480](https://github.com/pytroll/satpy/pull/480) - Fix 'mitiff' writer to use 'base_dir' properly ([369](https://github.com/pytroll/satpy/issues/369))
* [PR 476](https://github.com/pytroll/satpy/pull/476) - Fix handling of navigation in a grib file with lons greater than 180
* [PR 473](https://github.com/pytroll/satpy/pull/473) - Change combine_metadata to average any 'time' fields
* [PR 471](https://github.com/pytroll/satpy/pull/471) - Fix offset between VIS+IR and HRV navigation for HRIT SEVIRI
* [PR 469](https://github.com/pytroll/satpy/pull/469) - Fix attributes not being preserved when adding overlays or decorations ([464](https://github.com/pytroll/satpy/issues/464))
* [PR 468](https://github.com/pytroll/satpy/pull/468) - Fix native resampling when RGBs are resampled ([465](https://github.com/pytroll/satpy/issues/465))
* [PR 458](https://github.com/pytroll/satpy/pull/458) - Fix the slstr reader for consistency and TIR view
* [PR 456](https://github.com/pytroll/satpy/pull/456) - Fix SCMI writer not writing fill values properly
* [PR 448](https://github.com/pytroll/satpy/pull/448) - Fix saving a dataset with a prerequisites attrs to netcdf
* [PR 447](https://github.com/pytroll/satpy/pull/447) - Fix masking in DayNightCompositor when composites have partial missing data
* [PR 446](https://github.com/pytroll/satpy/pull/446) - Fix nc_nwcsaf_msg reader's handling of projection units

#### Features added

* [PR 503](https://github.com/pytroll/satpy/pull/503) - Add two luminance sharpening compositors
* [PR 498](https://github.com/pytroll/satpy/pull/498) - Make it possible to configure in-line composites
* [PR 488](https://github.com/pytroll/satpy/pull/488) - Add the check_satpy function to find missing dependencies
* [PR 481](https://github.com/pytroll/satpy/pull/481) - Refactor SCMI writer to be dask friendly
* [PR 478](https://github.com/pytroll/satpy/pull/478) - Allow writers to create output directories if they don't exist
* [PR 477](https://github.com/pytroll/satpy/pull/477) - Add additional metadata to ABI L1B DataArrays
* [PR 474](https://github.com/pytroll/satpy/pull/474) - Improve handling of dependency loading when reader has multiple matches
* [PR 463](https://github.com/pytroll/satpy/pull/463) - MSG Level 1.5 NetCDF reader (code and yaml file) for VIS/IR channels
* [PR 455](https://github.com/pytroll/satpy/pull/455) - Ensure file handlers all use filenames as strings ([453](https://github.com/pytroll/satpy/issues/453))
* [PR 451](https://github.com/pytroll/satpy/pull/451) - Allow readers to accept pathlib.Path instances as filenames ([450](https://github.com/pytroll/satpy/issues/450))
* [PR 442](https://github.com/pytroll/satpy/pull/442) - Replace areas.def with areas.yaml
* [PR 441](https://github.com/pytroll/satpy/pull/441) - Fix metop reader
* [PR 438](https://github.com/pytroll/satpy/pull/438) - Add new OLCI L2 datasets
* [PR 436](https://github.com/pytroll/satpy/pull/436) - Allow on-the-fly decompression of xRIT files in xRIT readers
* [PR 430](https://github.com/pytroll/satpy/pull/430) - Implement fast MODIS lon/lat and angles interpolation ([405](https://github.com/pytroll/satpy/issues/405))

#### Documentation changes

* [PR 501](https://github.com/pytroll/satpy/pull/501) - Add DOI role and reference to Zinke DNB method
* [PR 489](https://github.com/pytroll/satpy/pull/489) - Add a first version on how to write a custom reader
* [PR 444](https://github.com/pytroll/satpy/pull/444) - Fix the readers table in the sphinx docs so it wraps text
* [PR 443](https://github.com/pytroll/satpy/pull/443) - Add long_description to setup.py ([397](https://github.com/pytroll/satpy/issues/397))
* [PR 440](https://github.com/pytroll/satpy/pull/440) - Fix CI badges in README

#### Backwards incompatible changes

* [PR 485](https://github.com/pytroll/satpy/pull/485) - Deprecate 'enhancement_config' keyword argument in favor of 'enhance'

In this release 37 pull requests were closed.

## Version 0.9.4 (2018/09/29)

### Pull Requests Merged

#### Bugs fixed

* [PR 433](https://github.com/pytroll/satpy/pull/433) - Fix native_msg reader's standard_names to match other satpy readers
* [PR 432](https://github.com/pytroll/satpy/pull/432) - Fix reader config loading so it raises an exception for a bad reader name
* [PR 428](https://github.com/pytroll/satpy/pull/428) - Fix start_time and end_time being lists in native_msg reader
* [PR 426](https://github.com/pytroll/satpy/pull/426) - Fix hrit_jma reader not having satellite lon/lat/alt info
* [PR 423](https://github.com/pytroll/satpy/pull/423) - Fix save_dataset not propagating fill_value
* [PR 421](https://github.com/pytroll/satpy/pull/421) - Fix masking and simplify avhrr_aapp_l1b reader
* [PR 413](https://github.com/pytroll/satpy/pull/413) - Fix calculating solar zenith angle in eps_l1b reader
* [PR 412](https://github.com/pytroll/satpy/pull/412) - Fix platform_name and sensor not being added by avhrr eps l1b reader

#### Features added

* [PR 415](https://github.com/pytroll/satpy/pull/415) - Add hrit_jma file patterns that don't include segments

In this release 9 pull requests were closed.

## Version 0.9.3 (2018/09/10)

### Issues Closed

* [Issue 336](https://github.com/pytroll/satpy/issues/336) - Scene crop does not compare all dataset areas ([PR 406](https://github.com/pytroll/satpy/pull/406))

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 409](https://github.com/pytroll/satpy/pull/409) - Fix viirs_sdr reading of aggregated files
* [PR 406](https://github.com/pytroll/satpy/pull/406) - Fix Scene crop so new areas are consistent with resolution ([336](https://github.com/pytroll/satpy/issues/336)) (see the sketch below)

In this release 2 pull requests were closed.

## Version 0.9.2 (2018/08/23)

### Pull Requests Merged

#### Bugs fixed

* [PR 402](https://github.com/pytroll/satpy/pull/402) - Fix 'platform_name' metadata in ACSPO and CLAVR-x readers
* [PR 401](https://github.com/pytroll/satpy/pull/401) - Wrap solar and satellite angles in xarray in AVHRR AAPP reader

In this release 2 pull requests were closed.
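PR 406 in the 0.9.3 section above tightened up `Scene.crop`. A minimal sketch of the cropping API it affects; the file pattern and bounding box values are arbitrary examples for area-based (e.g. geostationary) data:

```python
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob('/data/abi/*RadF*.nc'),  # hypothetical
            reader='abi_l1b')
scn.load(['C02'])

# Crop to a lon/lat bounding box (lonmin, latmin, lonmax, latmax);
# each dataset keeps an area consistent with its own resolution.
cropped = scn.crop(ll_bbox=(-105.0, 40.0, -95.0, 50.0))
cropped.save_datasets()
```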
## Version 0.9.1 (2018/08/19)

### Issues Closed

* [Issue 388](https://github.com/pytroll/satpy/issues/388) - SCMI writer raises exception with lettered grids ([PR 389](https://github.com/pytroll/satpy/pull/389))
* [Issue 385](https://github.com/pytroll/satpy/issues/385) - No platform_name and sensor in dataset metadata for avhrr_aapp_l1b reader ([PR 386](https://github.com/pytroll/satpy/pull/386))
* [Issue 379](https://github.com/pytroll/satpy/issues/379) - Data is not masked when loading calibrated GOES HRIT data ([PR 380](https://github.com/pytroll/satpy/pull/380))
* [Issue 377](https://github.com/pytroll/satpy/issues/377) - Unmasked data when using DayNightCompositor ([PR 378](https://github.com/pytroll/satpy/pull/378))
* [Issue 372](https://github.com/pytroll/satpy/issues/372) - "find_files_and_readers" doesn't work on Windows ([PR 373](https://github.com/pytroll/satpy/pull/373))
* [Issue 364](https://github.com/pytroll/satpy/issues/364) - Unable to load individual channels from VIIRS_SDR data
* [Issue 350](https://github.com/pytroll/satpy/issues/350) - Creating a Scene object with NOAA-15/18 data
* [Issue 347](https://github.com/pytroll/satpy/issues/347) - No image is shown in Jupyter notebook via scene.show()
* [Issue 345](https://github.com/pytroll/satpy/issues/345) - Future warning - xarray ([PR 352](https://github.com/pytroll/satpy/pull/352))

In this release 9 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 395](https://github.com/pytroll/satpy/pull/395) - Fix DayNightCompositor not checking inputs areas
* [PR 391](https://github.com/pytroll/satpy/pull/391) - Fix native resampler using SwathDefinition as an AreaDefinition
* [PR 387](https://github.com/pytroll/satpy/pull/387) - Fix enhancement config loading when yaml file is empty
* [PR 386](https://github.com/pytroll/satpy/pull/386) - Add platform_name and sensor in avhrr_aapp_l1b reader ([385](https://github.com/pytroll/satpy/issues/385))
* [PR 381](https://github.com/pytroll/satpy/pull/381) - Fix keyword arguments not being properly passed to writers
* [PR 362](https://github.com/pytroll/satpy/pull/362) - Replace np.ma.mean by np.nanmean for pixel aggregation
* [PR 361](https://github.com/pytroll/satpy/pull/361) - Remove Rayleigh correction from abi natural composite
* [PR 360](https://github.com/pytroll/satpy/pull/360) - Fix lookup table enhancement for multi-band datasets
* [PR 339](https://github.com/pytroll/satpy/pull/339) - Fix meteosat native format georeferencing

#### Documentation changes

* [PR 359](https://github.com/pytroll/satpy/pull/359) - Add examples from pytroll-examples to documentation

In this release 10 pull requests were closed.

## Version 0.9.0 (2018/07/02)

### Issues Closed

* [Issue 344](https://github.com/pytroll/satpy/issues/344) - find_files_and_readers does not seem to care about start_time ([PR 349](https://github.com/pytroll/satpy/pull/349)) (see the sketch below)
* [Issue 338](https://github.com/pytroll/satpy/issues/338) - Creating a Scene object with Terra MODIS data
* [Issue 332](https://github.com/pytroll/satpy/issues/332) - Non-requested datasets are saved when composites fail to generate ([PR 342](https://github.com/pytroll/satpy/pull/342))

In this release 3 issues were closed.
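Issue 344 above, fixed by PR 349 in the list that follows, concerns time-based file selection. A minimal sketch of `find_files_and_readers`; the base directory, reader, and time window are illustrative:

```python
from datetime import datetime

from satpy import Scene, find_files_and_readers

# Select only files overlapping the requested window (PR 349 fixes the
# case where filenames carry just a start time).
files = find_files_and_readers(base_dir='/data/viirs',  # hypothetical
                               reader='viirs_sdr',
                               start_time=datetime(2018, 7, 2, 12, 0),
                               end_time=datetime(2018, 7, 2, 13, 0))
scn = Scene(filenames=files)
```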
### Pull Requests Merged

#### Bugs fixed

* [PR 355](https://github.com/pytroll/satpy/pull/355) - Fix ABI L1B reader losing file variable attributes
* [PR 353](https://github.com/pytroll/satpy/pull/353) - Fix multiscene memory issues by adding an optional batch_size
* [PR 351](https://github.com/pytroll/satpy/pull/351) - Fix AMSR-2 L1B reader loading bytes incorrectly
* [PR 349](https://github.com/pytroll/satpy/pull/349) - Fix datetime-based file selection when filename only has a start time ([344](https://github.com/pytroll/satpy/issues/344))
* [PR 348](https://github.com/pytroll/satpy/pull/348) - Fix freezing of areas before resampling even as strings
* [PR 343](https://github.com/pytroll/satpy/pull/343) - Fix shape assertion after resampling
* [PR 342](https://github.com/pytroll/satpy/pull/342) - Fix Scene save_datasets to only save datasets from the wishlist ([332](https://github.com/pytroll/satpy/issues/332))
* [PR 341](https://github.com/pytroll/satpy/pull/341) - Fix ancillary variable loading when anc var is already loaded
* [PR 340](https://github.com/pytroll/satpy/pull/340) - Cut radiances array depending on number of scans

In this release 9 pull requests were closed.

## Version 0.9.0b0 (2018/06/26)

### Issues Closed

* [Issue 328](https://github.com/pytroll/satpy/issues/328) - hrit reader bugs ([PR 329](https://github.com/pytroll/satpy/pull/329))
* [Issue 323](https://github.com/pytroll/satpy/issues/323) - "Manual" application of corrections
* [Issue 320](https://github.com/pytroll/satpy/issues/320) - Overview of code layout
* [Issue 279](https://github.com/pytroll/satpy/issues/279) - Add 'level' to DatasetID ([PR 283](https://github.com/pytroll/satpy/pull/283))
* [Issue 272](https://github.com/pytroll/satpy/issues/272) - How to save region of interest from Band 3 Himawari data as png image ([PR 276](https://github.com/pytroll/satpy/pull/276))
* [Issue 267](https://github.com/pytroll/satpy/issues/267) - Missing dependency causes strange error during unit tests ([PR 273](https://github.com/pytroll/satpy/pull/273))
* [Issue 244](https://github.com/pytroll/satpy/issues/244) - Fix NUCAPS reader for NUCAPS EDR v2 files ([PR 326](https://github.com/pytroll/satpy/pull/326))
* [Issue 236](https://github.com/pytroll/satpy/issues/236) - scene.resample(cache_dir=) fails with TypeError: Unicode-objects must be encoded before hashing
* [Issue 233](https://github.com/pytroll/satpy/issues/233) - IOError: Unable to read attribute (no appropriate function for conversion path)
* [Issue 211](https://github.com/pytroll/satpy/issues/211) - Fix OLCI and other readers' file patterns to work on Windows
* [Issue 207](https://github.com/pytroll/satpy/issues/207) - Method not fully documented in terms of possible keyword arguments
* [Issue 199](https://github.com/pytroll/satpy/issues/199) - Reading MODIS file produces a double image
* [Issue 168](https://github.com/pytroll/satpy/issues/168) - Cannot read MODIS data
* [Issue 167](https://github.com/pytroll/satpy/issues/167) - KeyError 'v' using Scene(base_dir=, reader=) ([PR 325](https://github.com/pytroll/satpy/pull/325))
* [Issue 165](https://github.com/pytroll/satpy/issues/165) - HRIT GOES reader is broken ([PR 303](https://github.com/pytroll/satpy/pull/303))
* [Issue 160](https://github.com/pytroll/satpy/issues/160) - Inconsistent naming of optional datasets in composite configs and compositors
* [Issue 157](https://github.com/pytroll/satpy/issues/157) - Add animation example ([PR 322](https://github.com/pytroll/satpy/pull/322))
* [Issue 156](https://github.com/pytroll/satpy/issues/156) - Add cartopy example
* [Issue 146](https://github.com/pytroll/satpy/issues/146) - Add default null log handler
* [Issue 123](https://github.com/pytroll/satpy/issues/123) - NetCDF writer doesn't work ([PR 307](https://github.com/pytroll/satpy/pull/307))
* [Issue 114](https://github.com/pytroll/satpy/issues/114) - Print a list of available sensors/readers (see the sketch after this section)
* [Issue 82](https://github.com/pytroll/satpy/issues/82) - Separate file discovery from Scene init
* [Issue 61](https://github.com/pytroll/satpy/issues/61) - Creating composites post-load
* [Issue 10](https://github.com/pytroll/satpy/issues/10) - Optimize CREFL for memory

In this release 24 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 331](https://github.com/pytroll/satpy/pull/331) - Adapt slstr reader to xarray and dask
* [PR 329](https://github.com/pytroll/satpy/pull/329) - Fix bugs loading JMA HRIT files ([328](https://github.com/pytroll/satpy/issues/328))
* [PR 326](https://github.com/pytroll/satpy/pull/326) - Fix nucaps reader for NUCAPS EDR v2 files ([244](https://github.com/pytroll/satpy/issues/244))
* [PR 325](https://github.com/pytroll/satpy/pull/325) - Fix exception when Scene is given reader and base_dir ([167](https://github.com/pytroll/satpy/issues/167))
* [PR 319](https://github.com/pytroll/satpy/pull/319) - Fix delayed loading in the msi reader
* [PR 318](https://github.com/pytroll/satpy/pull/318) - Fix NIR reflectance to use xarray
* [PR 312](https://github.com/pytroll/satpy/pull/312) - Allow custom regions in ahi-hsd file patterns
* [PR 311](https://github.com/pytroll/satpy/pull/311) - Allow valid_range to be a tuple for cloud product colorization
* [PR 303](https://github.com/pytroll/satpy/pull/303) - Fix hrit goes to support python 3 ([165](https://github.com/pytroll/satpy/issues/165))
* [PR 288](https://github.com/pytroll/satpy/pull/288) - Fix hrit-goes reader
* [PR 192](https://github.com/pytroll/satpy/pull/192) - Clip day and night composites after enhancement

#### Features added

* [PR 315](https://github.com/pytroll/satpy/pull/315) - Add slicing to Scene
* [PR 314](https://github.com/pytroll/satpy/pull/314) - Add MITIFF writer
* [PR 307](https://github.com/pytroll/satpy/pull/307) - Fix projections in cf writer ([123](https://github.com/pytroll/satpy/issues/123))
* [PR 305](https://github.com/pytroll/satpy/pull/305) - Add support for geolocation and angles to msi reader
* [PR 302](https://github.com/pytroll/satpy/pull/302) - Work around the LinearNDInterpolator thread-safety issue for Sentinel 1 SAR geolocation
* [PR 301](https://github.com/pytroll/satpy/pull/301) - Factorize header definitions between hrit_msg and native_msg; fix a bug in header definition
* [PR 298](https://github.com/pytroll/satpy/pull/298) - Implement Sentinel 2 MSI reader
* [PR 294](https://github.com/pytroll/satpy/pull/294) - Add the ocean color product to olci
* [PR 153](https://github.com/pytroll/satpy/pull/153) - [WIP] Improve compatibility of cf_writer with CF conventions

In this release 20 pull requests were closed.
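Issue 114 above asked for a way to list what Satpy can read. A minimal sketch of the introspection helper that answers it; readers whose dependencies are missing are filtered out of the result:

```python
from satpy import available_readers

# Names of all readers usable in the current environment.
print(sorted(available_readers()))
```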
## Version 0.9.0a2 (2018/05/14)

### Issues Closed

* [Issue 286](https://github.com/pytroll/satpy/issues/286) - Proposal: search automatically for local config-files/readers
* [Issue 278](https://github.com/pytroll/satpy/issues/278) - msg native reader fails on full disk image
* [Issue 277](https://github.com/pytroll/satpy/issues/277) - msg_native reader fails when order number has a hyphen in it ([PR 282](https://github.com/pytroll/satpy/pull/282))
* [Issue 270](https://github.com/pytroll/satpy/issues/270) - How to find the value at a certain latitude and longitude
* [Issue 269](https://github.com/pytroll/satpy/issues/269) - How to interpret the parameter values in AreaDefinition
* [Issue 268](https://github.com/pytroll/satpy/issues/268) - How to find the appropriate values of parameters in the Scene.resample() function using Himawari data
* [Issue 241](https://github.com/pytroll/satpy/issues/241) - reader native_msg using `np.str`
* [Issue 218](https://github.com/pytroll/satpy/issues/218) - Resampling to EPSG:4326 produces unexpected results
* [Issue 189](https://github.com/pytroll/satpy/issues/189) - Error when reading MSG native format
* [Issue 62](https://github.com/pytroll/satpy/issues/62) - msg_native example
* [Issue 33](https://github.com/pytroll/satpy/issues/33) - Load metadata without loading data

In this release 11 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 290](https://github.com/pytroll/satpy/pull/290) - Fix unicode-named data loading
* [PR 285](https://github.com/pytroll/satpy/pull/285) - Fix native_msg calibration bug
* [PR 282](https://github.com/pytroll/satpy/pull/282) - Fix native_msg reader for ROI input and multi-part order file patterns ([277](https://github.com/pytroll/satpy/issues/277))
* [PR 280](https://github.com/pytroll/satpy/pull/280) - Fix CLAVR-x reader to work with xarray
* [PR 274](https://github.com/pytroll/satpy/pull/274) - Convert ahi hsd reader to dask and xarray
* [PR 265](https://github.com/pytroll/satpy/pull/265) - Bugfix msg native reader
* [PR 262](https://github.com/pytroll/satpy/pull/262) - Fix dependency tree to find the best dependency when multiple matches occur
* [PR 260](https://github.com/pytroll/satpy/pull/260) - Fix ABI L1B reader masking data improperly

#### Features added

* [PR 293](https://github.com/pytroll/satpy/pull/293) - Switch to netcdf4 as engine for nc nwcsaf reading
* [PR 292](https://github.com/pytroll/satpy/pull/292) - Use pyresample's boundary classes
* [PR 291](https://github.com/pytroll/satpy/pull/291) - Allow datasets without areas to be concatenated
* [PR 289](https://github.com/pytroll/satpy/pull/289) - Fix UMARF files (with extension .nat) so they are found as well
* [PR 287](https://github.com/pytroll/satpy/pull/287) - Add production configuration for NWCSAF RDT, ASII products by Marco Sassi
* [PR 283](https://github.com/pytroll/satpy/pull/283) - Add GRIB reader ([279](https://github.com/pytroll/satpy/issues/279))
* [PR 281](https://github.com/pytroll/satpy/pull/281) - Port the maia reader to dask/xarray
* [PR 276](https://github.com/pytroll/satpy/pull/276) - Support reducing data for geos areas ([272](https://github.com/pytroll/satpy/issues/272))
* [PR 273](https://github.com/pytroll/satpy/pull/273) - Msg readers cleanup ([267](https://github.com/pytroll/satpy/issues/267))
* [PR 271](https://github.com/pytroll/satpy/pull/271) - Add appveyor and use ci-helpers for CI environments
* [PR 264](https://github.com/pytroll/satpy/pull/264) - Add caching at the scene level, and handle saving/loading from disk
* [PR 262](https://github.com/pytroll/satpy/pull/262) - Fix dependency tree to find the best dependency when multiple matches occur

In this release 20 pull requests were closed.

## Version 0.9.0a1 (2018/04/22)

### Issues Closed

* [Issue 227](https://github.com/pytroll/satpy/issues/227) - Issue reading MSG4
* [Issue 225](https://github.com/pytroll/satpy/issues/225) - Save datasets using SCMI ([PR 228](https://github.com/pytroll/satpy/pull/228))
* [Issue 215](https://github.com/pytroll/satpy/issues/215) - Change `Scene.compute` to something else ([PR 220](https://github.com/pytroll/satpy/pull/220))
* [Issue 208](https://github.com/pytroll/satpy/issues/208) - Strange behaviour when trying to load data to a scene object after having worked with it ([PR 214](https://github.com/pytroll/satpy/pull/214))
* [Issue 200](https://github.com/pytroll/satpy/issues/200) - Different mask handling when saving to PNG or GeoTIFF ([PR 201](https://github.com/pytroll/satpy/pull/201))
* [Issue 176](https://github.com/pytroll/satpy/issues/176) - Loading viirs natural_color composite fails ([PR 177](https://github.com/pytroll/satpy/pull/177))

In this release 6 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 259](https://github.com/pytroll/satpy/pull/259) - Fix writer and refactor so bad writer name raises logical exception
* [PR 257](https://github.com/pytroll/satpy/pull/257) - Fix geotiff and png writers to save to a temporary directory
* [PR 256](https://github.com/pytroll/satpy/pull/256) - Add 'python_requires' to setup.py to specify python support
* [PR 253](https://github.com/pytroll/satpy/pull/253) - Fix ABI L1B reader to use 64-bit scaling factors for X/Y variables
* [PR 250](https://github.com/pytroll/satpy/pull/250) - Fix floating point geotiff saving in dask geotiff writer
* [PR 249](https://github.com/pytroll/satpy/pull/249) - Fix float geotiff saving on 0.8
* [PR 248](https://github.com/pytroll/satpy/pull/248) - Fix unloading composite deps when one of them has incompatible areas
* [PR 243](https://github.com/pytroll/satpy/pull/243) - Remove ABI composite reducerX modifiers

#### Features added

* [PR 252](https://github.com/pytroll/satpy/pull/252) - Use rasterio to save geotiffs when available (see the sketch after this section)
* [PR 239](https://github.com/pytroll/satpy/pull/239) - Add CSPP Geo (geocat) AHI reading support

In this release 10 pull requests were closed.
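Several of the 0.9.0a1 fixes above touch GeoTIFF saving (PRs 252, 250, 249, 257). A minimal sketch of the writer call they affect; the file pattern and band name are illustrative, and the geotiff writer is the default when no writer is named:

```python
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob('/data/viirs/*.h5'),  # hypothetical path
            reader='viirs_sdr')
scn.load(['I04'])

# rasterio is used for the geotiff output when it is available (PR 252);
# float data also round-trips correctly after PRs 249/250.
scn.save_dataset('I04', filename='i04.tif')
```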
## Version 0.9.0a0 (2018/03/20)

#### Bugs fixed

* [Issue 179](https://github.com/pytroll/satpy/issues/179) - Cannot read AVHRR in AAPP format
* [PR 234](https://github.com/pytroll/satpy/pull/234) - Fix the SAR reader
* [PR 231](https://github.com/pytroll/satpy/pull/231) - Fix palette-based compositor concatenation
* [PR 230](https://github.com/pytroll/satpy/pull/230) - Fix dask angle calculations of rayleigh corrector
* [PR 229](https://github.com/pytroll/satpy/pull/229) - Fix bug in dep tree when modifier deps are modified wavelengths
* [PR 228](https://github.com/pytroll/satpy/pull/228) - Fix 'platform' being used instead of 'platform_name'
* [PR 224](https://github.com/pytroll/satpy/pull/224) - Add helper method for checking areas in compositors
* [PR 222](https://github.com/pytroll/satpy/pull/222) - Fix resampler caching by source area
* [PR 221](https://github.com/pytroll/satpy/pull/221) - Fix Scene loading and resampling when generate=False
* [PR 220](https://github.com/pytroll/satpy/pull/220) - Rename Scene's `compute` to `generate_composites`
* [PR 219](https://github.com/pytroll/satpy/pull/219) - Fixed native_msg calibration problem and added env var to change the …
* [PR 214](https://github.com/pytroll/satpy/pull/214) - Fix Scene not being copied properly during resampling
* [PR 210](https://github.com/pytroll/satpy/pull/210) - Fix check of whether lons and lats should be masked before resampling
* [PR 206](https://github.com/pytroll/satpy/pull/206) - Fix optional dependencies not being passed to modifiers with opts only
* [PR 187](https://github.com/pytroll/satpy/pull/187) - Fix reader configs having mismatched names between filename and config
* [PR 185](https://github.com/pytroll/satpy/pull/185) - Fix nwcsaf_pps reader file discoverability
* [PR 177](https://github.com/pytroll/satpy/pull/177) - Fix viirs loading - picked from (xarray)develop branch
* [PR 163](https://github.com/pytroll/satpy/pull/163) - Fix float geotiff saving

#### Features added

* [PR 232](https://github.com/pytroll/satpy/pull/232) - Add ABI L1B system tests
* [PR 226](https://github.com/pytroll/satpy/pull/226) - EARS NWCSAF products reading
* [PR 217](https://github.com/pytroll/satpy/pull/217) - Add xarray/dask support to DayNightCompositor
* [PR 216](https://github.com/pytroll/satpy/pull/216) - Fix dataset writing so computations are shared between tasks
* [PR 213](https://github.com/pytroll/satpy/pull/213) - [WIP] Reuse same resampler for similar datasets
* [PR 212](https://github.com/pytroll/satpy/pull/212) - Improve modis reader to support dask
* [PR 209](https://github.com/pytroll/satpy/pull/209) - Fix enhancements to work with xarray
* [PR 205](https://github.com/pytroll/satpy/pull/205) - Fix ABI 'natural' and 'true_color' composites to work with xarray
* [PR 204](https://github.com/pytroll/satpy/pull/204) - Add 'native' resampler
* [PR 203](https://github.com/pytroll/satpy/pull/203) - [WIP] Feature trollimage xarray
* [PR 195](https://github.com/pytroll/satpy/pull/195) - Add ABI-specific configs for Airmass composite
* [PR 186](https://github.com/pytroll/satpy/pull/186) - Add missing nodata tiff tag
* [PR 180](https://github.com/pytroll/satpy/pull/180) - Replace BW and RGBCompositor with a more generic one

#### Documentation changes

* [PR 155](https://github.com/pytroll/satpy/pull/155) - Add contributing and developers guide documentation

In this release 1 issue and 31 pull requests were closed.
satpy-0.20.0/CITATION

To find out how to reference satpy, go to https://zenodo.org/badge/latestdoi/51397392 and choose your favourite citation format on the bottom of the right hand side-bar.

satpy-0.20.0/CODE_OF_CONDUCT.md

# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq

satpy-0.20.0/CONTRIBUTING.rst

=================
How to contribute
=================

Thank you for considering contributing to Satpy! Satpy's development team is made up of volunteers so any help we can get is very appreciated.

Contributions from users are what keep this community going. We welcome any contributions including bug reports, documentation fixes or updates, bug fixes, and feature requests. By contributing to Satpy you are providing code that everyone can use and benefit from.

The following guidelines will describe how the Satpy project structures its code contributions from discussion to code to package release.

For more information on contributing to open source projects see `GitHub's Guide `_.

What can I do?
==============

- Make sure you have a `GitHub account `_.
- Submit a ticket for your issue, assuming one does not already exist.
- If you're uncomfortable using Git/GitHub, see `Learn Git Branching `_ or other online tutorials.
- If you are uncomfortable contributing to an open source project see:

  * `How to Contribute to an Open Source Project on GitHub `_ video series
  * Aaron Meurer's `Git Workflow `_
  * `How to Contribute to Open Source `_

- See what `issues `_ already exist. Issues marked `good first issue `_ or `help wanted `_ can be good issues to start with.
- Read the :doc:`index` for more details on contributing code.
- `Fork `_ the repository on GitHub and install the package in development mode.
- Update the Satpy documentation to make it clearer and more detailed.
- Contribute code to either fix a bug or add functionality and submit a `Pull Request `_.
- Make an example Jupyter Notebook and add it to the `available examples `_.

What if I break something?
==========================

Not possible. If something breaks because of your contribution it was our fault. When you submit your changes to be merged as a GitHub `Pull Request `_ they will be automatically tested and checked against coding style rules. Before they are merged they are reviewed by at least one maintainer of the Satpy project. If anything needs updating, we'll let you know.

What is expected?
=================

You can expect the Satpy maintainers to help you. We are all volunteers, have jobs, and occasionally go on vacations. We will try our best to answer your questions as soon as possible. We will try our best to understand your use case and add the features you need. Although we strive to make Satpy useful for everyone there may be some feature requests that we can't allow if they would require breaking existing features. Other features may be best for a different package, PyTroll or otherwise. Regardless, we will help you find the best place for your feature and to make it possible to do what you want.

We, the Satpy maintainers, expect you to be patient, understanding, and respectful of both developers and users. Satpy can only be successful if everyone in the community feels welcome. We also expect you to put in as much work as you expect out of us.
There is no dedicated PyTroll or Satpy support team, so there may be times when you need to do most of the work to solve your problem (trying different test cases, environments, etc).

Being respectful includes following the style of the existing code for any code submissions. Please follow `PEP8 `_ style guidelines and limit lines of code to 80 characters whenever possible and when it doesn't hurt readability. Satpy follows `Google Style Docstrings `_ for all code API documentation. When in doubt use the existing code as a guide for how coding should be done.

.. _dev_help:

How do I get help?
==================

The Satpy developers (and all other PyTroll package developers) monitor the:

- `Mailing List `_
- `Slack chat `_ (get an `invitation `_)
- `GitHub issues `_

How do I submit my changes?
===========================

Any contributions should start with some form of communication (see above) to let the Satpy maintainers know how you plan to help. The larger the contribution the more important direct communication is so everyone can avoid duplicate code and wasted time. After talking to the Satpy developers any additional work like code or documentation changes can be provided as a GitHub `Pull Request `_.

To make sure that your code complies with the PyTroll python standard, you can run the `flake8 `_ linter on your changes before you submit them, or even better install a pre-commit hook that runs the style check for you. To this aim, we provide a configuration file for the `pre-commit `_ tool, that you can install with e.g.::

    pip install pre-commit
    pre-commit install

running from your base satpy directory. This will automatically check code style for every commit.

Code of Conduct
===============

Satpy follows the same code of conduct as the PyTroll project. For reference it is copied to this repository in `CODE_OF_CONDUCT.md `_.

As stated in the PyTroll home page, this code of conduct applies to the project space (GitHub) as well as the public space online and offline when an individual is representing the project or the community. Online examples of this include the PyTroll Slack team, mailing list, and the PyTroll twitter account. This code of conduct also applies to in-person situations like PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when the project is being represented.

Any violations of this code of conduct will be handled by the core maintainers of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. If you wish to report one of the maintainers for a violation and are not comfortable with them seeing it, please contact one or more of the other maintainers to report the violation. Responses to violations will be determined by the maintainers and may include one or more of the following:

- Verbal warning
- Ask for public apology
- Temporary or permanent ban from in-person events
- Temporary or permanent ban from online communication (Slack, mailing list, etc)

For details see the official `code of conduct document `_.

satpy-0.20.0/LICENSE.txt

GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>

Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

Preamble

The GNU General Public License is a free, copyleft license for software and other kinds of works.
The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too.

When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things.

To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others.

For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.

Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it.

For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions.

Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users.

Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free.

The precise terms and conditions for copying, distribution and modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. 
The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. 
This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. 
Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. 
Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. 
For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year> <name of author>

    This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

    This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

    You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:

    <program> Copyright (C) <year> <name of author>
    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box".

You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <http://www.gnu.org/licenses/>.

The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <http://www.gnu.org/philosophy/why-not-lgpl.html>.

satpy-0.20.0/MANIFEST.in

include doc/Makefile
include doc/source/*
include doc/examples/*.py
include LICENSE.txt
include README.rst

satpy-0.20.0/README -> README.rst (symlink)

satpy-0.20.0/README.rst

Satpy
=====

.. image:: https://travis-ci.org/pytroll/satpy.svg?branch=master
    :target: https://travis-ci.org/pytroll/satpy

.. image:: https://ci.appveyor.com/api/projects/status/a82tm59hldt4ycor/branch/master?svg=true
    :target: https://ci.appveyor.com/project/pytroll/satpy/branch/master

.. image:: https://coveralls.io/repos/github/pytroll/satpy/badge.svg?branch=master
    :target: https://coveralls.io/github/pytroll/satpy?branch=master

.. image:: https://badge.fury.io/py/satpy.svg
    :target: https://badge.fury.io/py/satpy

.. image:: https://zenodo.org/badge/51397392.svg
    :target: https://zenodo.org/badge/latestdoi/51397392

The Satpy package is a Python library for reading and manipulating meteorological remote sensing data and writing it to various image and data file formats. Satpy comes with the ability to make various RGB composites directly from satellite instrument channel data or higher level processing output. The `pyresample <http://pyresample.readthedocs.io/>`_ package is used to resample data to different uniform areas or grids.

The documentation is available at http://satpy.readthedocs.org/.

Installation
------------

Satpy can be installed from PyPI with pip:

.. code-block:: bash

    pip install satpy

It is also available from `conda-forge` for conda installations:

.. code-block:: bash

    conda install -c conda-forge satpy
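Usage
-----

Once installed, data are processed through a ``Scene`` object: files are read with a named reader, composites are loaded by name, and the result can be resampled and saved with a writer. The snippet below is a minimal sketch of that workflow; the input path is hypothetical and assumes a directory of GOES-16 ABI L1b NetCDF files:

.. code-block:: python

    from glob import glob

    from satpy import Scene

    # Hypothetical input: one full-disk time step of GOES-16 ABI L1b files.
    scn = Scene(reader='abi_l1b', filenames=glob('/data/goes16/OR_ABI-L1b-RadF*.nc'))
    scn.load(['true_color'])

    # 'true_color' mixes channel resolutions, so resample before saving;
    # the 'native' resampler replicates/aggregates bands onto a common grid.
    new_scn = scn.resample(resampler='native')
    new_scn.save_dataset('true_color', filename='goes16_true_color.png')

The same ``Scene`` API applies across sensors; any reader and composite pair from the documentation can be substituted.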
Code of Conduct
---------------

Satpy follows the same code of conduct as the PyTroll project. For reference it is copied to this repository in CODE_OF_CONDUCT.md_.

As stated in the PyTroll home page, this code of conduct applies to the project space (GitHub) as well as the public space online and offline when an individual is representing the project or the community. Online examples of this include the PyTroll Slack team, mailing list, and the PyTroll twitter account. This code of conduct also applies to in-person situations like PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when the project is being represented.

Any violations of this code of conduct will be handled by the core maintainers of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. If you wish to report one of the maintainers for a violation and are not comfortable with them seeing it, please contact one or more of the other maintainers to report the violation.

Responses to violations will be determined by the maintainers and may include one or more of the following:

- Verbal warning
- Ask for public apology
- Temporary or permanent ban from in-person events
- Temporary or permanent ban from online communication (Slack, mailing list, etc)

For details see the official CODE_OF_CONDUCT.md_.

.. _CODE_OF_CONDUCT.md: ./CODE_OF_CONDUCT.md

satpy-0.20.0/RELEASING.md

# Releasing Satpy

1. checkout master
2. pull from repo
3. run the unittests
4. run `loghub`. Replace `<username>` and `<previous version>` with proper values. To get the previous version run `git tag` and select the most recent with the highest version number.

   ```
   loghub pytroll/satpy -u <username> -st v<previous version> -plg bug "Bugs fixed" -plg enhancement "Features added" -plg documentation "Documentation changes" -plg backwards-incompatibility "Backwards incompatible changes"
   ```

   This command will create a CHANGELOG.temp file which needs to be added to the top of the CHANGELOG.md file. The same content is also printed to the terminal, so it can be copy-pasted, too. Remember to also update the version number to the one given in step 5. Don't forget to commit CHANGELOG.md!

5. Create a tag with the new version number, starting with a 'v', e.g.:

   ```
   git tag -a v<new version> -m "Version <new version>"
   ```

   For example, if the previous tag was `v0.9.0` and the new release is a patch release, do:

   ```
   git tag -a v0.9.1 -m "Version 0.9.1"
   ```

   See [semver.org](http://semver.org/) on how to write a version number.

6. push changes to github `git push --follow-tags`
7. Verify travis tests passed and deployed sdist and wheel to PyPI
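Taken together, the steps above amount to the following shell sequence. This is only a sketch: the version numbers are example values for a hypothetical 0.9.1 patch release, and the loghub step is abbreviated to its commit:

```
git checkout master
git pull
python setup.py test                     # run the unittests
# ... update CHANGELOG.md via loghub as described in step 4, then:
git commit -m "Update changelog for v0.9.1" CHANGELOG.md
git tag -a v0.9.1 -m "Version 0.9.1"     # annotated tag, 'v' + semver
git push --follow-tags                   # push commits and the new tag together
```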
satpy-0.20.0/appveyor.yml

environment:
  global:
    PYTHON: "C:\\conda"
    MINICONDA_VERSION: "latest"
    CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\ci-helpers\\appveyor\\windows_sdk.cmd"
    CONDA_DEPENDENCIES: "xarray dask distributed toolz Cython sphinx cartopy pillow matplotlib scipy pyyaml pyproj pyresample coverage netcdf4 h5py h5netcdf gdal rasterio imageio pyhdf mock libtiff zarr six"
    PIP_DEPENDENCIES: "trollsift trollimage pyspectral pyorbital libtiff"
    CONDA_CHANNELS: "conda-forge"

  matrix:
    - PYTHON: "C:\\Python38_64"
      PYTHON_VERSION: "3.8"
      PYTHON_ARCH: "64"
      NUMPY_VERSION: "stable"

    - PYTHON: "C:\\Python38_64"
      PYTHON_VERSION: "3.7"
      PYTHON_ARCH: "64"
      NUMPY_VERSION: "1.16"

install:
  - "git clone --depth 1 git://github.com/astropy/ci-helpers.git"
  - "powershell ci-helpers/appveyor/install-miniconda.ps1"
  - "conda activate test"

build: false  # Not a C# project, build stuff at the test step instead.

test_script:
  # Build the compiled extension and run the project tests
  - "%CMD_IN_ENV% python setup.py test"

after_test:
  # If tests are successful, create a whl package for the project.
  - "%CMD_IN_ENV% python setup.py bdist_wheel bdist_wininst"
  - ps: "ls dist"

artifacts:
  # Archive the generated wheel package in the ci.appveyor.com build report.
  - path: dist\*

#on_success:
#  - TODO: upload the content of dist/*.whl to a public wheelhouse
#

satpy-0.20.0/changelog_pre0.9.0.rst

Changelog
=========

v0.8.1 (2018-01-19)
-------------------

Fix
~~~

- Bugfix: Fix so the Himawari platform name is a string and not a numpy array. [Adam.Dybbroe]
- Bugfix: The satellite azimuth returned by PyOrbital is not in the range -180 to 180 as was expected. [Adam.Dybbroe]

Other
~~~~~

- Update changelog. [Martin Raspaud]
- Bump version: 0.8.0 → 0.8.1. [Martin Raspaud]
- Merge pull request #162 from pytroll/bugfix-pyorbital-azimuth-difference. [Martin Raspaud]

  Bugfix: The satellite azimuth returned by PyOrbital is not in the ran…

- Merge pull request #154 from pytroll/bugfix-viirs-truecolor-ratiosharpening. [Martin Raspaud]

  Add a rayleigh_correction modifier for I-bands,

- Add a rayleigh_correction modifier for I-bands, which is referred to in the ratio-sharpened true color and natural_color RGBs. [Adam.Dybbroe]
- Fix backwards compatibility with scene instantiation. [Martin Raspaud]

v0.8.0 (2018-01-11)
-------------------

Fix
~~~

- Bugfix: Explicitly set the resolution for sun-satellite geometry for the Rayleigh correction modifiers needed for True Color imagery. [Adam.Dybbroe]

Other
~~~~~

- Update changelog. [Martin Raspaud]
- Bump version: 0.7.8 → 0.8.0. [Martin Raspaud]
- Merge pull request #152 from pytroll/bugfix-truecolor-viirs. [Martin Raspaud]

  Bugfix: Explicitly set the resolution for sun-satellite geometry

- Bugfix viirs_sdr reader: Use correct sunz corrector for ibands. [Adam.Dybbroe]
- Merge pull request #91 from pytroll/feature-discover-utility. [Martin Raspaud]

  Separate find files utility

- Merge branch 'develop' into feature-discover-utility. [David Hoese]
- Refactor all of the documentation and fix various docstrings. [davidh-ssec]
- Update documentation index and installation instructions. [davidh-ssec]
- Merge branch 'develop' into feature-discover-utility. [davidh-ssec]
[davidh-ssec] # Conflicts: # satpy/readers/mipp_xrit.py # satpy/tests/test_readers.py # satpy/utils.py - Add filename filtering and tests for find_files_and_readers. [davidh- ssec] - Remove unused strftime function. [davidh-ssec] - Fix behavior tests and other necessary changes to fix file discovery. [davidh-ssec] - Update Scene and reader loading docstrings. [davidh-ssec] - Move reader start_time and end_time to filter_parameters. [davidh- ssec] Includes a first attempt at updating mipp_xrit to work with this - Fix `load_readers` tests after changing from ReaderFinder. [davidh- ssec] - Remove 'sensor' functionality from Scene init and clean reader loading. [davidh-ssec] - Fix behavior tests. [davidh-ssec] - Move file finding functionality to a separate utility function. [davidh-ssec] - Move ABI simulated green calculation to a separate function. [davidh- ssec] - Merge pull request #149 from pytroll/truecolor-red-channel-corr. [Martin Raspaud] Truecolor uses red channel as base for rayleigh correction - Fix indentation error in viirs.yaml. [Martin Raspaud] - Merge branch 'develop' into truecolor-red-channel-corr. [Martin Raspaud] - Remove marine-clean true color recipe, as it was the same as the standard recipe. [Adam.Dybbroe] - Bugfix abi true color recipes. [Adam.Dybbroe] - Apply consistency in true color imagery across sensors. Adding for land and sea variants. [Adam.Dybbroe] - Use the red band in the damping of the atm correction over reflective targets. [Adam.Dybbroe] v0.7.8 (2018-01-11) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.7.7 → 0.7.8. [Martin Raspaud] - Merge pull request #148 from pytroll/feature-utils. [Martin Raspaud] Fix platform name reading for ahi hsd reader in py3 - Fix platform name reading for ahi hsd reader in py3. [Martin Raspaud] This patch also factorizes some code to a np2str function that takes care of converting np.string_ to str - Merge pull request #130 from pytroll/ahi_truecolor. [Martin Raspaud] Use the cira stretch also for the true_color_ahi_default - Use consistent standard_name naming. [Adam.Dybbroe] - Fix for Himawari true colors at different resolutions. [Adam.Dybbroe] - Use the cira stretch also for the true_color_ahi_default. [Adam.Dybbroe] - Merge pull request #141 from pytroll/pep8. [Martin Raspaud] Remove unused imports and use pep8-ify - Remove unused imports and use pep8-ify. [Adam.Dybbroe] - Merge pull request #145 from pytroll/fix-refl37-rgbs. [Martin Raspaud] Add snow RGB, add r37-based and natural RGB recipes specific to SEVIRI, and fix sun-zenith correction - When doing atm correction with pass the band name rather than the wavelength to Pyspectral, as the latter may be ambigous. [Adam.Dybbroe] - Explain how the 3.x reflectance needs to be derived before getting the emissive part. [Adam.Dybbroe] - Removing the two protected internal variables: self._nir and self._tb11. [Adam.Dybbroe] - Add new recipes for daytime-cloudtop RGBs using Pyspectral to remove the reflective part of the 3.x signal. [Adam.Dybbroe] - Add method initiating the reflectance/emissive calculations. [Adam.Dybbroe] - Update __init__.py. [Adam Dybbroe] Replaced "dummy" with "_" - Add a NIR (3.x micron band) emissive RGB provided by new pyspectral. [Adam.Dybbroe] - Adapt method call to latest pyspectral. [Adam.Dybbroe] - Fix so it is possible to derive 3.7 micron reflective RGBs from both VIIRS I- and M-bands. [Adam.Dybbroe] - Add snow RGBs for VIIRS for both M- and I-bands. 
[Adam.Dybbroe] - Add snow RGB, add r37-based and natural RGB recipes specific to SEVIRI, and fix sun-zenith correction. [Adam.Dybbroe] - Merge pull request #143 from pytroll/noaa-20-platform-naming. [Martin Raspaud] Fix platform_name for NOAA-20 and -21 - Fix platform_name for NOAA-20 and -21. [Adam.Dybbroe] v0.7.7 (2017-12-21) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.6 → 0.7.7. [davidh-ssec] - Merge pull request #140 from pytroll/bugfix-scmi-signed. [David Hoese] Bugfix scmi signed integer data variables - Add ipython tab completion for scene keys. [davidh-ssec] - Fix SCMI writer because AWIPS doesn't like unsigned integers. [davidh- ssec] Using the entire 16-bit unsigned integer space displays fine in AWIPS but it doesn't handle them correctly when adding derived parameters. Meaning once the data goes in to a python script and gets converted to a signed interger...yeah. This change makes it so data is a signed 16-bit integer that only uses the positive half of the bit space. - Merge pull request #138 from pytroll/bugfix-modis-reader. [David Hoese] WIP: Fix readers not returning the highest resolution dataset IDs - Add more file patterns to hdfeos_l1b reader. [davidh-ssec] - Fix requesting a specific resolution from a reader. [davidh-ssec] - Merge remote-tracking branch 'origin/fix-resolution' into bugfix- modis-reader. [davidh-ssec] - Allow providing resolution when loading a composite. [Martin Raspaud] - Fix hdfeos_l1b reader not knowing what resolution of datasets it had. [davidh-ssec] - Fix interpolation problem at 250m resolution. [Martin Raspaud] - Fix readers not returning the highest resolution dataset IDs. [davidh- ssec] - Merge pull request #139 from pytroll/bugfix-viirs-l1b. [David Hoese] Fix VIIRS L1B to work with JPSS-1 and new NASA filenames - Fix VIIRS L1B to work with JPSS-1 and new NASA filenames. [davidh- ssec] - Clean up style. [Martin Raspaud] - Fix lon/lat caching in hdfeos_l1b for different resolutions. [Martin Raspaud] Fixes #132 - Merge pull request #137 from pytroll/logging_corrupted_file. [Martin Raspaud] When opening/reading a nc or hdf file fails, be verbose telling which file it is that fails - When opening/reading a file fails, be verbose telling which file it is that fails. [Adam.Dybbroe] - Merge pull request #134 from howff/hdfeos_l1b_ipopp_filenames. [Martin Raspaud] Added IPOPP-style MODIS-L1b filenames - Update doc re. IMAPP and IPOPP. [Andrew Brooks] - Added IPOPP-style MODIS-L1b filenames. [Andrew Brooks] v0.7.6 (2017-12-19) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.7.5 → 0.7.6. [Martin Raspaud] - Merge pull request #135 from pytroll/viirs_truecolor_config_error. [Martin Raspaud] Replace effective_solar_pathlength_corrected with the standard sunz-corrected - Replace effective_solar_pathlength_corrected witn the standard sunz- correction. VIIRS data are already sun-zenith corrected. [Adam.Dybbroe] - Update documentation to add hrit_goes. [Martin Raspaud] - Fix GOES navigation. [Martin Raspaud] - Finalize GOES LRIT reader. [Martin Raspaud] - Merge pull request #39 from howff/develop. [Martin Raspaud] Reader for GOES HRIT, WIP - Fix available_composite_names in doc. [Andrew Brooks] - Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [Andrew Brooks] - Start of reader for GOES HRIT. [howff] - Update PULL_REQUEST_TEMPLATE.md. [Martin Raspaud] This hides the comments when the PR is previewed and reminds user to provide a description for the PR. 
- Merge pull request #122 from eysteinn/scatsat1. [Martin Raspaud] Add reader for ScatSat1 Level 2B wind speed data, HDF5 format - Read end_time info correctly. [Eysteinn] - Add reader for ScatSat1 Level 2B wind speed data. [Eysteinn] - Merge pull request #129 from pytroll/viirs_rgbs. [Martin Raspaud] Use the Pyspectral atm correction as the default. - Use the Pyspectral atm correction as the default. Add a high-res overview RGB, use the hncc-dnb in the night-microphysics and use the effective_solar_pathlength_corrected for all true color RGBs. [Adam.Dybbroe] - Merge pull request #128 from pytroll/atm_corrections. [Martin Raspaud] Atm corrections - Pep8 cosmetics. [Adam.Dybbroe] - Pep8 cosmetics. [Adam.Dybbroe] - Pep8 editorial, and fixing copyright. [Adam.Dybbroe] - Add some pre-defined atm/rayleigh corrections to appply over land and sea. [Adam.Dybbroe] - Merge pull request #131 from pytroll/bugfix-hrit-jma. [Martin Raspaud] Bugfix hrit_jma - Bugfix hrit_jma. [Martin Raspaud] - Use a more appropriate and shorter link to the MSG native format pdf doc. [Adam.Dybbroe] - Merge pull request #126 from pytroll/feature_ahi_stretch. [Martin Raspaud] Improvemements to AHI True color imagery - Use marine_clean and us-standard for atm correction, and improve stretch at low sun elevation. [Adam.Dybbroe] - Use the CIRA stretch for True color imagery. [Adam.Dybbroe] v0.7.5 (2017-12-11) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.4 → 0.7.5. [davidh-ssec] - Remove unused legacy .cfg files. [davidh-ssec] - Merge branch 'master' into develop. [davidh-ssec] - Merge pull request #118 from mitkin/master. [Martin Raspaud] Add file pattern for MODIS L1B from LAADS WEB - Add file pattern for MODIS L1B from LAADS WEB. [Mikhail Itkin] NASA's LAADS WEB pattern is slightly different - Remove old and unused mipp_xrit reader. [davidh-ssec] - Fix SCMI writer not overwriting data from previous tiles. [davidh- ssec] - Merge pull request #121 from pytroll/fix-ir-modifiers. [Martin Raspaud] Remove VIIRS SDR IR modifiers - Remove sun zenith angle correction from IR channels. [Panu Lahtinen] - Add github templates for issues and PRs. [Martin Raspaud] - Bugfix epsl1b reader. [Martin Raspaud] - Merge pull request #107 from pytroll/fix-nwcsaf-proj4. [David Hoese] Convert NWC SAF MSG projection string to meters - Merge branch 'fix-nwcsaf-proj4' of https://github.com/pytroll/satpy into fix-nwcsaf-proj4. [Panu Lahtinen] - Merge branch 'fix-nwcsaf-proj4' of https://github.com/pytroll/satpy into fix-nwcsaf-proj4. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. [Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. [Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. 
[Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Merge pull request #111 from eysteinn/sentinel1-reproject. [David Hoese] Fixed area information to safe_sar_c reader to allow for resampling - Added coordinates to sar_c.yaml to allow for reprojection. [Eysteinn] - Merge pull request #108 from TAlonglong/feature-decorate. [Martin Raspaud] Feature decorate - __init__.py docstring in a few add pydecorate features. [Trygve Aspenes] - Satpy/writers/__init__.py implement more general way of handling pydecorate calls from satpy save_dataset. Instead of logo and text separate, use decorate. This needs to be a list to keep the order of alignment available in pydecorate. Since the argument to add_decorate needs to be a mapping it may look like this: decorate={'decorate':[{'logo':{...}},{'text':{...}},...]} [Trygve Aspenes] - Merge branch 'develop' into develop-fork. [Trygve Aspenes] - Satpy/writers/__init__.py added add_text function. This is meant to be used when calling save_dataset to add text to an image using pydecorate. eg save_dataset(...., text_overlay={'text': 'THIS IS THE TEXT TO BE ADDED', 'align':{'top_bottom':'bottom', 'left_right':'right'}, 'font':'/usr/share/fonts/truetype/msttcorefonts/Arial.ttf', 'font_size':25, 'height':30, 'bg':'black', 'bg_opacity':255, 'line':'white'}). Not all options available as style in pydecorate are implemented. This is left TODO. This PR is dependent on https://github.com/pytroll/pydecorate/pull/3 to be completed. [Trygve Aspenes] - Adding to more options to add_overlay. This to better control which levels of coast(GSHHS) and borders (WDB_II) are put on the plot. [Trygve Aspenes] - Merge pull request #88 from pytroll/feature-3d-enhancement. [Panu Lahtinen] Add 3D enhancement, fix BWCompositor - Merge branch 'feature-3d-enhancement' of https://github.com/pytroll/satpy into feature-3d-enhancement. [Panu Lahtinen] - Add example of composite with 3D effect. [Panu Lahtinen] - Fix BWCompositor to handle info correctly. [Panu Lahtinen] - Add 3D effect enhancement. [Panu Lahtinen] - Remove rebase comments. [Panu Lahtinen] - Add example of composite with 3D effect. [Panu Lahtinen] - Fix BWCompositor to handle info correctly. [Panu Lahtinen] - Add 3D effect enhancement. [Panu Lahtinen] - Merge pull request #87 from pytroll/feature-IASI-L2-reader. [Panu Lahtinen] Add IASI L2 reader - Merge branch 'feature-IASI-L2-reader' of https://github.com/pytroll/satpy into feature-IASI-L2-reader. [Panu Lahtinen] - Merge branch 'feature-IASI-L2-reader' of https://github.com/pytroll/satpy into feature-IASI-L2-reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Merge branch 'develop' into feature-IASI-L2-reader. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. [Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. 
[Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. [Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Merge pull request #96 from eysteinn/create_colormap. [David Hoese] Create colormap - Make colorizing/palettizing more flexible. [Eysteinn] - Merge pull request #4 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #3 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #109 from pytroll/bugfix-scmi. [David Hoese] Fix SCMI writer and add more tiled grids - Fix SCMI writer writing masked geolocation to netcdf files. [davidh- ssec] - Add additional GOES SCMI grids. [davidh-ssec] - Allow adding overlay for L and LA images. [Martin Raspaud] - Merge pull request #101 from pytroll/bugfix-scmi3. [David Hoese] Fix python 3 compatibility in scmi writer - Add more SCMI writer tests for expected failures. [davidh-ssec] - Fix python 3 compatibility in scmi writer. [davidh-ssec] Includes fix for X/Y coordinate precision which affects GOES-16 data - Merge pull request #105 from howff/doc-fix. [Martin Raspaud] fix available_composite_names in doc - Fix available_composite_names in doc. [Andrew Brooks] v0.7.4 (2017-11-13) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.3 → 0.7.4. [davidh-ssec] - Update changelog. [davidh-ssec] - Fix physical_element for VIIRS M07 in SCMI writer. [davidh-ssec] - Merge pull request #97 from pytroll/feature-optimize-scmi. [David Hoese] Optimize SCMI writer to reuse results of tile calculations - Fix area id in SCMI writer to be more specific. [davidh-ssec] - Optimize SCMI writer to reuse results of tile calculations. [davidh- ssec] It uses a little bit more memory, but speeds up the processing by quite a bit when tested under the Polar2Grid equivalent. - Fix floating point saving for geotiff. [Martin Raspaud] - Merge pull request #93 from pytroll/bugfix-user-enhancements. [David Hoese] Fix enhancement config loading when user configs are present - Fix enhancement config loading when user configs are present. [davidh- ssec] v0.7.3 (2017-10-24) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.2 → 0.7.3. [davidh-ssec] - Merge branch 'develop' into new_release. [davidh-ssec] - Fix mock import in unittest. [davidh-ssec] mock should come from the unittest package in python 3+ - Merge pull request #90 from pytroll/bugfix-scmi-writer. [David Hoese] Fix SCMI writer to use newest version of pyresample - Fix SCMI writer to use newest version of pyresample. [davidh-ssec] - Adjust extents to kilometers. [Panu Lahtinen] - Merge pull request #86 from pytroll/bugfix-resample-setitem. [David Hoese] Fix resampling when a dataset was added via setitem and a test for it - Fix resampling when a dataset was added via setitem and a test for it. [davidh-ssec] Includes removing python 3.3 from travis tests - Merge pull request #84 from eysteinn/composite-snowage-fix. [Martin Raspaud] Composite snowage fix - Expand the dynamic of the channels up to 255 before to combine them: (0,1.6) => (0,255) [Eysteinn] - Merge pull request #2 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #85 from pytroll/feature-fullres-abi-tc. 
[David Hoese] Feature fullres abi tc - Fix geocat tests. [davidh-ssec] - Fix bug in geocat reader and SCMI writer. [davidh-ssec] Caused incorrect H8 and GOES-16 geolocation - Fix reader metaclass with newer versions of six. [davidh-ssec] - Fix metadata in ABI true color. [davidh-ssec] - Fix ABI true color averaging. [davidh-ssec] - Fix DatasetID comparison in python 3 and add test for it. [davidh- ssec] - Fix super call in ABI true color 2km class. [davidh-ssec] - Add writers yaml files to setup.py. [davidh-ssec] - Create sharpened full resolution ABI true color. [davidh-ssec] - Merge pull request #81 from loreclem/develop. [Martin Raspaud] Develop - Added some doc. [lorenzo clementi] - Fixed missing import. [lorenzo clementi] - Bugfix (typo) [lorenzo clementi] - First working version of ninjo converter. [lorenzo clementi] - Improved generic reader, removed useles bitmap composite. [lorenzo clementi] - Bugfix in the generic image reader. [lorenzo clementi] - Draft generic image reader. [lorenzo clementi] - Merge pull request #80 from pytroll/solar-pathlength-correction. [Martin Raspaud] Solar pathlength correction and Rayleigh correction interface - Fix anti pattern: Not using get() to return a default value from a dict. [Adam.Dybbroe] - Introduce an alternative sun-zenith correction algorithm, and fix rayleigh/aerosol correction so atmosphere and aerosol type can be specified in the config files. [Adam.Dybbroe] - Merge branch 'develop' into solar-pathlength-correction. [Adam.Dybbroe] - Maia reader (#79) [roquetp] * not finalised version : problem with standard name * Fix maia reader for simple loading * working version with CM and CT * add Datasets and fix the problem with end_time. * Add a exemple for read MAIA files * Add maia reader * fix on maia name * add reference on the test case * autopep8 on the example polar_maia.py and add the reference of the data test case * maia-reader : clean and pep8 * add reference documentation v0.7.2 (2017-09-18) ------------------- Fix ~~~ - Bugfix: Get the solar zenith angle. [Adam.Dybbroe] Other ~~~~~ - Update changelog. [davidh-ssec] - Bump version: 0.7.1 → 0.7.2. [davidh-ssec] - Merge pull request #67 from pytroll/feature-scmi-writer. [David Hoese] Feature scmi writer - Fix SCMI lettered grid test to not create huge arrays. [davidh-ssec] - Fix SCMI test so it actually uses lettered grids. [davidh-ssec] - Add more SCMI writer tests and documentation. [davidh-ssec] - Fix geocat reader for better X/Y coordinate estimation. [davidh-ssec] - Add really basic SCMI writer test. [davidh-ssec] - Fix SCMI debug tile generation. [davidh-ssec] - Add debug tile creation to SCMI writer. [davidh-ssec] - Fix SCMI writer for lettered grids. [davidh-ssec] - Fix numbered tile counts for SCMI writer. [davidh-ssec] - Add initial SCMI writer. [davidh-ssec] WIP: Multiple tiles, lettered tiles, debug images - Separate EnhancementDecisionTree in to base DecisionTree and subclass. [davidh-ssec] - Add 'goesr' as possible platform in geocat reader. [davidh-ssec] - Add SCMI and geotiff writer extras to setup.py. [davidh-ssec] - Add GOES-16 filename to geocat config. [davidh-ssec] - Merge pull request #69 from pytroll/modis-viewing-geometry-and-atm- correction. [Martin Raspaud] Modis viewing geometry and atm correction - Modis true_color atm corrected with pyspectral. [Adam.Dybbroe] - Merge branch 'develop' into modis-viewing-geometry-and-atm-correction. [Adam.Dybbroe] - Merge pull request #73 from pytroll/cira-stretch-numpy-1-13-issue. 
  [Martin Raspaud]
  Add unittest for cira_stretch and fix it for numpy >=1.13
- Bugfix unittest suite. [Adam.Dybbroe]
- Fix cira_stretch to work despite broken numpy (numpy issue 9687) [Adam.Dybbroe]
- Smaller unittest example, and fixed. Works for numpy < 1.13 only though. [Adam.Dybbroe]
- Add unittest for cira_stretch and fix it for numpy >=1.13. [Adam.Dybbroe]
- Merge pull request #75 from pytroll/feature_realistic_colors. [Martin Raspaud]
  Realistic colors composite for SEVIRI
- Merge branch 'develop' into feature_realistic_colors. [Martin Raspaud]
- Merge branch 'develop' into feature_realistic_colors. [Martin Raspaud]
- Add RealisticColors compositor for SEVIRI. [Panu Lahtinen]
- Use array shape instead of possibly non-existent lon array shape. [Panu Lahtinen]
- Adjust mask size when number of channels is changed when enhancing. [Panu Lahtinen]
- Merge pull request #71 from eysteinn/composite-snowage. [Martin Raspaud]
  added snow_age viirs composite & lookup table enhancement
- Merge branch 'develop' into composite-snowage. [Martin Raspaud]
- Ch out is explicit. [Eysteinn]
- Allows any number of channels. [Eysteinn]
- Allows any number of channels. [Eysteinn]
- Fixed satpy/etc/enhancements/generic.yaml. [Eysteinn]
- Added snow_age viirs composite & lookup table enhancement. [Eysteinn]
- Merge pull request #72 from pytroll/feature_day-night_compositor. [Martin Raspaud]
  Add DayNightCompositor
- Add DayNightCompositor and example composite and enhancement configs. [Panu Lahtinen]
- Merge pull request #74 from eysteinn/composite-seviri. [Martin Raspaud]
  Composite seviri
- Changed night_overview to ir_overview. [Eysteinn]
- Added night_overview to seviri. [Eysteinn]
- Added night_microphysics to visir. [Eysteinn]
- Merge pull request #68 from pytroll/feature_palette_enhancement. [Panu Lahtinen]
  Merged.
- Update with palettize() and clarify usage. [Panu Lahtinen]
- Refactor using _merge_colormaps() instead of duplicate code. [Panu Lahtinen]
- Add palettize() [Panu Lahtinen]
- Fix typo. [Panu Lahtinen]
- Add user palette colorization to quickstart documentation. [Panu Lahtinen]
- Add palettize enhancement and colormap creation from .npy files. [Panu Lahtinen]
- Add sun-sat viewing angles and support for atm correction. [Adam.Dybbroe]
- Bugfix atm correction. [Adam.Dybbroe]
- Merge pull request #65 from pytroll/feature_bwcompositor. [Martin Raspaud]
  Feature bwcompositor
- Undo line wrapping done by autopep8. [Panu Lahtinen]
- Add single channel compositor. [Panu Lahtinen]
- Merge pull request #66 from loreclem/master. [Martin Raspaud]
  Added test to check the 1.5 km georeferencing shift
- Added test to check whether to apply the 1.5 km georeferencing correction or not. [lorenzo clementi]
- Add ir atm correction, and new airmass composite using this correction. [Adam.Dybbroe]
- Change writer configs from INI (.cfg) to YAML (#63) [David Hoese]
  * Change writer configs from INI (.cfg) to YAML
  * Add very simple writer tests and fix writer load from Scene
- Merge pull request #59 from pytroll/feature-geocat-reader. [David Hoese]
  Add geocat reader
- Add CLAVR-x reader to documentation. [davidh-ssec]
- Add geocat reader to documentation. [davidh-ssec]
- Fix a few styling issues in geocat reader. [davidh-ssec]
- Add python-hdf4 and HDF4 C library to travis dependencies. [davidh-ssec]
- Add HDF4 utils tests. [davidh-ssec]
- Add geocat unit tests. [davidh-ssec]
- Add geocat reader. [davidh-ssec]

v0.7.1 (2017-08-29)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.7.0 → 0.7.1.
  [Martin Raspaud]
- Fix style. [Martin Raspaud]
- Fix hdf4 lib name in dependencies. [Martin Raspaud]
- Rename optional dependencies for hdfeos to match reader name. [Martin Raspaud]
- Rename mda to metadata in hdfeos_l1b reader. [Martin Raspaud]
- Add overview composite for modis. [Martin Raspaud]
- Do not guess end time when filtering a filename. [Martin Raspaud]
- Add optional dependencies for viirs_compact. [Martin Raspaud]
- Fix abi_l1b test again. [Martin Raspaud]
- Fix abi_l1b tests. [Martin Raspaud]
- Fix sweep axis parameter reading in py3 for abi_l1b. [Martin Raspaud]
- Support py3 in abi_l1b. [Martin Raspaud]
- Add optional dependencies for abi_l1b. [Martin Raspaud]
- Merge pull request #58 from pytroll/metadata-filtering. [Martin Raspaud]
  Metadata filtering
- Fix filehandler unit test to use filename_info as a dict. [Martin Raspaud]
- Implement suggested style changes. [Martin Raspaud]
  See conversation in PR #58
- Finish fixing 0° Service to 0DEG. [Martin Raspaud]
- Fix Meteosat numbers to remove leading 0. [Martin Raspaud]
- Change HRIT base service to 0DEG. [Martin Raspaud]
- Change HRIT MSG patterns to explicit `service` [Martin Raspaud]
- Correct unit tests for metadata filtering compatibility. [Martin Raspaud]
- Add metadata filtering of filehandlers. [Martin Raspaud]
- Replace filter by list comprehension for py3 compatibility. [Martin Raspaud]
- Check area compatibility before merging channels in RGBCompositor. [Martin Raspaud]
- Add overview for ABI. [Martin Raspaud]
- Add EUM file patterns for ABI. [Martin Raspaud]
- Avoid crash when pattern matching on file crashes. [Martin Raspaud]
- Fix clavrx reader when filenames don't have end_time. [davidh-ssec]
- Add optional dependencies for sar_c. [Martin Raspaud]
- Fix h5py py3 issues with byte arrays as strings. [Martin Raspaud]
- Add optional dependency for the nc_nwcsaf_msg reader. [Martin Raspaud]
- Fix hrit_msg reading for py3. [Martin Raspaud]
- Add optional dependency for the hrit_msg reader. [Martin Raspaud]
- Add platform_name and service to msg metadata. [Martin Raspaud]
- Bugfix in MSG acquisition time metadata. [Martin Raspaud]
- Fix xRIT end time to follow specifications. [Martin Raspaud]

v0.7.0 (2017-08-15)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.6.2 → 0.7.0. [Martin Raspaud]
- Fix support for OMPS EDRs from other NASA sources. [davidh-ssec]
  Fix #57
- Change 'ncc_zinke' composite name to 'hncc_dnb' [davidh-ssec]
  Includes changes to code to make sure that things we think are floats actually are floats.
- Fix major bug that stopped certain composites from being loadable. [davidh-ssec]
  If a composite modified (added information to) the DatasetID of its returned Dataset, then the wishlist was not properly modified. This resulted in the Dataset being unloaded and seen as "unneeded". There was a test for this, but it wasn't working as expected.
- Update ABI scale factors to be 64-bit floats to improve X/Y calculations. [davidh-ssec]
  In other applications I have noticed that the in-file 32-bit factor and offset produce a noticeable drift in the per-pixel X/Y values. When converted to 64-bit to force 64-bit arithmetic, the results are closer to the advertised pixel resolution of the instrument.
- Add 'reader' name metadata to all reader datasets. [davidh-ssec]
- Add flag_meanings to clavrx reader. [davidh-ssec]
  Includes addition of /dtype to hdf4/hdf5/netcdf file handlers
- Fix area unit conversion. [Martin Raspaud]
- Fix the path to the doc to test.
  [Martin Raspaud]
- Fix some documentation. [Martin Raspaud]
- Fix area hashing in resample caching. [davidh-ssec]
- Add better error when provided enhancement config doesn't exist. [davidh-ssec]
- Simple workaround for printing a dataset with no-name areas. [davidh-ssec]
- Fix `get_config_path` to return user files before package provided. [davidh-ssec]
- Fix bug in geotiff writer where gdal options were ignored. [davidh-ssec]
- Merge pull request #53 from pytroll/feature-clavrx-reader. [David Hoese]
  Add CLAVR-x reader
- Update setuptools before installing on travis. [davidh-ssec]
- Fix enhancement configs in setup.py. [davidh-ssec]
  Includes fixing of hdf4 dependency to python-hdf4
- Add CLAVR-x reader. [davidh-ssec]
- Merge pull request #54 from tparker-usgs/writerTypo. [David Hoese]
  Correct typo in writer
- Correct typo. [Tom Parker]

v0.6.2 (2017-05-22)
-------------------

- Update changelog. [davidh-ssec]
- Bump version: 0.6.1 → 0.6.2. [davidh-ssec]
- Fix NUCAPS reader when used with multiple input granules. [davidh-ssec]
  Includes extra fix for the scene when missing datasets need to be printed/logged.
- Work on projections for cf-writer. [Martin Raspaud]
- Cosmetic fixes. [Martin Raspaud]
- Improve cf writer, including grid mappings. [Martin Raspaud]
- Bugfix eps_l1b. [Martin Raspaud]
- Pass kwargs to dataset saving. [Martin Raspaud]
- Add ninjotiff writer. [Martin Raspaud]
- Avoid crashing when resampling datasets without area. [Martin Raspaud]
- Add reducer8 compositor. [Martin Raspaud]
- Merge pull request #51 from pytroll/common-nwcsaf-readers. [Martin Raspaud]
  Add reader for NWCSAF/PPS which can also be used by NWCSAF/MSG
- Add support for PPS/CPP cloud phase and effective radius. [Adam.Dybbroe]
- Harmonize composite names between PPS and MSG, and try to handle the odd PPS palette in CTTH-height. [Adam.Dybbroe]
- Added more PPS products - CPP parameters still missing. [Adam.Dybbroe]
- Add modis support for pps reader. [Adam.Dybbroe]
- Comment out get_shape method. [Adam.Dybbroe]
- Add reader for NWCSAF/PPS which can also be used by NWCSAF/MSG. [Adam.Dybbroe]
- Add initial enhancer tests. [davidh-ssec]

v0.6.1 (2017-04-24)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.6.0 → 0.6.1. [Martin Raspaud]
- Change branch for landscape badge. [Martin Raspaud]
- Fix badge to point to develop. [Martin Raspaud]
- Add a couple of badges to the readme. [Martin Raspaud]
- Remove imageo subpackage and related tests. [davidh-ssec]
- Add test for ReaderFinder. [davidh-ssec]
  Required fixing all reader tests that had improper patching of base file handlers.
- Add NUCAPS reader tests. [davidh-ssec]
- Fix OMPS EDR valid_min comparison. [davidh-ssec]
- Add OMPS EDR tests. [davidh-ssec]
- Add shape checking to AMSR2 L1B tests. [davidh-ssec]
- Attempt to fix AMSR2 L1B reader tests. [davidh-ssec]
- Add AMSR2 L1B tests. [davidh-ssec]
- Fix loading of failed datasets. [davidh-ssec]
  Fix #42
- Fix viirs sdr loading when dataset's file type isn't loaded. [davidh-ssec]
- Add a ColorizeCompositor vs PaletteCompositor. [Martin Raspaud]
- Fix viirs sdr tests for python 3. [davidh-ssec]
- Add ability for VIIRS SDRs to load geolocation files from N_GEO_Ref. [davidh-ssec]
  Also fixed tests and fixed dfilter not working in VIIRS SDRs when key was a DatasetID
- Clean up styling for coordinates check. [davidh-ssec]
  Quantified code complained about duplicate if statements
- Raise ValueError instead of IOError when standard_name is missing in coordinates.
  [Adam.Dybbroe]
- Use previously unused cache dict to hold cached geolocation data. [Adam.Dybbroe]
- Remove redundant import. [Adam.Dybbroe]
- Raise an IOError when (lon, lat) coordinates don't have a standard_name. [Adam.Dybbroe]
- Add warning when sensor is not supported by any readers. [davidh-ssec]
  Fix #32

v0.6.0 (2017-04-18)
-------------------

Fix
~~~
- Bugfix: Masking data and applying vis-calibration. [Adam.Dybbroe]
- Bugfix: Add wavelength to the DatasetID. [Adam.Dybbroe]
- Bugfix: Add wavelength to the dataset info object, so the pyspectral interface works. [Adam.Dybbroe]

Other
~~~~~
- Update changelog. [Martin Raspaud]
- Bump version: 0.5.0 → 0.6.0. [Martin Raspaud]
- Fix pyresample link in README. [davidh-ssec]
- Update documentation and readme to be more SatPy-y. [davidh-ssec]
- Add ACSPO reader to documentation. [davidh-ssec]
- Reduce redundant code in netcdf4 based tests. [davidh-ssec]
- Add ACSPO reader tests. [davidh-ssec]
- Force minimum version of netcdf4-python. [davidh-ssec]
- Update pip on travis before installing dependencies. [davidh-ssec]
- Install netcdf4 from source tarball on travis instead of from wheel. [davidh-ssec]
  netCDF4-python seems to be broken on travis when installed from a wheel. This tries installing it from a source tarball.
- Replace netcdf4 with h5netcdf in netcdf4 file handler tests. [davidh-ssec]
  Travis has a library issue with netcdf4 so trying h5netcdf instead
- Install cython via apt for travis tests. [davidh-ssec]
- Add tests for NetCDF4 File Handler utility class. [davidh-ssec]
- Add tests for HDF5 File Handler utility class. [davidh-ssec]
- Update VIIRS L1B tests to work with python 3. [davidh-ssec]
  Includes installing netcdf4 apt packages on travis
- Add netCDF4 library to travis tests. [davidh-ssec]
- Add VIIRS L1B tests. [davidh-ssec]
- Change YAML reader to only provide datasets that are requested. [davidh-ssec]
  Includes changes to mask any data slices when data can't be loaded from one or more file handlers. Raises an error if all file handlers fail.
- Clean up style. [Martin Raspaud]
- Add behave test for returned least modified dataset. [davidh-ssec]
- Merge pull request #48 from pytroll/feature_bilinear. [David Hoese]
  Bilinear interpolation
- Merge pull request #49 from pytroll/fix_ewa. [David Hoese]
  Fix EWA resampling
- Remove data copy from EWA resampling. [davidh-ssec]
- Send copy of the data to fornav() [Panu Lahtinen]
- Merge branch 'fix_ewa' of https://github.com/pytroll/satpy into fix_ewa. [Panu Lahtinen]
- Send copy of data to fornav() [Panu Lahtinen]
  Fixes EWA resampling
- Remove unused import. [Panu Lahtinen]
- Discard masks from cache data. [Panu Lahtinen]
- Start fixing EWA; single channels work, multichannels yield bad images. [Panu Lahtinen]
- Add example using bilinear interpolation, caching and more CPUs. [Panu Lahtinen]
- Handle datasets with multiple channels. [Panu Lahtinen]
- Reorganize code. [Panu Lahtinen]
  - move caches to base class attribute
  - move cache reading to base class
  - move cache updating to base class
- Add bilinear resampling, separate lonlat masking to a function. [Panu Lahtinen]
- Merge pull request #50 from pytroll/feature-acspo-reader. [David Hoese]
  Add ACSPO SST Reader
- Add more documentation methods in ACSPO reader. [davidh-ssec]
- Fix ACSPO reader module docstring. [davidh-ssec]
- Add ACSPO SST Reader. [davidh-ssec]
- Cleanup code based on quantifiedcode. [davidh-ssec]
- Add test to make sure least modified datasets are prioritized in getitem.
  [davidh-ssec]
- Change DatasetID sorting to be more pythonic. [davidh-ssec]
- Fix incorrect usage of setdefault. [davidh-ssec]
- Change DatasetIDs to be sortable and sort them in DatasetDict.keys() [davidh-ssec]
- Make failing test more deterministic. [davidh-ssec]
  Planning to change how requested datasets are loaded/discovered so this test will need to get updated in the future anyway.
- Fix DatasetDict.__getitem__ being slightly non-deterministic. [davidh-ssec]
  __getitem__ was depending on the output and order of .keys() which is not guaranteed to be the same every time. If more than one key was found to match the `item` then the first in a list based on .keys() was returned. The first element in this list was not always the same.
- Fix Scene loading or computing datasets multiple times. [davidh-ssec]
- Add filename filtering for start and end time. [davidh-ssec]
- Fix Scene loading datasets multiple times. [davidh-ssec]
  Fix #45
- Fix setup.py's usage of find_packages. [davidh-ssec]
- Fix deleting an item from the Scene if it wasn't in the wishlist. [davidh-ssec]
  If a user specified `unload=False` then there may be something in the Scene that isn't needed later.
- Use setuptools' find_packages in setup.py. [davidh-ssec]
- Use only h5py for compact viirs reading. [Martin Raspaud]
- Remove hanging print statements. [Martin Raspaud]
- Add night overview composite for viirs. [Martin Raspaud]
- Add area def for MSG HRV. [Martin Raspaud]
- Merge pull request #47 from pytroll/feature-yaml-enhancements. [Martin Raspaud]
  Switch enhancements to yaml format
- Switch enhancements to yaml format. [Martin Raspaud]
- Fix missed Projectable use in composites. [davidh-ssec]
- Add support for segmented geostationary data. [Martin Raspaud]
- Merge pull request #43 from pytroll/msg-native. [Martin Raspaud]
  Msg native
- Possible fix for python 3.5. [Adam.Dybbroe]
- Fix for python 3.5. [Adam.Dybbroe]
- Change from relative to absolute import. [Adam.Dybbroe]
- Merge branch 'develop' into msg-native. [Adam.Dybbroe]
- Handle (nastily) cases where channel data are not available in the file. Add unittests. [Adam.Dybbroe]
- Merge branch 'develop' into msg-native. [Adam.Dybbroe]
- Add unittests for count to radiance calibration. [Adam.Dybbroe]
- Use 10 to 16 bit conversion function that was copied from mipp. [Adam.Dybbroe]
- Handle subset of SEVIRI channels. Full disk supported only. [Adam.Dybbroe]
- Make file reading numpy 1.12 compatible. [Sauli Joro]
- Remove dependency on mipp. [Adam.Dybbroe]
- Merge branch 'develop' into msg-native. [Adam.Dybbroe]
  Conflicts:
  satpy/readers/__init__.py
  satpy/readers/hrit_msg.py
- Fix IR and VIS calibration. [Adam.Dybbroe]
- Pep8 and editorial (header) updates. [Adam.Dybbroe]
- Adding the native msg header record definitions. [Adam.Dybbroe]
- Semi-stable native reader version. Calibration unfinished. [Adam.Dybbroe]
- Unfinished msg native reader. [Adam.Dybbroe]
- Merge pull request #38 from bmu/develop. [Martin Raspaud]
  conda based install
- Reformulated the documentation again. [bmu]
- Corrected channel preferences of conda requirement file. [bmu]
- Corrected file name in documentation. [bmu]
- Renamed requirement file to reflect python and numpy version. [bmu]
- Added installation section to the docs. [bmu]
- Add vi swp files to gitignore. [bmu]
- Added environment file for conda installations. [bmu]
- Merge pull request #40 from m4sth0/develop. [Martin Raspaud]
  Add area slicing support for MTG-LI filehandler
- Add workaround for area slicing issue.
  [m4sth0]
  Choosing a sub-area for data import in a scene object like EuropeCanary results in a wrong area slice due to wrong area interpolation. If the lat/lon values of a sub-area are invalid (e.g. in space) the slicing becomes incorrect. This commit bypasses this by calculating the slices directly, without interpolation, for two areas with the same projection (geos).
- Add area slicing support for MTG-LI filehandler. [m4sth0]
- Merge pull request #41 from meteoswiss-mdr/develop. [Martin Raspaud]
  Pytroll workshop --> new NWCSAF v2016 products
- Pytroll workshop --> new NWCSAF v2016 products. [sam]
- Change table of supported data types. [Adam.Dybbroe]
- Add column "shortcomings" to table of supported readers, and add row for native reader. [Adam.Dybbroe]
- Do not compute resampling mask for AreaDefinitions. [Martin Raspaud]
- Add support for LRIT 8 bits. [Martin Raspaud]
- Cleanup HRIT readers. [Martin Raspaud]
- Add ABI composite module. [Martin Raspaud]
- Update list of supported formats. [Martin Raspaud]
- Remove unneeded code for electro reader. [Martin Raspaud]
- Add HRIT JMA reader. [Martin Raspaud]
- Merge pull request #35 from m4sth0/develop. [Martin Raspaud]
  Fix MTG-FCI and LI readers
- Fix MTG-FCI and LI readers. [m4sth0]
- Fix area extent for MSG segments. [Martin Raspaud]
- Add very basic tests for the VIIRS SDR file reader. [davidh-ssec]
- Test some utility functions. [Martin Raspaud]
- Fix tutorial. [Martin Raspaud]

v0.5.0 (2017-03-27)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.4.3 → 0.5.0. [Martin Raspaud]
- Make sure calibration order is respected. [Martin Raspaud]
- Fix angles interpolation in olci reader. [Martin Raspaud]
- Fix some py3 tests. [Martin Raspaud]
- Test BaseFileHandler. [Martin Raspaud]
- Add some reader tests. [Martin Raspaud]
- Work on ABI true color. [Martin Raspaud]
- Add more VIIRS SDR tests. [davidh-ssec]
- Add a missing docstring. [Martin Raspaud]
- Refactor and test yaml_reader. [Martin Raspaud]
- Add basic VIIRS SDR file handler tests. [davidh-ssec]
- Add h5netcdf to travis. [Martin Raspaud]
- Add the ABI reader tests to main test suite. [Martin Raspaud]
- Optimize and test ABI l1b calibration functions. [Martin Raspaud]
- Add Zinke NCC algorithm to viirs DNB. [Martin Raspaud]
- Fix lunar angles names in viirs sdr. [Martin Raspaud]
- Add lunar angles support in compact viirs. [Martin Raspaud]

v0.4.3 (2017-03-07)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.4.2 → 0.4.3. [Martin Raspaud]
- Add more tests to yaml_reader. [Martin Raspaud]
- Document what the Scene accepts better. [davidh-ssec]
- Remove unused FileKey class. [davidh-ssec]
- Add more tests for Scene object. [davidh-ssec]
- Fix ABI L1B area again. [davidh-ssec]
- Add Electro-L N2 HRIT reader. [Martin Raspaud]
- Fix off-by-one error on calculating ABI L1B pixel resolution. [davidh-ssec]
- Add sweep PROJ.4 parameter to ABI L1B reader. [davidh-ssec]
- Fix geos bbox to rotate in the right direction. [Martin Raspaud]
- Fix ABI L1B file patterns not working for mesos. [davidh-ssec]
- Fix tests to handle reader_kwargs and explicit sensor keyword argument. [davidh-ssec]
- Add reader_kwargs to Scene to pass to readers. [davidh-ssec]
- Fix yaml reader start/end time with multiple file types. [davidh-ssec]
- Allow `Scene.all_composite_ids` to return even if no sensor composite config. [davidh-ssec]

v0.4.2 (2017-02-27)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.4.1 → 0.4.2.
  [Martin Raspaud]
- Merge branch 'develop' [Martin Raspaud]
- Fix area coverage test for ImportError. [Martin Raspaud]
- Add two more tests for yaml_reader. [Martin Raspaud]
- Add more datasets for NUCAPS reader. [davidh-ssec]
- Add missing_datasets property to Scene. [davidh-ssec]
  Includes fix for trying to compute datasets after resampling that previously failed to load from readers
- Make 'view' a variable in SLSTR reader. [Martin Raspaud]
- Test available_datasets in yaml_reader. [Martin Raspaud]
- Remove NotImplementedError in abstract methods. [Martin Raspaud]
- Test filtering yaml filehandlers by area. [Martin Raspaud]
- Add yamlreader test. [Martin Raspaud]
- Fix reader test of all_dataset_ids. [davidh-ssec]
- Fix unit conversion for ABI L1B reader. [davidh-ssec]
- Fix python3 tests. [Martin Raspaud]
- Test all dataset ids and names. [Martin Raspaud]
- Fix ABI Reader to work with non-CONUS images. [davidh-ssec]
- Add unit conversion to ABI reader so generic composites work better. [davidh-ssec]
- Fix ABI reader area definition and file type definitions. [davidh-ssec]
- Change default start_time from file handler filename info. [davidh-ssec]
- Add `get` method to hdf5 and netcdf file handlers. [davidh-ssec]
- Fix interpolation of slstr angles. [Martin Raspaud]
- Merge pull request #31 from mitkin/feature_caliop-reader. [Martin Raspaud]
  Add CALIOP v3 HDF4 reader
- PEP8 fixes. [Mikhail Itkin]
- Read end_time from file metadata. [Mikhail Itkin]
- Functional CALIOP V3 HDF4 file handler. [Mikhail Itkin]
- Merge branch 'develop' of https://github.com/pytroll/satpy into feature_caliop-reader. [Mikhail Itkin]
- CALIOP reader WIP. [Mikhail Itkin]
- Update to caliop reader. [Mikhail Itkin]
- Add CALIOP reader (non functional yet) [Mikhail Itkin]
- Work on slstr reader. [Martin Raspaud]
- Fix small style error. [davidh-ssec]
- Change swath definition name to be more unique. [davidh-ssec]
- Fix style. [Martin Raspaud]
- Create on-the-fly name for swath definitions. [Martin Raspaud]
- Do some style cleanup. [Martin Raspaud]
- Add simple tests for scene dunder-methods and others. [davidh-ssec]
  Fix bugs that these tests encountered
- Remove osx from travis testing environments. [davidh-ssec]
- Fix amsr2 l1b reader coordinates. [davidh-ssec]
- Update link to satpy's repository. [Mikhail Itkin]
  Used to be under `mraspaud`, now `pytroll`

v0.4.1 (2017-02-21)
-------------------

- Update changelog. [davidh-ssec]
- Bump version: 0.4.0 → 0.4.1. [davidh-ssec]
- Remove forgotten print statement in tests. [davidh-ssec]
- Fix wavelength comparison when there are mixed types. [davidh-ssec]
- Remove old files. [Martin Raspaud]
- Merge pull request #30 from pytroll/feature-get-dataset-key-refactor. [David Hoese]
  Refactor get_dataset_key
- Merge branch 'develop' into feature-get-dataset-key-refactor. [Martin Raspaud]
- Rename ds id search function. [Martin Raspaud]
- Added some tests to get_dataset_key refactor. [Martin Raspaud]
- Refactor get_dataset_key. [Martin Raspaud]
- Use dfilter in node. [Martin Raspaud]
- Refactor get_dataset_key wip. [Martin Raspaud]
- Use wavelength instead of channel name for NIR refl computation. [Martin Raspaud]
- Update contact info. [Martin Raspaud]

v0.4.0 (2017-02-21)
-------------------

- Update changelog. [davidh-ssec]
- Bump version: 0.3.1 → 0.4.0. [davidh-ssec]
- Fix composite loading when prereqs are delayed. [davidh-ssec]
- Remove randomness altogether. [Martin Raspaud]
- Reduce range of randomness for helper tests.
  [Martin Raspaud]
- Make PSPRayleigh modifier fail if dataset shapes don't match. [Martin Raspaud]
- Replace compositor name by id in log message. [Martin Raspaud]
- Remove unnecessary print statement. [Martin Raspaud]
- Remove plotting from helper_functions. [Martin Raspaud]
- Add some randomness in helper_function tests. [Martin Raspaud]
- Refactor and test helper functions for geostationary areas. [Martin Raspaud]
- Add masking of space pixels in AHI hsd reader. [Martin Raspaud]
- Add tests when datasets fail to load. [davidh-ssec]
- Remove redundant container specification in certain reader configs. [davidh-ssec]
  Now that Areas are set by coordinates and Projectables are now Datasets there is no need to customize the container a dataset uses to define it as "metadata".
- Fix composite loading when the compositor adds more information to the DatasetID. [davidh-ssec]
- Add new composites for AHI. [Martin Raspaud]
- Remove fast finish and py26 from travis config. [davidh-ssec]
- Fix duplicate or incorrect imports from Projectable/DatasetID refactor. [davidh-ssec]
- Remove Projectable class to use Dataset everywhere instead. [davidh-ssec]
- Merge pull request #28 from pytroll/feature-remove-id. [David Hoese]
  Remove 'id' from the info attribute in datasets and composites
- Remove to_trimmed_dict, add a kw to to_dict instead. [Martin Raspaud]
- Add id attribute to Dataset. [Martin Raspaud]
- Fix tests.utils to work with the id attribute. [Martin Raspaud]
- Remove id from infodict, wip. [Martin Raspaud]
- Fix style. [Martin Raspaud]
- Use getattr instead of if-else construct in apply_modifier_info. [Martin Raspaud]
- Use wavelength instead of channel name for NIR refl computation. [Martin Raspaud]
- Fix modifier info getting applied. [davidh-ssec]
  Now the modifier's DatasetID gets updated along with any information that can be gathered from the source
- Fix loading modified datasets that change resolution. [davidh-ssec]
- Add more Scene loading tests for composites that use wavelengths instead of names. [davidh-ssec]
- Fix rows_per_scan for VIIRS L1B reader and the sharpened RGB compositor. [davidh-ssec]
- Fix scene loading when reader dataset failed to load. [davidh-ssec]
- Add day microphysics composite to slstr. [Martin Raspaud]
- Fix reading angles for SLSTR (S3) [Martin Raspaud]
- Fix test by using DATASET_KEYS instead of DatasetID's as_dict. [Martin Raspaud]
- Correct some metadata in viirs_sdr. [Martin Raspaud]
- Refactor and test get_dataset_by* [Martin Raspaud]
- Merge pull request #27 from davidh-ssec/develop. [David Hoese]
  Refactor Scene dependency tree
- Add some docstrings to new deptree and compositor handling. [davidh-ssec]
- Fix intermittent bug where requested dataset/comp wasn't "kept" after loading. [davidh-ssec]
  This would happen when a composite depended on a dataset that was also requested by the user. If the composite was processed first then the dependency wasn't reprocessed, but this was incorrectly not replacing the requested `name` in the wishlist with the new `DatasetID`.
- Add tests for Scene loading. [davidh-ssec]
  Includes a few fixes for bugs that were discovered including choosing the best dataset from a DatasetDict when there are multiple matching Datasets.
- Add very basic Scene loading tests. [davidh-ssec]
- Fix behavior tests for python 3 and composite dependencies. [davidh-ssec]
- Move dependency logic to DependencyTree class. [davidh-ssec]
- Fix dependency tree when scene is resampled.
  [davidh-ssec]
- Refactor compositor loading to better handle modified datasets/composites. [davidh-ssec]
  Includes assigning DatasetIDs to every compositor and renaming some missed references to wavelength_range which should be wavelength.
- Fix DatasetID hashability in python 3. [davidh-ssec]
  In python 3 if __eq__ is defined then the object is automatically unhashable. I don't think we should run into problems with a more flexible __eq__ than the hash function.
- Fix loading composite by DatasetID. [davidh-ssec]
  Includes some cleanup of dependency tree, including changes to Node. Also includes adding comparison methods to the DatasetID class
- Fix `available_modifiers` [davidh-ssec]
  Required changes to how a deptree is created. Includes adding name attribute to Node class.
- Refactor name and wavelength comparison functions to top of readers module. [davidh-ssec]
  So they can be used outside of DatasetDict
- Added some tests for yaml_reader generic functions. [Martin Raspaud]
- Add true_color_lowres to viirs (no pan sharpening) [Martin Raspaud]
- Provide blue band to psp rayleigh correction. [Martin Raspaud]
- Add MODIS composite config. [Martin Raspaud]
- Add ABI composite config. [Martin Raspaud]
- Cleanup style in yaml_reader. [Martin Raspaud]
- Implement slicing for hrit. [Martin Raspaud]
- Cleanup abi_l1b reader. [Martin Raspaud]
- Allow get_dataset to raise KeyError to signal missing dataset in file. [Martin Raspaud]
- Fix geostationary boundingbox. [Martin Raspaud]
- Fill in correct wavelength for olci. [Martin Raspaud]
- Add lon and lat info for hrpt. [Martin Raspaud]
- Remove redundant file opening in hdfeos. [Martin Raspaud]
- Add forgotten unit. [Martin Raspaud]
- Fix wrong standard_name and add "overview" recipe. [Adam.Dybbroe]
- Fix NIRReflectance modifier. [Martin Raspaud]
- Update standard names and mda for hrit_msg. [Martin Raspaud]
- Add another modis filepattern. [Nina.Hakansson]
- Add python 3.6 to travis testing. [davidh-ssec]
- Update travis config to finish as soon as required environments finish. [davidh-ssec]
- Fix h5py reading of byte strings on python 3. [davidh-ssec]
  Was handling scalar arrays of str objects, but in python 3 they are bytes objects and weren't detected in the previous condition.
- Cleanup test_yaml_reader.py. [Martin Raspaud]
- Add tests for file selection. [Martin Raspaud]
- Document how to save custom composites. [Martin Raspaud]
- Fix VIIRS L1B reader for reflectances on v1.1+ level 1 processing software. [davidh-ssec]
- Fix bug in FileYAMLReader when filenames are provided. [davidh-ssec]
- Add a reader for Sentinel-2 MSI L1C data. [Martin Raspaud]
- Remove unnecessary arguments in sar-c reader. [Martin Raspaud]

v0.3.1 (2017-01-16)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.3.0 → 0.3.1. [Martin Raspaud]
- Cleanup SAR-C. [Martin Raspaud]
- Add annotations loading for sar-c. [Martin Raspaud]
- Merge pull request #22 from mitkin/feature-sar-geolocation. [Martin Raspaud]
  Feature SAFE (Sentinel 1) SAR geolocation
- Refactor coordinates computation. [Mikhail Itkin]
  Refactor changes for pull request #22
- Merge branch 'develop' of https://github.com/mitkin/satpy into feature-sar-geolocation. [Mikhail Itkin]
- Make Sentinel 1 (SAFE) reader able to read coordinates. [Mikhail Itkin]
  Add latitude and longitude dictionaries to the `sar_c.yaml` reader and make the `safe_sar_c.py` reader compute coordinate arrays from a collection of GCPs provided in the measurement files.
  NB: each polarization has its set of longitudes and latitudes.
- Restore reducers to their original values. [Martin Raspaud]
- Add alternatives for true color on ahi. [Martin Raspaud]
  Thanks balt
- Add name to the dataset attributes when writing nc files. [Martin Raspaud]
- Improve documentation. [Martin Raspaud]
- Add proper enhancements for nwcsaf images. [Martin Raspaud]
- Refactor hrit msg area def computation. [Martin Raspaud]
- Perform some PEP8 cleanup. [Martin Raspaud]
- Fix nwcsaf reader and its area definition. [Martin Raspaud]
- Merge pull request #21 from mitkin/develop. [David Hoese]
  Mock pyresample.ewa
- Mock pyresample.ewa. [Mikhail Itkin]
  Mock pyresample.ewa to prevent sphinx from importing the module.
- Add NWCSAF MSG nc reader and composites. [Martin Raspaud]
- Add gamma to the sarice composite. [Martin Raspaud]
- Cleanup the sar composite. [Martin Raspaud]
- Add the sar-ice composite. [Martin Raspaud]
- Clean up the safe sar-c reader. [Martin Raspaud]
- Finalize MSG HRIT calibration. [Martin Raspaud]
- Fix abi reader copyright. [Martin Raspaud]
- Refactor yaml_reader's create_filehandlers. [Martin Raspaud]
- Rename function. [Martin Raspaud]
- Add a composite file for slstr. [Martin Raspaud]
- Add a noaa GAC/LAC reader using PyGAC. [Martin Raspaud]
- Implement a mipp-free HRIT reader. [Martin Raspaud]
  WIP, supports only MSG, no calibration yet.
- Concatenate area_def through making new AreaDefinition. [Martin Raspaud]
  This makes the concatenation independent of the AreaDefinition implementation.
- Allow stacking area_def from bottom-up. [Martin Raspaud]
- Fix yaml_reader testing. [Martin Raspaud]
- Add support for filetype requirements. [Martin Raspaud]
- Remove print statement in slstr reader. [Martin Raspaud]
- Remove deprecated helper functions. [Martin Raspaud]
- Refactor select_files, yaml_reader. [Martin Raspaud]
- Editorials. [Adam.Dybbroe]
- Add coastline overlay capability. [Martin Raspaud]
- Move the Node class to its own module. [Martin Raspaud]
- Initialize angles in epsl1b reader. [Martin Raspaud]
- Add angles reading to eps reader. [Martin Raspaud]

v0.3.0 (2016-12-13)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.2.1 → 0.3.0. [Martin Raspaud]
- Fix NUCAPS reader to work with latlon datasets. [davidh-ssec]
  This required changing yaml_reader to work with 1D arrays since NUCAPS is all 1D (both swath data and metadata).
- Refactor yaml_reader's load method. [Martin Raspaud]
- Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud]
- Fix VIIRS L1B reader to work with xslice/yslice and fix geolocation dataset names. [davidh-ssec]
- Fix netcdf wrapper to work better with older and newer versions of netcdf4-python. [davidh-ssec]
- Make ahi reader use correct default slicing. [Martin Raspaud]
- Bugfix sliced reading. [Martin Raspaud]
- Put slice(None) as default for reading. [Martin Raspaud]
- Allow readers not supporting slices. [Martin Raspaud]
- Refactor scene's init. [Martin Raspaud]
- Convert nucaps to coordinates. [Martin Raspaud]
- Adapt viirs_l1b to coordinates. [Martin Raspaud]
- Convert omps reader to coordinates. [Martin Raspaud]
- Reinstate viirs_sdr.yaml for coordinates, add standard_names. [Martin Raspaud]
- Adapt compact viirs reader to coordinates. [Martin Raspaud]
- Add first version of S1 Sar-c reader. [Martin Raspaud]
- Adapt olci reader to coordinates. [Martin Raspaud]
- Add S3 slstr reader. [Martin Raspaud]
- Add standard_names to hdfeos navigation.
  [Martin Raspaud]
- Fix epsl1b reader for lon/lat standard_name. [Martin Raspaud]
- Adapt amsr2 reader for coordinates. [Martin Raspaud]
- Fix aapp1b reader. [Martin Raspaud]
- Use standard name for lon and lat identification. [Martin Raspaud]
- Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud]
  Conflicts:
  satpy/readers/ahi_hsd.py
- Area loading for ahi_hsd. [Martin Raspaud]
- Fix python3 syntax incompatibility. [Martin Raspaud]
- Implement area-based loading. [Martin Raspaud]
- Add get_bounding_box for area-based file selection. [Martin Raspaud]
- Fix ahi area extent. [Martin Raspaud]
- Merge remote-tracking branch 'origin/feature-lonlat-datasets' into feature-lonlat-datasets. [Martin Raspaud]
- Convert VIIRS SDR reader to coordinates. [davidh-ssec]
- Fix viirs_sdr i bands to work with coordinates. [davidh-ssec]
- Support different path separators in patterns. [Martin Raspaud]
- Move area def loading to its own function. [Martin Raspaud]
- Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud]
  Conflicts:
  satpy/readers/yaml_reader.py
- Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud]
  Conflicts:
  satpy/readers/yaml_reader.py
- Pass down the calibration, polarization and resolution from main load. [Martin Raspaud]
- Fix typo in sunzenith correction description. Default is 88 deg, not 80. [Adam.Dybbroe]
- Fix sun zenith key for caching. [Martin Raspaud]
- Move helper functions to readers directory. [Martin Raspaud]
- Adapt hrpt reader to coordinates. [Martin Raspaud]
- Fix resample to work when the area has no name. [Martin Raspaud]
- Adapt aapp_l1b and hdfeos to coordinates. [Martin Raspaud]
- Change: remove arguments from get_area_def signature. [Martin Raspaud]
- Adapt eps_l1b to 'coordinates' [Martin Raspaud]
- Navigation is now handled through 'coordinates' [Martin Raspaud]
  Here we make longitudes and latitudes usual datasets, and the keyword called 'coordinates' in the config specifies the coordinates to use for the dataset at hand.

v0.2.1 (2016-12-08)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.2.0 → 0.2.1. [Martin Raspaud]
- Move ghrsst_osisaf.yaml to new location. [Martin Raspaud]
- Remove old mpop legacy files. [Martin Raspaud]
- Move etc to satpy, use package_data for default config files. [Martin Raspaud]
- Merge pull request #19 from adybbroe/osisaf_sst_reader. [Martin Raspaud]
  Add OSISAF SST GHRSST reader
- Add OSISAF SST GHRSST reader. [Adam.Dybbroe]
- Replace memmap with fromfile in ahi hsd reading. [Martin Raspaud]
- Merge branch 'develop' of github.com:pytroll/satpy into develop. [Adam.Dybbroe]
- Merge pull request #18 from northaholic/develop. [Martin Raspaud]
  Improve FCI reader readability. Fix FCI reader config for WV channels.
- Improve FCI reader readability. Fix FCI reader config for WV channels. [Sauli Joro]
- Merge pull request #17 from m4sth0/develop. [Martin Raspaud]
  Add MTG LI reader
- Add MTG-LI L2 reader for preliminary test data. [m4sth0]
- Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0]
- Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0]
- Solve compatibility problem with older netCDF4 versions. [Adam.Dybbroe]
- Fix style in abi reader. [Martin Raspaud]
- Add ABI reader + YAML. [Guido Della Bruna]
- Merge pull request #15 from m4sth0/develop. [Martin Raspaud]
  Develop
- Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0]
- Fixed FCI channel calibration method.
  [m4sth0]
- Fix VIIRS L1B moon illumination fraction for L1B v2.0. [davidh-ssec]
  In NASA Level 1 software version <2.0 the fraction was a global attribute; in v2.0 it is a per-pixel swath variable
- Fix DNB SZA and LZA naming to match viirs composite configs. [davidh-ssec]
- Fix start_time/end_time creation in Scene when no readers found. [davidh-ssec]
- Merge pull request #14 from m4sth0/develop. [Martin Raspaud]
  Add calibration functions for FCI
- Add calibration functions for FCI. [m4sth0]
- Bugfix. [Adam.Dybbroe]
- Bugfix. [Adam.Dybbroe]
- Editorial pep8/pylint. [Adam.Dybbroe]
- Merge pull request #13 from m4sth0/develop. [Martin Raspaud]
  Add MTG-FCI Level 1C netCDF reader
- Add MTG-FCI Level 1C netCDF reader. [m4sth0]
  The test dataset from EUMETSAT for the FCI Level 1C Format Familiarisation is used to implement the reader in satpy. Limitations due to missing metadata for satellite georeferencing and calibration.
- Pass down the calibration, polarization and resolution from main load. [Martin Raspaud]
- Fix typo in sunzenith correction description. Default is 88 deg, not 80. [Adam.Dybbroe]
- Move helper functions to readers directory. [Martin Raspaud]
- Fix Scene sensor metadata when it is a string instead of a list. [davidh-ssec]
- Fix start_time/end_time properties on Scene object after resampling. [davidh-ssec]
  These properties were dependent on scn.readers which doesn't exist after resampling creates a new "copy" of the original Scene. Now these values are part of the metadata in .info and set on init.
- Replace errors with warnings when loading dependencies. [davidh-ssec]

v0.2.0 (2016-11-21)
-------------------

Fix
~~~
- Bugfix: converted MSG products should be saveable. [Martin Raspaud]
- Bugfix: satellite name in msg_hdf now supports missing number. [Martin Raspaud]
- Bugfix: misspelling. [Martin Raspaud]
- Bugfix: mipp_xrit: do not crash on unknown channels, just warn and skip. [Martin Raspaud]
- Bugfix: changed reference from composites.cfg to composites/generic.cfg. [Martin Raspaud]
- Bugfix: works now for file auto discovery. [Martin Raspaud]
- Bugfix: get_filename wants a reader_instance and cleanup. [Martin Raspaud]
- Bugfix: setup.py now includes eps xml format description. [Martin Raspaud]
- Close all h5files in viirs_sdr, not only the last one. [Martin.Raspaud]
- Bugfix: close h5 files when done. [Martin Raspaud]
  Prior to h5py 3.0, the h5 files open with h5py are not closed upon deletion, so we have to do it ourselves...
- Bugfix: area.id doesn't exist, use area.area_id. [Martin Raspaud]
- Bugfix: return when each file has been loaded independently. [Martin Raspaud]
- Bugfix: Do not crash on multiple non-nwc files. [Martin Raspaud]
- Bugfix: check start and end times from loaded channels only. [Martin Raspaud]
- Bugfix: viirs start and end times not relying on non-existent channels anymore. [Martin Raspaud]
- Bugfix: type() doesn't support unicode, cast to str. [Martin Raspaud]
- Bugfix: allow more than one "-" in section names. [Martin Raspaud]
- Bugfix: read aqua/terra orbit number from file only if not already defined. [Martin Raspaud]
- Bugfix: fixed unittest case for wavelengths as lists. [Martin Raspaud]
- Bugfix: remove deprecated mviri testcases. [Martin Raspaud]
- Bugfix: backward compatibility with netcdf files. [Martin Raspaud]
- Bugfix: removed the old mviri compositer. [Martin Raspaud]
- Bugfix: When assembling, keep track of object, not just lon/lats. [Martin Raspaud]
- Bugfix: assembling scenes would unmask some lon/lats...
  [Martin Raspaud]
- Bugfix: handling of channels with different resolutions in assemble_segments. [Martin Raspaud]
- Bugfix: Runner crashed if called with an area not in product list. [Martin Raspaud]
- Bugfix: the nwcsaf_pps reader was crashing if no file was found... [Martin Raspaud]
- Bugfix: pynav is not working in some cases, replace with pyorbital. [Martin Raspaud]
- Bugfix: can now add overlay in monochromatic images. [Martin Raspaud]
- Bugfix: swath scene projection takes forever from the second time. [Martin Raspaud]
  The swath scene, when projected more than once, would recompute the nearest neighbours for every channel.
- Bugfix: importing geotiepoints. [Martin Raspaud]
- Bugfix: hdfeos was not eumetcast compliant :( [Martin Raspaud]
- Bugfix: Do not raise exception on loading failure (nwcsaf_pps) [Martin Raspaud]
- Bugfix: fixed misc bugs. [Martin Raspaud]
- Bugfix: comparing directories with samefile is better than ==. [Martin Raspaud]
- Bugfix: updating old eps_l1b interface. [Martin Raspaud]
- Bugfix: Fixed typo in gatherer. [Martin Raspaud]
- Bugfix: taking satscene.area into consideration for get_lonlat. [Martin Raspaud]
- Bugfix: mipp required version to 0.6.0. [Martin Raspaud]
- Bugfix: updating unittest and setup for new mipp release. [Martin Raspaud]
- Bugfix: for eps l1b, get_lonlat did not return coherent values since the introduction of pyresample. [Martin Raspaud]
- Bugfix: mipp to mipp_xrit namechange. [Martin Raspaud]
- Bugfix: better detection of needed channels in aapp1b. [Martin Raspaud]
- Bugfix: support for other platforms. [Martin Raspaud]
- Bugfix: Support python 2.4 in mipp plugin. [Martin Raspaud]
- Bugfix: masked arrays should be conserved by scene.__setitem__ [Martin Raspaud]
- Bugfix: Don't make area and time_slot static in compositer. [Martin Raspaud]
- Bugfix: reinit channels_to_load and messages for no loading. [Martin Raspaud]
  - When the loading process is interrupted, the channels_to_load attribute was not reinitialized.
  - Added a message when loading for a given level did not load anything.
- Bugfix: Give an informative message when area is missing for msg's hdf reader. [Martin Raspaud]
- Bugfix: update satpos file retrieval for hrpt and eps1a. [Martin Raspaud]
- Bugfix: fixed unittests for new plugin system. [Martin Raspaud]
- Bugfix: Do not load plugins automatically... [Martin Raspaud]
- Bugfix: satellite vs satname again. [Martin Raspaud]
- Bugfix: don't crash if msg hdf can't be loaded. [Martin Raspaud]
- Bugfix: project now chooses mode automatically by default. [Martin Raspaud]
- Bugfix: eps_avhrr adapted to new plugin format. [Martin Raspaud]
- Bugfix: loading in msg_hdf adapted to new plugin system. [Martin Raspaud]
- Bugfix: loading plugins should fail on any exception. [Martin Raspaud]
- Bugfix: stupid syntax error. [Martin Raspaud]
- Bugfix: mistook satname for satellite. [Martin Raspaud]
- Bugfix: move to jenkins. [Martin Raspaud]
- Bugfix: affecting area to channel_image. [Martin Raspaud]
- Bugfix: Better handling of alpha channel. [Martin Raspaud]
- Bugfix: filewatcher would wait a long time if no new file has come. [Martin Raspaud]
- Bugfix: netcdf saving didn't record lat and lon correctly. [Martin Raspaud]
- Bugfix: netcdf saving didn't work if only one value was available. [Martin Raspaud]
- Bugfix: test_mipp had invalid proj parameters. [Martin Raspaud]
- Bugfix: satellite vs satname again. [Martin Raspaud]
- Bugfix: project now chooses mode automatically by default. [Martin Raspaud]
- Bugfix: move to jenkins.
  [Martin Raspaud]
- Bugfix: fixed unit test for projector reflecting the new mode handling. [Martin Raspaud]
- Bugfix: fixed None mode problem in projector. [Martin Raspaud]
- Bugfix: The default projecting mode now takes into account the types of the in and out areas. [Martin Raspaud]
- Bugfix: forgot the argument to wait in filewatcher. [Martin Raspaud]
- Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud]
- Bugfix: 0 reflectances were masked in aapp1b loader. [Martin Raspaud]
- Bugfix: corrected parallax values as no_data in msg products reading. [Martin Raspaud]
- Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud]
- Bugfix: Compatibility with nordrad was broken. [Martin Raspaud]
- Bugfix: forgot the argument to wait in filewatcher. [Martin Raspaud]
- Bugfix: forgot strptime = datetime.strptime when python > 2.5. [Martin Raspaud]
- Bugfix: corrected parallax values as no_data in msg products reading. [Martin Raspaud]
- Bugfix: individual channel areas are preserved when assembled together. [Martin Raspaud]
- Bugfix: cleanup tmp directory when conversion to lvl 1b is done. [Martin Raspaud]
- Bugfix: remove hardcoded paths in hrpt and eps lvl 1a. [Martin Raspaud]
- Bugfix: use mpop's main config path. [Martin Raspaud]
- Bugfix: added python 2.4 compatibility. [Martin Raspaud]
- Bugfix: allow all masked array as channel data. [Martin Raspaud]
- Better support for channel-bound areas. [Martin Raspaud]
- Bugfix: 0 reflectances were masked in aapp1b loader. [Martin Raspaud]
- Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud]
- Bugfix: error checking on area_extent for loading. [Martin Raspaud]
- Bugfix: non-loaded channels should not induce computation of projection. [Martin Raspaud]
- Bugfix: thin modis didn't like area extent and was locked in 2010... [Martin Raspaud]
- Bugfix: Compatibility with nordrad was broken. [Martin Raspaud]
- Bugfix: fixed matching in git command for version numbering. [Martin Raspaud]
- Bugfix: Negative temperatures (in K) should not be valid data when reading aapp1b files. [Martin Raspaud]
- Bugfix: remove hudson from tags when getting version. [Martin Raspaud]
- Bugfix: fixed hdf inconsistencies with the old pyhl reading of msg ctype and ctth files. [Martin Raspaud]
- Bugfix: Updated code and tests to validate unittests. [Martin Raspaud]
- Bugfix: data reloaded even if the load_again flag was False. [Martin Raspaud]
- Bugfix: updated tests for disappearance of avhrr.py. [Martin Raspaud]
- Bugfix: access to CompositerClass would fail if using the old interface. [Martin Raspaud]
- Bugfix: typesize for msg's ctth didn't please pps... [Martin Raspaud]
- Bugfix: fixed data format (uint8) in msg_hdf. [Martin Raspaud]
- Bugfix: wrong and forgotten instantiations. [Martin Raspaud]
- Bugfix: crashing on missing channels in mipp loading. [Martin Raspaud]
- Bugfix: forgot to pass along area_extent in mipp loader. [Martin Raspaud]
- Bugfix: fixing integration test (duck typing). [Martin Raspaud]
- Bugfix: pyresample.geometry is loaded lazily for area building. [Martin Raspaud]
- Bugfix: Updated unit tests. [Martin Raspaud]
- Bugfix: Last change introduced empty channel list for meteosat 09. [Martin Raspaud]
- Bugfix: Last change introduced empty channel list for meteosat 09. [Martin Raspaud]
- Bugfix: update unittests for new internal implementation.
  [Martin Raspaud]
- Bugfix: compression argument was wrong in satelliteinstrumentscene.save. [Martin Raspaud]
- Bugfix: adapted mpop to new equality operation in pyresample. [Martin Raspaud]
- Bugfix: More robust config reading in projector and test_projector. [Martin Raspaud]
- Bugfix: updated the msg_hrit (nwclib based) reader. [Martin Raspaud]
- Bugfix: swath processing was broken, now fixed. [Martin Raspaud]
- Bugfix: corrected the smaller msg globe area. [Martin Raspaud]
- Bugfix: Erroneous assumption on the position of the 0,0 lon lat in the seviri frame led to many wrong things. [Martin Raspaud]
- Bugfix: introduced bugs with last changes. [Martin Raspaud]
- Bugfix: new area extent for EuropeCanary. [Martin Raspaud]
- Bugfix: Updated setup.py to new structure. [Martin Raspaud]
- Bugfix: updated integration test to new structure. [Martin Raspaud]
- Bugfix: more verbose crashing when building extensions. [Martin Raspaud]
- Bugfix: corrected EuropeCanary region. [Martin Raspaud]
- Bugfix: made missing areas message in projector more informative (includes missing area name). [Martin Raspaud]
- Bugfix: Added missing import in test_pp_core. [Martin Raspaud]
- Bugfix: fixing missing import in test_scene. [Martin Raspaud]
- Bugfix: geotiff images were all saved with the wgs84 ellipsoid even when another was specified... [Martin Raspaud]
- Bugfix: Corrected the formulas for area_extent computation in geos view. [Martin Raspaud]
- Bugfix: satellite number in cf proxy must be an int. Added also instrument_name. [Martin Raspaud]
- Bugfix: Erroneous on-the-fly area building. [Martin Raspaud]
- Bugfix: geo_image: gdal_options and tags were [] and {} by default, which is dangerous. [Martin Raspaud]
- Bugfix: Support for new namespace for osr. [Martin Raspaud]
- Bugfix: remove double test in test_channel. [Martin Raspaud]
- Bugfix: showing channels couldn't handle masked arrays. [Martin Raspaud]
- Bugfix: Scene tests were wrong in project. [Martin Raspaud]
- Bugfix: when loading only CTTH or CloudType, the region name was not defined. [Martin Raspaud]
- Bugfix: in test_channel, Channel constructor needs an argument. [Martin Raspaud]
- Bugfix: in test_cmp, tested GenericChannel instead of Channel. [Martin Raspaud]
- Bugfix: Test case for channel initialization expected the wrong error when wavelength argument was of the wrong size. [Martin Raspaud]
- Bugfix: Added length check for "wavelength" channel init argument. [Martin Raspaud]
- Bugfix: test case for channel resolution did not follow previous patch allowing real resolutions. [Martin Raspaud]
- Bugfix: thin modis lon/lat are now masked arrays. [Martin Raspaud]
- Bugfix: in channel constructor, wavelength triplet was not correctly checked for type. [Martin Raspaud]
  Just the min wavelength was checked three times.

Other
~~~~~
- Update changelog. [Martin Raspaud]
- Bump version: 0.1.0 → 0.2.0. [Martin Raspaud]
- Fix version number. [Martin Raspaud]
- Do not fill lon and lat masks with random values. [Martin Raspaud]
- Fix AHI reading for new rayleigh correction. [Martin Raspaud]
- Add some modifiers for AHI. [Martin Raspaud]
- Adjust to requesting rayleigh correction by wavelength. [Martin Raspaud]
- Add rayleigh modifier to visir. [Martin Raspaud]
- Add angles reading to nc_olci. [Martin Raspaud]
- Add pyspectral's generic rayleigh correction. [Martin Raspaud]
- Fix cosmetics in scene.py. [Martin Raspaud]
- Remove memmap from eps_l1b, use fromfile instead.
  [Martin Raspaud]
  This was triggering a `Too many open files` error since the memmap was called for every scanline.
- Fix loading for datasets with no navigation. [Martin Raspaud]
- Read start and end time from filename for eps_l1b. [Martin Raspaud]
  This avoids opening every file just for time checks.
- Rename file handler's get_area to get_lonlats. [davidh-ssec]
  There is now a get_area_def and get_lonlats method on individual file handlers
- Fix start/end/area parameters in FileYAMLReader. [davidh-ssec]
- Move start_time, end_time, area parameters to reader init instead of load. [davidh-ssec]
  Scenes do not change start_time, end_time, area after init so neither should readers. Same treatment is probably needed for 'sensors'.
- Fix avhrr reading. [Martin Raspaud]
- Add amsr2 composite config file. [Martin Raspaud]
- Adjust OLCI reader for reflectance calibration. [Martin Raspaud]
- Delete old reader .cfg config files that are no longer used. [davidh-ssec]
- Add forgotten OMPS yaml file. [davidh-ssec]
- Convert OMPS reader from .cfg/INI to YAML. [davidh-ssec]
- Provide better warning message when specified reader can't be found. [davidh-ssec]
- Clean up class declarations in viirs l1b yaml. [davidh-ssec]
- Fix VIIRS L1B inplace loading. [davidh-ssec]
- Remove duplicate units definition in nucaps reader. [davidh-ssec]
- Add standard_name and units to nucaps reader. [davidh-ssec]
- Convert nucaps reader to yaml. [davidh-ssec]
- Remove `dskey` from reader dataset ID dictionary. [davidh-ssec]
  The section name for each dataset was not used except to uniquely identify one dataset 'variation' from another similar dataset. For example you could technically have two sections for each calibration of a single dataset. YAML would require a different section name for each of these, but it is not used inside of satpy's readers because the `name` and DatasetID are used for that purpose.
- Rename 'navigation' section in reader configs to 'navigations' [davidh-ssec]
  More consistent and grammatically correct with file_types and datasets
- Rename 'corrector' and 'correction' modifiers to 'corrected' [davidh-ssec]
  Modifier names are applied to DatasetIDs so it was decided that 'corrected' may sound better in the majority of cases than 'corrector'.
- Add .info dictionary to SwathDefinition created by YAML Reader. [davidh-ssec]
- Fix standard_name of natural_color composite for VIIRS. [davidh-ssec]
- Add ratio sharpened natural color for VIIRS. [davidh-ssec]
- Rename VIIRSSharpTrueColor to RatioSharpenedRGB. [davidh-ssec]
  This includes making the ratio sharpened true color the default for VIIRS under the name 'true_color'
- Fix tuple expansion in sunz corrector. [davidh-ssec]
- Rename I and DNB angle datasets to reflect M band naming. [davidh-ssec]
- Allow including directories in file patterns. [Martin Raspaud]
- Add navigation to olci reader. [Martin Raspaud]
- Add support for OLCI format reading. [Martin Raspaud]
- Cleanup SunZenithCorrector. [Martin Raspaud]
- Remove some TODOs. [Martin Raspaud]
- Fix some seviri composites. [Martin Raspaud]
- Add mipp config file for MSG3. [Martin Raspaud]
  This is needed by mipp when the mipp_hrit reader is used.
- Remove `if True` from viirs sharp true color. [davidh-ssec]
- Fix small bug in scene when dataset isn't found in a reader. [davidh-ssec]
- Update VIIRS sharpened true color to be more flexible when upsampling. [davidh-ssec]
- Refactor composite config loading to allow interdependent modifiers. [Martin Raspaud]
- Add configuration files for HRIT H8 loading.
  [Martin Raspaud]
- Pass platform_name to mipp for prologue-less hrit formats. [Martin Raspaud]
- Provide satellite position information on load (HSD) [Martin Raspaud]
- Put AHI HSD reflectances in % [Martin Raspaud]
  They were between 0 and 1 by default
- Fix AHI HSD nav dtype. [Martin Raspaud]
  lon ssp and lat ssp were swapped
- Adjust correct standard names for seviri calibration. [Martin Raspaud]
- Fix Seviri CO2 correction buggy yaml def. [Martin Raspaud]
- Fix sunz corrector with different resolutions. [davidh-ssec]
  Includes fix to make sure composites from user-land will overwrite builtin composites.
- Update VIIRS L1B LUT variable path construction to be more flexible. [davidh-ssec]
- Add recursive dict updating to yaml reader configs. [davidh-ssec]
  Before this only the top level values would be updated as a whole, which wasn't really the intended function of having multiple config files.
- Fix coords2area_def with rounding of x and y sizes. [Martin Raspaud]
- Fix cos zen normalisation (do not use datetime64) [Martin Raspaud]
- Fix start and end time format to use datetime.datetime. [Martin Raspaud]
- Add IMAPP file patterns to HDFEOS L1B reader. [davidh-ssec]
- Fix hdfeos_l1b due to missing get_area_def method. [davidh-ssec]
  The HDFEOS file handlers weren't inheriting the proper base classes
- Add sunz_corrector modifier to viirs_sdr reader. [davidh-ssec]
- Fix available_dataset_names when multiple file types are involved. [davidh-ssec]
  Also includes a cleanup of the available_dataset_names by not providing duplicates (from multiple calibrations and resolutions)
- Allow multiple file types in yaml reader. [davidh-ssec]
- Add VIIRS SDR M-band angles and DNB angles. [davidh-ssec]
- Add VIIRS SDR reader back in [WIP] [davidh-ssec]
  I've added all the M and I bands, but need to add DNB and the various angle measurements that we use a lot. Also need to add the functionality to load/find the geolocation files from the content in the data files.
- Add reader_name and composites keywords to all/available_dataset_names methods. [davidh-ssec]
- Fix available_dataset_ids and all_dataset_ids methods. [davidh-ssec]
  There are now `(all/available)_dataset_(ids/names)` methods on the Scene object. Includes a fix for available composites.
- Fix multiple load calls in Scene. [davidh-ssec]
  This isn't technically a supported feature, but it was a simple fix to get it to work for my case.
- Fix compositor loading when optional_prerequisites are more than a name. [davidh-ssec]
- Update coord2area_def to be in sync with the mpop version. [Martin Raspaud]
- Fix seviri.yaml for new prerequisite syntax. [Martin Raspaud]
- Fix EPSG info in geotiffs. [Martin Raspaud]
- Adjust crefl for python 3 compatibility. [Martin Raspaud]
- Merge branch 'new_prereq_syntax' into feature-yaml. [Martin Raspaud]
  Conflicts:
  etc/composites/viirs.yaml
  etc/composites/visir.yaml
  satpy/composites/__init__.py
  satpy/scene.py
- Add support for new prerequisite syntax. [Martin Raspaud]
- Got VIIRS L1B true color working. [davidh-ssec]
  Still needs work on sharpened true color when I01 is used for ratio sharpening.
- Remove unneeded quotes for python names in yaml files. [Martin Raspaud]
- Merge branch 'feature-ahi-no-navigation' into feature-yaml. [Martin Raspaud]
  Conflicts:
  etc/composites/viirs.yaml
  satpy/readers/yaml_reader.py
- Add viirs composites. [Martin Raspaud]
- Fix the area_def concatenation. [Martin Raspaud]
- Mask nan in ir calibration for ahi hsd. [Martin Raspaud]
- Fix out of place loading, by not using a shuttle.
- Make get_area_def a default method of file_handlers. [Martin Raspaud]
- Allow file handler to provide area defs instead of swath. [Martin Raspaud]
  This is enabled by implementing the `get_area_def` method in the file handler.
- Optimize AHI reading using inplace loading. [Martin Raspaud]
  Navigation is switched off for now.
- Allow area loading for the data file handlers. [Martin Raspaud]
- Use a named tuple to pass data, mask and info dict for inplace loading. [Martin Raspaud]
- Fix AreaID name to AreaID. [Martin Raspaud]
- Fix AreaID name to AreaID. [Martin Raspaud]
- Add moon illumination fraction and DNB enhancements for VIIRS. [davidh-ssec]
  MIF needed some edits to how the reader works since it returns a Dataset (no associated navigation).
- Add other basic datasets to VIIRS L1B. [davidh-ssec]
  I only had I01 and I04 for testing; now it has all I, M, and DNB datasets.
- Add enhancements configuration directory to the setup.py data_files. [davidh-ssec]
- Complete AHI HSD reader. [Martin Raspaud]
- Fix missing dependency and python3 compatibility in ahi_hsd. [Martin Raspaud]
- Add skeleton for Himawari AHI reading. [Martin Raspaud]
- Add a NIR reflectance modifier using pyspectral. [Martin Raspaud]
- Add some metadata to projectables in viirs compact. [Martin Raspaud]
- Fix optional prerequisites loading. [Martin Raspaud]
- Raise an IncompatibleArea exception on RGBCompositor. [Martin Raspaud]
- Look for local files even if base_dir and filenames are missing. [Martin Raspaud]
- Allow empty scene creation when neither filenames nor base_dir is provided. [Martin Raspaud]
- Handle incompatible areas when reading composites. [Martin Raspaud]
- Remove dead code. [Martin Raspaud]
- Add debug information in viirs compact. [Martin Raspaud]
- Get dataset key from calibration in correct order. [Martin Raspaud]
- Raise exception when no files are found. [Martin Raspaud]
- Add DNB to viirs compact. [Martin Raspaud]
- Remove old mpop legacy files. [Martin Raspaud]
- Make viirs_compact python 3 compatible. [Martin Raspaud]
- Move xmlformat.py to the readers directory, and remove a print statement. [Martin Raspaud]
- Fix EPSG projection definition saving to geotiff. [Martin Raspaud]
- Remove python 3 incompatible syntax (Tuple Parameter Unpacking) [Martin Raspaud]
- Fix crefl further to lower memory consumption. [Martin Raspaud]
- Avoid raising an error when no files are found. [Martin Raspaud]
  Instead, a warning is logged.
- Remove unused code from readers/__init__.py. [Martin Raspaud]
- Cleanup style. [Martin Raspaud]
- Fix unittests. [Martin Raspaud]
- Deactivate viirssdr testing while migrating to yaml. [Martin Raspaud]
- Refactor parts of compact viirs reader. [Martin Raspaud]
- Optimize memory for crefl computation. [Martin Raspaud]
- Allow sunz corrector to be provided the sunz angles. [Martin Raspaud]
- Make chained modifiers work. [Martin Raspaud]
- Cleanup style. [Martin Raspaud]
- Add a crefl modifier for viirs. [Martin Raspaud]
- Add loading of sun-satellite/sensor viewing angles to aapp-l1b reader. [Adam.Dybbroe]
- Add sensor/solar angles loading to compact viirs reader. [Martin Raspaud]
- Allow modifier or composites sections to be missing from config. [Martin Raspaud]
- Fix some composites. [Martin Raspaud]
- Port VIIRS Compact M-bands to yaml. [Martin Raspaud]
- Add modifiers feature. [Martin Raspaud]
  Now modifiers can be added to the prerequisites as dictionaries.
- Add standard_names to channels in mipp_xrit. [Martin Raspaud]
- Add a NC4/CF writer. [Martin Raspaud]
- Use YAML instead of CFG for composites. [Martin Raspaud]
- Rename wavelength_range to wavelength in reader configs. [davidh-ssec]
  Also rewrote other yaml configs to use new dict identifiers.
- Add YAML based VIIRS L1B reader (I01 and I04 only) [davidh-ssec]
- Allow dict identifiers in reader's datasets config. [davidh-ssec]
  Some metadata (standard_name, units, etc) are dependent on the calibration, resolution, or other identifying piece of info. Now these make it easier to fully identify a dataset and the multiple ways it may exist. This commit also includes small fixes for how `get_shape` is called and fixes for the netcdf4 handler to match past changes.
- Fix numpy warnings when assigning to masked arrays. [davidh-ssec]
- Add pyyaml to setup.py requires. [davidh-ssec]
- Make base file handler an abstract base class. [davidh-ssec]
  Also changed start_time and end_time to properties of the file handlers.
- Make AbstractYAMLReader an actual ABCMeta abstract class. [davidh-ssec]
- Fix ReaderFinder when all provided filenames have been found. [davidh-ssec]
  Also fixed mipp_xrit reader which was providing the set of files that matched rather than the set of files that didn't match. Added start and end time to the xrit reader too.
- Rename YAMLBasedReader to FileYAMLReader. [davidh-ssec]
  As in it is a YAML based reader that accepts files where a dataset is not separated among multiple files.
- Merge remote-tracking branch 'origin/feature-yaml' into feature-yaml. [davidh-ssec]
- Port EPS l1b reader to yaml. [Martin Raspaud]
- Combine areas also in combine_info. [Martin Raspaud]
- Port mipp xrit reader to yaml. [Martin Raspaud]
- Split YAMLBasedReader to accommodate derivatives. [Martin Raspaud]
  Some file formats split a dataset on multiple files, a situation which is not covered by the YAMLBasedReader. Since some parts of the class are still valid in this situation, we split the class to avoid code duplication, using subclassing instead.
- Add hrpt reader. [Martin Raspaud]
- Change AMSR2 L1B reader config to be 2 spaces instead of 4. [davidh-ssec]
- Remove uncommented blank lines from scene header. [Martin Raspaud]
- Allow filenames to be an empty set and still look for files. [Martin Raspaud]
- Reorganize imports in mipp reader. [Martin Raspaud]
- Beautify resample.py. [Martin Raspaud]
- Use uncertainty flags to mask erroneous data. [Martin Raspaud]
- Optimize the loading by caching 3b flag. [Martin Raspaud]
- Stack the projectable keeping the mask. [Martin Raspaud]
- Avoid datasets being requested multiple times. [Martin Raspaud]
- Fix aapp1b to work again. [Martin Raspaud]
- Use area ids to carry navigation needs. [Martin Raspaud]
- Get the hdfeos_l1b reader to work again. [Martin Raspaud]
- Add yaml files to setup.py included data files. [davidh-ssec]
- Move start/end/area filtering to reader init. [davidh-ssec]
  This includes moving file handler opening to the `select_files` method.
- Add combine_info method to base file handlers. [davidh-ssec]
  I needed a way to let file handlers (written by reader developers) have control over how extra metadata is combined among all of the "joined" datasets of a swath. This should probably be a classmethod, but I worry that may complicate customization and there is always a chance that instance variables may control this behavior.
- Add more AMSR2 metadata to loaded datasets. [davidh-ssec]
- Change exception to warning when navigation information can't be loaded. [davidh-ssec]
- Move reader check to earlier in the file selection process. [davidh-ssec]
  The code was looking through each reader config file, instantiating each one, then running the `select_files` method only to return right away when the instantiated reader's name didn't equal the user's requested reader. This was a lot of wasted processing and will get worse with every new reader that's added.
- Rename amsr2 reader to amsr2_l1b. [davidh-ssec]
- Add AMSR2 36.5 channel. [davidh-ssec]
- Fix reader finder so it returns when not asked for anything. [davidh-ssec]
  Resampling in the Scene object requires making an empty Scene. There was an exception being raised because the reader finder was trying to search for files in path `None`.
- Add initial AMSR2 L1B reader (yaml) [davidh-ssec]
- Make lons/lats for SwathDefinition into masked arrays. [davidh-ssec]
- Rewrite the yaml based reader loading methods. [davidh-ssec]
  Lightly tested.
- Rename utility file handlers and move base file handlers to new module. [davidh-ssec]
  The base file handlers being in yaml_reader could potentially cause a circular dependency. The YAML Reader loads a file handler which subclasses one of the base handlers which are in the same module as the yaml reader.
- Fix filename_info name in file handler. [davidh-ssec]
  Oops.
- Pass filename info to each file handler. [davidh-ssec]
  There is a lot of information collected while parsing filenames that wasn't being passed to file handlers; now it is. This commit also includes renaming the generic file handler's (hdf5, netcdf) data cache to `file_content` because `metadata` was too generic IMO.
- Finish merge of develop to yaml branch. [davidh-ssec]
  Started merging develop and a few things didn't make it all the way over cleanly.
- Remove redundant log message. [davidh-ssec]
- Fix reader keyword argument name change. [davidh-ssec]
  Also raise an exception if no readers are created.
- Merge branch 'develop' into feature-yaml-amsr2. [davidh-ssec]
  # Conflicts:
  #   etc/readers/aapp_l1b.yaml
  #   satpy/readers/__init__.py
  #   satpy/readers/aapp_l1b.py
  #   satpy/scene.py
- Add OMPS so2_trm dataset. [davidh-ssec]
- Rename "scaling_factors" to "factor" in reader configuration. [davidh-ssec]
- Merge branch 'feature-omps-reader' into develop. [davidh-ssec]
- Add simple OMPS EDR Reader. [davidh-ssec]
- Clean up various reader methods. [davidh-ssec]
  In preparation for OMPS reader.
- Move HDF5 file wrapper to new hdf5_utils.py. [davidh-ssec]
- Add the multiscene module to combine satellite datasets. [Martin Raspaud]
  The multiscene class adds the possibility to blend different datasets together, given a blend function.
- Add a test yaml-based reader for aapp1b. [Martin Raspaud]
- Fix manually added datasets not being resampled. [davidh-ssec]
- Merge pull request #8 from davidh-ssec/feature-ewa-resampling. [David Hoese]
  Feature ewa resampling
- Update EWA resampler to use new wrapper functions from pyresample. [davidh-ssec]
- Move resample import in resample tests. [davidh-ssec]
  The resample module import now happens inside the test so only the resample tests fail instead of halting all unittests.
- Fix resample test from moved resample import. [davidh-ssec]
  The 'resample' method imported at the top of projectable.py was moved to inside the resample method to avoid circular imports. The resample tests were still patching the global import. Now they modify the original function. I also imported unittest2 in a few modules to be more consistent.
- Fix bug in EWA output array shape. [davidh-ssec]
- Add initial EWA resampler. [davidh-ssec]
- Move resample imports in Projectable to avoid circular imports. [davidh-ssec]
- Rename `reader_name` scene keyword to `reader` [davidh-ssec]
  Also make it possible to pass an instance of a reader or reader-like class. Renaming is similar to how `save_datasets` takes a `writer` keyword.
- Fix loading aggregated viirs sdr metadata. [davidh-ssec]
  Aggregated VIIRS SDR files have multiple `Gran_0` groups with certain attributes and data, like G-Ring information. Loading these in a simple way is a little more complex than the normal variable load and required adding a new metadata join method.
- Refix reader_info reference in yaml base reader. [davidh-ssec]
  This fix got reverted in the last commit for some reason.
- Add support for modis l1b data. [Martin Raspaud]
- Edit the wishlist only when needed. [Martin Raspaud]
- Add MODIS l1b reader, no geolocation for now. [Martin Raspaud]
- Assign right files to the reader. [Martin Raspaud]
  No file matching was done, resulting in assigning all found files to all readers.
- Fix reader_info reference in yaml base reader. [davidh-ssec]
- Keep channels in the wishlist when necessary. [Martin Raspaud]
  Due to the creation of a DatasetID for each dataset key, the wishlist wasn't matching the actual ids of the datasets.
- Adapt reading to the yaml reader way. [Martin Raspaud]
  Since there is more delegating of tasks to the reader, the reading has to be adapted.
- Cleanup using pep8. [Martin Raspaud]
- Allow yaml files as config files. [Martin Raspaud]
- Add the dependency tree based reading. [Martin Raspaud]
- Update the yamlbased aapp reader. [Martin Raspaud]
- Move the hdfeos reader to the readers directory. [Martin Raspaud]
- Add the multiscene module to combine satellite datasets. [Martin Raspaud]
  The multiscene class adds the possibility to blend different datasets together, given a blend function.
- Add a test yaml-based reader for aapp1b. [Martin Raspaud]
- Fix netcdf dimension use to work with older versions of netcdf-python library. [davidh-ssec]
- Add 'iter_by_area' method for easier grouping of datasets in special resampling cases. [davidh-ssec]
- Fix bug when resampling is done for specific datasets. [davidh-ssec]
  This fix addresses the case when resampling is done for a specific set of datasets. The compute method will attempt to create datasets that don't exist after resampling. Since we didn't resample all datasets it will always fail. This commit only copies the datasets that were specified in resampling. It is up to the user to care for the wishlist if not using the default (resample all datasets).
- Add dimensions to collected metadata for netcdf file wrapper. [davidh-ssec]
  I needed to use VIIRS L1B like I do VIIRS SDR for some GTM work and needed to copy over some of the metadata. One piece was only available as a global dimension of the NC file so I made it possible to ask for dimensions similar to how you can for attributes.
- Fix crefl searching for coefficients by dataset name. [davidh-ssec]
- Fix combining info when metadata is a numpy array. [davidh-ssec]
- Fix incorrect NUCAPS quality flag masking data. [davidh-ssec]
- Add .gitignore with python and C patterns. [davidh-ssec]
- Add 'load_tests' for easier test selection. [davidh-ssec]
  PyCharm and possibly other IDEs don't really play well with unittest TestSuites, but work as expected when `load_tests` is used.
- Fix resample hashing when area has no mask. [davidh-ssec]
- Add test for scene iter and fix it again. [davidh-ssec]
- Fix itervalues usage in scene for python 3. [davidh-ssec]
- Allow other array parameters to be passed to MaskedArray through Dataset. [davidh-ssec]
- Fix viirs l1b reader to handle newest change in format (no reflectance units) [davidh-ssec]
- Fix bug in crefl compositor not respecting input data type. [davidh-ssec]
- Fix NUCAPS H2O_MR Dataset to get proper field from file. [davidh-ssec]
- Add environment variable SATPY_ANCPATH for crefl composites. [davidh-ssec]
- Fix config files being loaded in the correct (reverse) order. [davidh-ssec]
  INI config files loaded from ConfigParser should be loaded in the correct order so that users' custom configs overwrite the builtin configs. For that to happen the builtin configs must be loaded first. The `config_search_paths` function had this backwards, but the compositor loading function was already reversing them. This commit puts the reverse in the config function.
- Update setup.py to always require pillow and not import PIL. [davidh-ssec]
  It seems that in older versions of setuptools (or maybe even easy_install), importing certain libraries in setup.py causes an infinite loop and eats up memory until it gets killed by the kernel.
- Change NUCAPS H2O to H2O_MR to match name in file. [davidh-ssec]
- Add quality flag filtering to nucaps reader. [davidh-ssec]
- Change default units for NUCAPS H2O to g/kg. [davidh-ssec]
- Add filtering by surface pressure to NUCAPS reader. [davidh-ssec]
- Fix composite prereqs not being removed after use. [davidh-ssec]
- Update metadata combining in viirs crefl composite. [davidh-ssec]
- Perform the sharpening on unresampled data if possible. [Martin Raspaud]
- Set the default zero height to the right shape in crefl. [Martin Raspaud]
- Fix bug in viirs composites when combining infos. [davidh-ssec]
- Add the cloudtop composite for viirs. [Martin Raspaud]
- Merge pull request #7 from davidh-ssec/feature-crefl-composites. [David Hoese]
  Feature crefl composites
- Remove ValueError from combine_info for one argument. [davidh-ssec]
- Add info dictionary to Areas created in the base reader. [davidh-ssec]
- Modify `combine_info` to work on multiple datasets. [davidh-ssec]
  Also updated a few VIIRS composites as test usages.
- Add angle datasets to viirs l1b for crefl true color to work. [davidh-ssec]
- Cleanup crefl code a bit. [davidh-ssec]
- Add sunz correction to CREFL compositor. [davidh-ssec]
  First attempt at adding modifiers to composites, but this method of doing it probably won't be used in the future. For now we'll keep it.
- Fix bug in Scene where composite prereqs aren't removed after resampling. [davidh-ssec]
- Rename VIIRS SDR solar and sensor angle datasets. [davidh-ssec]
- Update crefl true color to pan sharpen with I01 if available. [davidh-ssec]
- Fix crefl utils to use resolution and sensor name to find coefficients. [davidh-ssec]
- Fix Dataset `mask` keyword being passed to MaskedArray. [davidh-ssec]
- Remove filling masked values in crefl utils. [davidh-ssec]
- Fix crefl composite when given percentage reflectances. [davidh-ssec]
- Add basic crefl compositor. [davidh-ssec]
- Clean up crefl utils and rename main function to run_crefl. [davidh-ssec]
- Fix crefl utils bug and other code cleanup. [davidh-ssec]
- Add M band solar angles and sensor/satellite angles. [davidh-ssec]
- Add `datasets` keyword to save_datasets to more easily filter by name. [davidh-ssec]
- Make crefl utils more pythonic. [davidh-ssec]
- Add original python crefl code from Ralph Kuehn. [davidh-ssec]
- Fix the viirs truecolor composite to keep mask info. [Martin Raspaud]
- Allow composites to depend on other composites. [Martin Raspaud]
  In the case of true color with crefl corrected channels for example, the true color needs to depend on 3 corrected channels, which in turn can now be composites.
- Add Scene import to __init__ for convenience. [davidh-ssec]
- Add composites to 'available_datasets' [davidh-ssec]
  Additionally have Scene try to determine what sensors are involved if they weren't specified by the user.
- Add proper "available_datasets" checks in config based readers. [davidh-ssec]
- Move config utility functions to separate `config.py` module. [davidh-ssec]
- Fix the 'default' keyword not being used when checking the config dir environment variable. [davidh-ssec]
- Add H2O dataset to NUCAPS reader. [davidh-ssec]
- Merge pull request #6 from davidh-ssec/feature-nucaps-reader. [David Hoese]
  Add NUCAPS retrieval reader
- Cleanup code according to quantifiedcode. [davidh-ssec]
  Removed instances of checking length for 0, not using .format for strings, and various other code cleanups in the readers.
- Add documentation to various reader functions including NUCAPS reader. [davidh-ssec]
- Fix bug when filtering NUCAPS datasets by pressure level. [davidh-ssec]
- Add initial NUCAPS retrieval reader. [davidh-ssec]
- Move netcdf file handler class to separate module from VIIRS L1B reader. [davidh-ssec]
  Also prepare generic reader for handling other dimensions besides 2D.
- Document the __init__.py files also. [Martin Raspaud]
- Mock scipy and osgeo to fix doc generation problems. [Martin Raspaud]
- Mock more imports for doc building. [Martin Raspaud]
- Remove deprecated doc files. [Martin Raspaud]
- Mock trollsift.parser for documentation building. [Martin Raspaud]
- Update the doc conf.py file to mock trollsift. [Martin Raspaud]
- Add satpy api documentation. [Martin Raspaud]
- Post travis notifications to #satpy. [Martin Raspaud]
- Fix a few deprecation warnings. [Martin Raspaud]
- Document a few Dataset methods. [Martin Raspaud]
- Fix div test skip in py3. [Martin Raspaud]
- Skip the Dataset __div__ test in python 3. [Martin Raspaud]
- Implement numeric type methods for Dataset. [Martin Raspaud]
  In order to merge or keep metadata for Dataset during arithmetic operations we need to implement the numeric type methods.
- Cleanup unused arguments in base reader. [davidh-ssec]
  Also makes _load_navigation public by renaming it to load_navigation to resolve some quantifiedcode code checks.
- Add documentation to setup.py data file function. [davidh-ssec]
- Fix call to netcdf4's set_auto_maskandscale in viirs l1b reader. [davidh-ssec]
- Fix setup.py to find all reader, writer, composite configs. [davidh-ssec]
- Merge pull request #5 from davidh-ssec/feature-viirs-l1b. [David Hoese]
  Add beta VIIRS L1B reader
- Add LZA and SZA to VIIRS L1B config for DNB composites. [davidh-ssec]
  To make certain DNB composites available I added DNB solar and lunar zenith angle as well as moon illumination fraction. This also required detecting units in the ERF DNB composite since it assumes a 0-1 range for the input DNB data.
- Remove debug_on from scene.py. [davidh-ssec]
- Fix reader not setting units. [davidh-ssec]
  The default for FileKey objects was None for "units". This means that `setdefault` would never work properly.
- Fix config parser error in python 3. [davidh-ssec]
  I tried to make typing easier by using interpolation (substitution) in the VIIRS L1B reader config, but changing from RawConfigParser to ConfigParser breaks things in python 3. I changed it back in this commit and did the config the "long way" with some find and replace.
- Add DNB and I bands to VIIRS L1B reader. [davidh-ssec]
- Fix brightness temperature M bands for VIIRS L1B. [davidh-ssec]
- Add M bands to VIIRS L1B reader. [davidh-ssec]
- Fix VIIRS L1B masking with valid_max. [davidh-ssec]
- Add initial VIIRS L1B reader. [davidh-ssec]
  Currently only supports M01.
- Revert test_viirs_sdr to np 1.7.1 compatibility. [Martin Raspaud]
- Fix gring test in viirs_sdr. [davidh-ssec]
- Add gring_lat and gring_lon as viirs_sdr metadata. [davidh-ssec]
  Also added join_method `append_granule` as a way to keep each granule's data separate.
- Fix composite kd3 resampling. [Martin Raspaud]
  3d array masks were not precomputed correctly, so we now make a workaround. A better solution is yet to be found.
- Fix kd3 precomputation for AreaDefinitions. [Martin Raspaud]
  The lons and lats attributes aren't defined by default in AreaDefs, so we now make sure to call the get_lonlats method.
- Set default format for dataset saving to geotiff. [Martin Raspaud]
- Move `save_datasets` logic from Scene to base Writer. [davidh-ssec]
- Fix bug in resample when geolocation is 2D. [davidh-ssec]
  The builtin 'any' function works for 1D numpy arrays, but raises an exception when 2D numpy arrays are provided, which is the usual case for sat imagery.
- Allow geotiff creation with no 'area' [davidh-ssec]
  Geotiff creation used to depend on projection information from the `img.info['area']` object, but it is perfectly legal to make a TIFF image with GDAL by not providing this projection information. This used to raise an exception; now it just warns.
- Merge pull request #1 from pytroll/autofix/wrapped2_to3_fix. [Martin Raspaud]
  Fix "Consider dict comprehensions instead of using 'dict()'" issue
- Use dict comprehension instead of dict([...]) [Cody]
- Merge pull request #2 from pytroll/autofix/wrapped2_to3_fix-0. [Martin Raspaud]
  Fix "Explicitly number replacement fields in a format string" issue
- Explicitly numbered replacement fields. [Cody]
- Merge pull request #3 from pytroll/autofix/wrapped2_to3_fix-1. [Martin Raspaud]
  Fix "Use `is` or `is not` to compare with `None`" issue
- Use `is` operator for comparing with `None` (Pep8) [Cody]
- Merge pull request #4 from pytroll/autofix/wrapped2_to3_fix-2. [Martin Raspaud]
  Fix "Consider an iterator instead of materializing the list" issue
- Use generator expression with any/all. [Cody]
- Fix resample test for python 3. [Martin Raspaud]
  The dict `keys` method returns views in py3. We now convert to list for consistency.
- Add a test case for resample caching. [Martin Raspaud]
- Revert resample cache changes. [Martin Raspaud]
  They didn't seem necessary in the way resampling is called.
- Rename to satpy. [Martin Raspaud]
- Remove the world_map.ascii file. [Martin Raspaud]
- Allow compressed files to be checked by hrit reader. [Martin Raspaud]
- Add number of scans metadata to viirs sdr config. [davidh-ssec]
  Also fixed rows_per_scan being a string instead of an integer when loaded from a navigation section.
- Fix bug that removed most recent cached kdtree. [davidh-ssec]
  Nearest neighbor resampling caches multiple kdtree results and cleans up the cache when there are more than CACHE_SIZE items stored.
  It was incorrectly cleaning out the most recent key instead of the oldest key.
- Fix bug when nearest neighbor source geo definition needs to be copied. [davidh-ssec]
- Fix bug when specifying what datasets to resample. [davidh-ssec]
- Move geolocation mask blending to resampling step. [davidh-ssec]
  The mask for geolocation (longitude/latitude) was being OR'd with the mask from the first dataset being loaded in the reader. This was ignoring the possibility that other loaded datasets will have different masks since AreaDefinitions are cached. This blending of the masks was moved to nearest neighbor resampling since it ignored other datasets' masks in the reader and is technically a limitation of the nearest neighbor resampling because the geolocation must be masked with the dataset mask for proper output. May still need work to optimize the resampling.
- Add spacecraft_position and midtime metadata to viirs_sdr reader. [davidh-ssec]
- Update changelog. [Martin Raspaud]
- Bump version: 1.1.0 → 2.0.0-alpha.1. [Martin Raspaud]
- Add config files for release utilities. [Martin Raspaud]
  We add the .bumpversion.cfg and .gitchangelog.rc for easy version bumping and changelog updates.
- Remove v from version string. [Martin Raspaud]
- Add str and repr methods for composites. [Martin Raspaud]
  This adds simple repr and str methods for compositors.
- Restructure the documentation for mpop2. [Martin Raspaud]
  This is an attempt to reorganize the documentation to prepare for mpop2. Old stuff has been taken away, and a fresh quickstart and api are now provided.
- Improve the ReaderFinder ImportError message to include original error. [Martin Raspaud]
  To make the ImportError more useful in ReaderFinder, the original error string is now provided.
- Fix save_dataset to allow both empty filename and writer. [Martin Raspaud]
  When saving a dataset without a filename and writer, save_dataset would crash. Instead, we now set writer to "simple_image" in that case.
- Rename projectable when assigning it through setitem. [Martin Raspaud]
  When a new dataset is added to a scene, its name should match the string key provided by the user.
- Remove references to deprecated mpop.projector. [Martin Raspaud]
- Allow resample to receive strings as area identifiers. [Martin Raspaud]
  In resample, the interactive user would most likely use pre-defined areas from a custom area file. In this case, it's much easier to refer to the area by name than to get the area definition object from the file. This patch allows the `resample` projectable method to work with string ids also.
- Add a dataset to wishlist when added with setitem. [Martin Raspaud]
  When adding a dataset to a scene via the datasetdict.__setitem__ method, it is likely that the user cares about this dataset. As such, it should be added to the wishlist in order not to get removed accidentally.
- Move composite loading out of Scene to mpop.composites. [Martin Raspaud]
  The loading of compositors was a part of the Scene object. However, it does not belong there, so we decided to move it out of Scene. The next logical place to have it is the mpop.composites module. As a counterpart, we now provide the `available_composites` method on the Scene to be able to figure out what we can generate.
- Fix the travis file to allow python 2.6 to fail. [Martin Raspaud]
- Allow travis to fail on python 2.6. [Martin Raspaud]
- Install importlib for travis tests on python 2.6. [Martin Raspaud]
- Add `behave` to the pip installations in travis. [Martin Raspaud]
- Add behaviour testing to travis and coveralls. [Martin Raspaud]
- Add behaviour tests for showing and saving datasets. [Martin Raspaud]
  Three scenarios were added, testing showing a dataset, saving a dataset, and bulk saving datasets (`save_datasets`).
- Fix loading behaviour tests. [Martin Raspaud]
  A little cleanup, and using builtin functions for getting the dataset_names.
- Fix DatasetDict's setitem to allow empty md in value. [Martin Raspaud]
  Sometimes a dataset/projectable doesn't have any info attached to it, eg because the dataset is synthetic. In these cases, setitem would crash. This is now fixed, and if a string is provided as a key in setitem it is used as a name if no better name is already there.
- Simplify dataset saving to disk. [Martin Raspaud]
  Saving datasets can now be done one by one. If a writer is not provided, it is guessed from the filename extension.
- Add a show method to the Scene class. [Martin Raspaud]
  That allows the user to interactively visualize the data.
- Add a default areas.def file. [Martin Raspaud]
- Fix the manifest file to include the config files. [Martin Raspaud]
- Add missing config files to setup.py. [Martin Raspaud]
- Fix setup.py to add cfg files. [Martin Raspaud]
  This is in order to make mpop work out of the box after a pip install.
- Add a behaviour test to find out the available dataset. [Martin Raspaud]
- Prevent crashing when a load requirement is not available. [Martin Raspaud]
  When requiring a band which isn't available, mpop would crash. This is now fixed and replaced by a warning in the log.
- Use behave to do higher level tests. [Martin Raspaud]
  Two small scenarios for testing the loading of the data are implemented now.
- Fix import error in scene. [davidh-ssec]
  A small refactor was done and then undone to move DatasetDict and DatasetID. This little import change wasn't properly cleaned up.
- Fix scene to work with "2 part" compositors and add pan sharpened true color composite as an example. [davidh-ssec]
- Added log message to pillow writer to say what filename it was saving to. [davidh-ssec]
- Handle optional dependencies for composites (not tested) [davidh-ssec]
- Activate the remaining viirs_sdr reader test cases. [Martin Raspaud]
- Remove the overview_sun TODO item. [Martin Raspaud]
- Fix the multiple load issue for composites. [Martin Raspaud]
  The composite loading would crash when several composites would be loaded one after the other. This was because composite config files were loaded partially but were considered loaded entirely. In order to fix this problem and make things simpler, we removed the composite config mechanism entirely, so that the composites are reloaded every time. This allows both changing the config on the fly and more resilience for multiple sensor cases, like when one sensor is loaded after another and the composites wouldn't get updated.
- Fix the name issue in sensor-specific composite requests. [Martin Raspaud]
  The read_composite_config was wrongly requiring that the provided names should be empty or None, making it not read the sensor config file at all. In turn that meant that generic composites were used instead of sensor-specific ones.
- Got metadata requests working for composites. [davidh-ssec]
- Use DatasetID in composite requirements instead of names and wavelengths only. [davidh-ssec]
- Adds ERF DNB composite and updates compositor base to allow for metadata and optional requirements although they are not completely used yet. [davidh-ssec]
- Added adaptive DNB product. [davidh-ssec]
- Fixed bug in scene when getting writer instance in save_images. [davidh-ssec]
- Fix the dataset str function to allow missing name and sensor keys. [Martin Raspaud]
- Add quickstart seviri to the documentation. [Martin Raspaud]
- Update the documentation. [Martin Raspaud]
- Add a get_writer function to the scene object. [Martin Raspaud]
- Updating dataset displaying. [Martin Raspaud]
- Add a fixme comment. [Martin Raspaud]
- Added histogram_dnb composite as a stepping stone for getting more complex composites added (ex. adaptive_dnb) [davidh-ssec]
- Can now retrieve channel with incomplete DatasetID instance. [Martin Raspaud]
- First try at loading metadata. [davidh-ssec]
- Added python 3.5 to travis tests and removed 3.x as allowed failures. [davidh-ssec]
- Added basic test for DatasetDict. [davidh-ssec]
- Refactored some file reader methods to properties to be more pythonic. [davidh-ssec]
- Viirs test case now works with python3, hopefully. [Martin Raspaud]
- Fixed file units for eps l1b reflectances. [davidh-ssec]
- Corrected frame indicator for eps l1b band 3a. [davidh-ssec]
- Updated eps l1b config with temporary calibration information. [davidh-ssec]
- First attempt at rewriting eps l1b reader to be more configurable (overkill?) [davidh-ssec]
- Renamed Scene projectables to datasets. [davidh-ssec]
- Updated eps l1b file reader to match base class. [davidh-ssec]
- Made generic single file reader an abstract base class and cleaned up viirs sdr tests. [davidh-ssec]
- Added a fixme comment. [Martin Raspaud]
- Enable python 3 and osx builds in travis. [Martin Raspaud]
- Config treatment for enhancements. [davidh-ssec]
- Update config handling for finding composites. [davidh-ssec]
- Small fix for dumb environment variable clear on tests. [davidh-ssec]
- First attempt at getting readers and writers using PPP_CONFIG_DIR as a supplement to builtin configs. [davidh-ssec]
- Fixed scene tests so they pass. [davidh-ssec]
- Added base_dir for finding input files and a separate base_dir kwarg on save_images. [davidh-ssec]
- Makes wishlist a set and should fix problems with multiple loads. [davidh-ssec]
- Fixed calibration and other DatasetID access in reader, hopefully. [davidh-ssec]
- Fix the xrit reader. [Martin Raspaud]
- Cleanup to prepare for handling calibration better. [davidh-ssec]
- Updated filtering based on resolution, calibration, and polarization. [davidh-ssec]
- Updated how readers create dataset info and dataset ids. [davidh-ssec]
- Added calibration to DatasetID (not used yet) and added helper method on DatasetDict for filtering retrieved items and keys. [davidh-ssec]
- Renamed BandID to DatasetID. [davidh-ssec]
- Better handling of loading composite dependencies... I think. [davidh-ssec]
- Got EPS L1B reader working again with readers being given BandID objects. [davidh-ssec]
- Fixed small bug with extra empty string being listed as reader file pattern. [davidh-ssec]
- Made DatasetDict accept non-BandID keys during setitem. [davidh-ssec]
- Fixed default file reader for the eps l1b reader. [davidh-ssec]
- A little more cleanup of unused code in viirs sdr. [davidh-ssec]
- More work on viirs sdr using base reader class. [davidh-ssec]
- Started using ConfigBasedReader as base class for VIIRS SDR reader. [davidh-ssec]
- Fixed failing scene tests. [davidh-ssec]
- Got viirs sdr reader working with namedtuple dataset keys. [davidh-ssec]
- Continue on python3 compatibility. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- WIP: Start python 3 support. [Martin Raspaud]
- Smoother transition in the sun zenith corrected imagery. [Martin Raspaud]
- Move reader discovery out of the scene and into mpop.readers. [Martin Raspaud]
  The class ReaderFinder was created for this purpose.
- Cleanup. [Martin Raspaud]
- Fix overview and natural composites. [Martin Raspaud]
- Make read and load argument lists consistent. [Martin Raspaud]
- Fix the M01 dataset definition in viirs_sdr.cfg. [Martin Raspaud]
- Fix some viirs composites. [Martin Raspaud]
- Fix viirs_sdr loading using start and end times. [Martin Raspaud]
- Introduce BandIDs to allow for more complex referencing of datasets. [Martin Raspaud]
  - Add the BandID namedtuple (name, wl, resolution, polarization)
  - Fix querying for compatibility with BandIDs
  - Fix existing readers for BandIDs
  Example usage from the user side:
    scn.load([BandID(wavelength=0.67, resolution=742),
              BandID(wavelength=0.67, resolution=371),
              "natural", "true_color"])
  BandIDs are now used internally as key for the scene's projectables dict.
- Add file keys to metop's getitem. [Martin Raspaud]
- Rename metop calibration functions. [Martin Raspaud]
- Add file keys for start and end times for metop. [Martin Raspaud]
- Merge the old eps l1b reader with the new one. [Martin Raspaud]
- More work on EPS l1b reader. [Martin Raspaud]
- Initial commit for the metop eps l1b reader. [Martin Raspaud]
- New attempt at calibration keyword in viirs sdr reader. [davidh-ssec]
- Renamed 'channel' to 'dataset' [davidh-ssec]
- Added more tests for VIIRS SDR readers before making calibration or file discovery changes. [davidh-ssec]
- Use "super" in the readers. [Martin Raspaud]
- Hopefully fixed py2.6 incompatibility in string formatting. [davidh-ssec]
- Added viirs sdr tests for MultiFileReader and HDF5MetaData. [davidh-ssec]
- More viirs sdr file reader tests. [davidh-ssec]
- Simple proof of concept for calibration level in viirs sdr reader. [davidh-ssec]
- Fixed getting end orbit from last file reader in viirs sdr reader. [davidh-ssec]
- Use unittest2 in viirs sdr tests so we can use new features. [davidh-ssec]
- Added unittest2 to py26 travis build to hopefully fix h5py importerror. [davidh-ssec]
- Added h5py and hdf5 library to travis. [davidh-ssec]
- Started adding basic VIIRS SDR reader tests. [davidh-ssec]
- Changed scene to accept sequence instead of *args. [davidh-ssec]
- Merge branch 'feature-simplify-newreader' into feature-simplify. [davidh-ssec]
- Added simple method for finding geolocation files based on header values. [davidh-ssec]
- Added rows per scan to viirs sdr metadata. [davidh-ssec]
- Got units and file units working for VIIRS SDR reader. [davidh-ssec]
- Cleaner code for viirs sdr scaling factor check and made sure to OR any previous masks. [davidh-ssec]
- Better memory usage in new style viirs sdr reader. [davidh-ssec]
- First step in proof of concept with new reader design. Mostly working VIIRS SDR frontend. [davidh-ssec]
- Fixed get_area_file in the resample.py module. [davidh-ssec]
- Allowed sensor to be specified in the reader section. [davidh-ssec]
- Added method to base plugin to determine type of a section. [davidh-ssec]
- Make sunzenithnormalize a modern class. [Martin Raspaud]
- Add sunz correction feature. [Martin Raspaud]
- Avoid an infinite loop. [Martin Raspaud]
- Add travis notifications to slack. [Martin Raspaud]
- Remove unneeded code for composites. [Martin Raspaud]
- Add a few composites. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Allow json in enhancement config files. [Martin Raspaud]
- Switch on test for writers. [Martin Raspaud]
- Move tests for image stuff to corresponding test file. [Martin Raspaud]
- Move image stuff out of projectable into writers/__init__.py. [Martin Raspaud]
- Forgot to change reader/writer base class imports. [davidh-ssec]
- Moved reader and writer base classes to subpackages. [davidh-ssec]
- Reworked configuration reading in plugins for less redundancy. [davidh-ssec]
- Small fixes to make VIIRS SDR reader work with new resampling. [davidh-ssec]
- Fix the wishlist names and remove unneeded info when building RGB composites. [Martin Raspaud]
- Dataset is now a subclass of np.ma.MaskedArray. [Martin Raspaud]
- Move determine_mode to projectable. [Martin Raspaud]
- Add helper function to read config files and get the area def file. [Martin Raspaud]
- Rename precompute kwarg to cache_dir. [Martin Raspaud]
- Convenience enhancements for resample. [Martin Raspaud]
  - We can now provide "nearest" or "kdtree" instead of a resampler class.
  - The precompute/dump kwarg is now a directory in which to save the proj info, defaulting to '.' if precompute=True.
- Switch to containers in travis. [Martin Raspaud]
- Fix repo in .travis. [Martin Raspaud]
- Add OrderedDict for python < 2.7. [Martin Raspaud]
- Resample is now feature complete. [Martin Raspaud]
  - Dump kd_tree info to disk when asked.
  - Cache the kd_tree info for later use, but cache is cleaned up.
  - OO architecture allowing other resampling methods to be implemented.
  - Resampling is divided between pre- and actual computation.
  - Hashing of areas is implemented, resampler-specific.
- Fixed bad patch on new scene test. [davidh-ssec]
- First try at more scene tests. [davidh-ssec]
- Move image generation methods to Dataset and move enh. application to enhancer. [Martin Raspaud]
- Sensor is now either None, a string, or a non-empty set. [Martin Raspaud]
- Forgot to actually use default writer config filename. [davidh-ssec]
- Fixed simple scene test for checking ppp_config_dir. [davidh-ssec]
- Slightly better handling of default writer configs and writer arguments. [davidh-ssec]
- Add a writer for png images, and move enhancer to mpop.writers. [Martin Raspaud]
- Detached the enhancements handling into an Enhancer class. [Martin Raspaud]
- Pass ppp_config_dir to writer, still needs work. [davidh-ssec]
- First attempt at configured writers and all the stuff that goes along with it. Renamed 'format' in configs to more logical name. [davidh-ssec]
- Remove the add_product method. [Martin Raspaud]
- Cleanup scene unittest. [Martin Raspaud]
- Finish testing scene.get_filenames. [Martin Raspaud]
- Testing scene.get_filenames. [Martin Raspaud]
- Updated tests to test new string messages. 100%! [davidh-ssec]
- Merge branch 'pre-master' into feature-simplify. [Martin Raspaud]
  Conflicts: mpop/satellites/__init__.py, mpop/satin/helper_functions.py, mpop/satin/mipp_xrit.py
- Add algorithm version in output cloud products. [Martin Raspaud]
- Minor PEP8 tweaks. [Panu Lahtinen]
- Script to generate external calibration files for AVHRR instruments. [Panu Lahtinen]
- Support for external calibration coefficients for AVHRR. [Panu Lahtinen]
- Removed obsolete "satname" and "number" from satellite configs, updated documentation. [Panu Lahtinen]
- Renamed satellite configs to conform to OSCAR naming scheme. [Panu Lahtinen]
- Add luts to the pps products from msg format. [Martin Raspaud]
- Add metadata to nwcsaf products. [Martin Raspaud]
- Add \0 to palette strings. [Martin Raspaud]
- Fix pps format output for msg products. [Martin Raspaud]
- Remove phase palette from msg products to avoid confusion. [Martin Raspaud]
- Bugfix, np.string -> np.string_ [Martin Raspaud]
- Change variable length strings in h5 products to fixed. [Martin Raspaud]
- Fix some cloud product conversions. [Martin Raspaud]
- Fix MSG format to PPS format conversion. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Merge pull request #16 from pnuu/simplified_platforms. [Martin Raspaud]
  Simplified platform names for reading custom composites
- Simplified platform names for reading custom composites. [Panu Lahtinen]
- Change: accept arbitrary kwargs for saving msg hdf products. [Martin Raspaud]
- Revert concatenation to its original place, in order to keep the tests working. [Martin Raspaud]
- Fix whole globe area_extent for loading. [Martin Raspaud]
- Fix rpm building. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Change printing of projectables and cleanup. [Martin Raspaud]
- Start testing mpop.scene. [Martin Raspaud]
- Fixed assertIn for python 2.6. [davidh-ssec]
- Added more tests for projectables and updated projectable 3d resample test. 100% coverage of projectable! [davidh-ssec]
- Renamed .products to .compositors and fixed unknown names bug. [davidh-ssec]
- Added check to see what composite configs were read already. [davidh-ssec]
- Do not reread already loaded projectables. [Martin Raspaud]
- Complete .gitignore. [Martin Raspaud]
- Fix unittests for python 2.6. [Martin Raspaud]
- Unittesting again... [Martin Raspaud]
- More unittesting. [Martin Raspaud]
- Fix projectables str to look better. [Martin Raspaud]
- More unittesting. [Martin Raspaud]
- Fix unittests for python 2.6. [Martin Raspaud]
- Still cleaning up. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Add tests to the package list in setup.py. [Martin Raspaud]
- Make pylint happy. [Martin Raspaud]
- Fix tests for projectable to pass on 2.6. [Martin Raspaud]
- Start testing the new stuff in travis. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Renamed newscene to scene. [Martin Raspaud]
- Moved updated readers from mpop.satin to mpop.readers. [Martin Raspaud]
- Changed 'uid' to 'name' for all new components. [davidh-ssec]
- Moved composite configs to separate subdirectory. [davidh-ssec]
- Add an RGBCompositor class and cleanup. [Martin Raspaud]
- Allow passing "areas" to mipp_xrit. [Martin Raspaud]
- Fix the overview composite giving sensible defaults. [Martin Raspaud]
- Fixed bug with RGB composites with passing the wrong info keywords. [davidh-ssec]
- Changed sensor keyword in scene to reader and added new sensor keyword behavior to find readers based on sensor names. [davidh-ssec]
- Changed new style composites to use a list of projectables instead of the scene object; implemented __setitem__ for scene. [davidh-ssec]
- Reworked viirs and xrit reader to use .channels instead of .info. Simplified reader loading in newscene. [davidh-ssec]
- Test and fix projectable. [Martin Raspaud]
- Allow reading from wavelength, and add Meteosat HRIT support. [Martin Raspaud]
- Moved reader init to scene init. Successfully created resampled fog image using composite configs. [davidh-ssec]
- Added some default configs for new scene testing. [davidh-ssec]
- Started rewriting viirs sdr reader to not need scene and produce projectables. [davidh-ssec]
- Better config reading, and scene init. [Martin Raspaud]
- WIP: removed CONFIG_PATH and changed projectables list into dict. [davidh-ssec]
- Add resampling. Simple for now, with elementary caching. [Martin Raspaud]
- WIP. [Martin Raspaud]
  * Product dependencies
  * loading from viirs
  * generating images
- WIP: successfully loaded the first viirs granule with newscene! [Martin Raspaud]
- Rewriting scene. [Martin Raspaud]
- Add helper function to find files. [Martin Raspaud]
- Fix the config eval thing in scene. [Martin Raspaud]
- Fix masking of lonlats in viirs_sdr. [Martin Raspaud]
- Fixing pps-nc reader. [Adam Dybbroe]
- Clean temporary files after loading. [Adam Dybbroe]
- Pep8 stuff. [Adam Dybbroe]
- Fixed polar-stereographic projection bugs, thanks to Ron Goodson. [Lars Orum Rasmussen]
- Update changelog. [Martin Raspaud]
- Bump version: 1.0.2 → 1.1.0. [Martin Raspaud]
- Put config files in etc/pytroll. [Martin Raspaud]
- Fix version strings. [Martin.Raspaud]
- Don't close the h5 files too soon. [Martin Raspaud]
- Close h5 file upon reading. [Adam Dybbroe]
- Bugfix. [Adam Dybbroe]
- Try a more clever handling of the case where more level-1b files exist for given sat and orbit. [Adam Dybbroe]
- Print out files matching in debug. [Martin Raspaud]
- Bugfix. [Adam Dybbroe]
- Adding debug info. [Adam Dybbroe]
- Bugfix. [Adam Dybbroe]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Remove ugly print statements. [Martin Raspaud]
- Load the palettes also. [Martin Raspaud]
- AAPP1b: use operational coefficients for vis calibrating per default. [Martin Raspaud]
  - Fallback to pre-launch if not available.
  - load(..., pre_launch_coeffs=True) to force using pre-launch coeffs.
- Correct npp name in h5 files. [Martin Raspaud]
- Add the pps v2014 h5 reader. [Martin Raspaud]
- Use h5py for lonlat reading also. [Martin Raspaud]
- Use h5py instead of netcdf for reading nc files. [Martin Raspaud]
- Fix orbit as int in nc_pps loader. [Martin Raspaud]
- Add overlay from config feature. [Martin Raspaud]
- Remove type testing for orbit number. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Allowing kwargs. [Martin Raspaud]
- Add 10 km to the area extent on each side, to avoid tangent cases. [Martin Raspaud]
- Orbit doesn't have to be a string anymore. [Martin Raspaud]
- Fix multiple file loading for metop l1b data. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Implement save for all cloudproducts. [Martin Raspaud]
- Change option names to cloud_product_* and add lookup in os.environ. [Martin Raspaud]
- Some fixes to nc_pps_l2 for correct saving. [Martin Raspaud]
- Add saving to the cloudtype object. [Martin Raspaud]
- Add the save method to cloudtype object. [Martin Raspaud]
- Rename _md attribute to mda. [Martin Raspaud]
- Mask out bowtie deleted pixels for Suomi-NPP products. [Martin Raspaud]
- When a file is provided in nc_pps_l2, just read this file. [Martin Raspaud]
- Fix nc_pps_l2 for filename input and PC readiness. [Martin Raspaud]
- ViirsSDR: Fix not to crash on single file input. [Martin Raspaud]
- Fix aapp1b to be able to run both for given filename and config. [Martin Raspaud]
- Try loading according to config if provided file doesn't work, aapp1b. [Martin Raspaud]
- Don't crash when reading non aapp1b file. [Martin Raspaud]
- Remove "/" from instrument names when loading custom composites. [Martin Raspaud]
- Don't say generate lon lat when returning a cached version. [Martin Raspaud]
- Nc_pps_l2: don't crash on multiple files, just go through them one at a time. [Martin Raspaud]
- Hdfeos: don't just exit when filename doesn't match, try to look for files. [Martin Raspaud]
- Don't crash if the file doesn't match (hdfeos) [Martin Raspaud]
- Revert nc_reader back until generalization is ready. [Martin Raspaud]
- Merge branch 'ppsv2014-reader' of github.com:mraspaud/mpop into ppsv2014-reader. [Martin Raspaud]
- Adding dataset attributes to pps reading. [Adam Dybbroe]
- Allow inputting filename in the nc_pps_l2 reader. [Martin Raspaud]
- Merge branch 'pre-master' into ppsv2014-reader. [Martin Raspaud]
- Viirs readers fixes. [Martin Raspaud]
- Hdf_eos now uses 1 out of 4 available cores to interpolate data. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Fixed bug, now handling fill_value better. [Lars Orum Rasmussen]
- More robust tiff header file decoder. [Lars Orum Rasmussen]
- Add dnb_overview as a standard product (dnb, dnb, 10.8) [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Corrected the reader for SAFNWC/PPS v2014. [Sara.Hornquist]
- Allow multiresolution loading in hdf eos reader. [Martin Raspaud]
- Revert back to old nwcsaf-pps reader for hdf. The reading of the new netcdf format is done with another reader! [Adam Dybbroe]
- A new pps reader for the netCDF format of v2014. [Adam Dybbroe]
- Adding for new cloudmask and type formats... [Adam Dybbroe]
- Enhance nwc-pps reader to support v2014 format. [Adam Dybbroe]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Put the config object back in Projector. [Martin Raspaud]
- Fix area_file central search. [Martin Raspaud]
- Move the area_file search inside Projector. [Martin Raspaud]
- Error when satellite config file is not found. [Martin Raspaud]
- Get rid of the funky logging style. [Martin Raspaud]
- Log the config file used to generate the scene. [Martin Raspaud]
- Support filename list to load in viirs_sdr loader. [Martin Raspaud]
- Add avhrr/3 as alias to avhrr in aapp reader. [Martin Raspaud]
- Fix name matching in hdfeos_l1b. [Martin Raspaud]
  The full name didn't work with fnmatch, take basename instead.
- Allows hdfeos_l1b to read a batch of files. [Martin Raspaud]
- Add delitem, and code cleanup. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Added a reader for SAFNWC/PPS v2014. PPS v2014 has a different file format than previous SAFNWC/PPS versions. [Sara.Hornquist]
- Aapp1b reader, be more clever when (re)reading. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
  Conflicts: mpop/satout/netcdf4.py
- Allow reading several files at once in viirs_compact. [Martin Raspaud]
- Allow reading several files at once in eps_l1b. [Martin Raspaud]
- Style: use `in` instead of has_key() [Martin Raspaud]
- Adding primitive umarf (native) format reader for meteosat. [Martin Raspaud]
- Add logging when an info field can't be saved to netcdf. [Martin Raspaud]
- Add a name to the area when loading aapp data. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- For PNG files, geo_image.tags will be saved as PNG metadata. [Lars Orum Rasmussen]
- Add a save method to cfscene objects. [Martin Raspaud]
- Don't take None as a filename in loading avhrr data. [Martin Raspaud]
- Allow loading a file directly for aapp1b and eps_l1b. [Martin Raspaud]
  Just run global_data.load(..., filename="/path/to/myfile.1b")
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Viirs_sdr can now load depending on an area. [Martin Raspaud]
- Pep8 cosmetics. [Adam Dybbroe]
- Merge pull request #12 from pnuu/pre-master. [Martin Raspaud]
  Fixed "logger" to "LOGGER"
- Fixed "logger" to "LOGGER" [Panu Lahtinen]
- Moving pyspectral module import down to function where pyspectral is used. [Adam Dybbroe]
- Merge branch 'smhi-premaster' into pre-master. [Adam Dybbroe]
- Fixing cloudtype product: palette projection. [Adam Dybbroe]
- Turned on debugging to geo-test. [Adam Dybbroe]
- Added debug printout for cloud product loading. [Adam Dybbroe]
- Make snow and microphysics transparent. [Martin Raspaud]
- Rename day_solar to snow. [Martin Raspaud]
- Keep the name of cloudtype products when projecting. [Martin Raspaud]
- Explicitly load parallax corrected files if present. [Martin Raspaud]
- Adding logging for MSG cloud products loading. [Martin Raspaud]
- Fix the parallax file sorting problem, again. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Bugfix. [Adam Dybbroe]
- Merge branch '3.9reflectance' into pre-master. [Adam Dybbroe]
  Conflicts: mpop/channel.py, mpop/instruments/seviri.py, mpop/satin/mipp_xrit.py, setup.py
- Support for rgbs using the seviri 3.9 reflectance (pyspectral) [Adam Dybbroe]
- Adding a sun-corrected overview rgb. [Adam Dybbroe]
- Adding for "day microphysics" RGB. [Adam Dybbroe]
- Deriving the day-solar RGB using pyspectral to derive the 3.9 reflectance. [Adam Dybbroe]
- Use "imp" to find input plugins. [Martin Raspaud]
- Cleanup trailing whitespaces. [Martin Raspaud]
- Use cartesian coordinates for lon/lat computation in near-pole situations. [Martin Raspaud]
- Set alpha channel to the same type as the other channels. [Martin Raspaud]
- Sort the filenames in get_best_products (msg_hdf) [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Merge pull request #10 from pnuu/pre-master. [Martin Raspaud]
  Fixed failed merging. Thanks Pnuu.
- Fixed failed merging (removed "<<<<<<< HEAD" and ">>>>>>> upstream/pre-master" lines) [Panu Lahtinen]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Fix terra and aqua templates for the dual gain channels (13 & 14) [Adam Dybbroe]
- Read both parallax corrected and usual cloudtype products. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Merge pull request #9 from pnuu/pre-master. [Martin Raspaud]
  Possibility to get area_extent from area definition(s)
- Tests for mpop.satin.helper_functions.boundaries_to_extent. [Panu Lahtinen]
- Separated area definitions and boundary calculations. [Panu Lahtinen]
- Added test if proj string is in '+' format or not. [Panu Lahtinen]
- Re-ordered the tests. [Panu Lahtinen]
- Fixed incorrect correct values. [Panu Lahtinen]
- Test using area definitions instead of definition names. [Panu Lahtinen]
- Possibility to also give area definition objects to area_def_names_to_extent() and log a warning if the area definition is not used. [Panu Lahtinen]
- Fixed import. [Panu Lahtinen]
- Added tests for mpop.satin.helper_functions. [Panu Lahtinen]
- Moved to mpop/tests/ [Panu Lahtinen]
- Moved to mpop/tests/ [Panu Lahtinen]
- Merge remote-tracking branch 'upstream/pre-master' into pre-master. [Panu Lahtinen]
  Conflicts: mpop/satin/aapp1b.py
- Removed unneeded functions. [Panu Lahtinen]
- Test for area_def_names_to_extent() [Panu Lahtinen]
- Removed unnecessary functions. [Panu Lahtinen]
- Removed swath reduction functions. [Panu Lahtinen]
- Reverted not to reduce swath data. [Panu Lahtinen]
- Added possibility to do data reduction based on target area definition names. [Panu Lahtinen]
- Added area extent calculations based on given area definition names. [Panu Lahtinen]
- Helper functions for area extent and boundary calculations, and data reducing for swath data. [Panu Lahtinen]
- Test for mpop.satin.mipp_xrit.lonlat_to_geo_extent() [Panu Lahtinen]
- Support for lon/lat-based area extents. [Panu Lahtinen]
- Add start and end time defaults for the images (runner). [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Lars Orum Rasmussen]
- Do not mask out negative reflectances in viirs_sdr reading. [Martin Raspaud]
- Added navigation to hrpt_hmf plugin. [Martin Raspaud]
- Started working on a new plugin version of hdfeos_l1b. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Adding scene tests to the test suite. [Martin Raspaud]
- Revamped scene unittests. [Martin Raspaud]
- Don't crash on errors. [Martin Raspaud]
- Revamped projector tests. [Martin Raspaud]
- More geo_image testing. [Martin Raspaud]
- Don't use "super" in geo_image. [Martin Raspaud]
- Fix testing. [Martin Raspaud]
- Mock pyresample and mpop.projector in geo_image tests. [Martin Raspaud]
- More testing geo_image. [Martin Raspaud]
- Add tests for geo_image. [Martin Raspaud]
- Merge branch 'unstable' of ssh://safe/data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud]
- Mock gdal for geo_image tests. [Martin Raspaud]
- Added netCDF read support for four more projections. [Adam Dybbroe]
- Adding support for eqc in cf format. [Adam Dybbroe]
- Added config templates for GOES and MTSAT. [Lars Orum Rasmussen]
- Copied visir.night_overview to seviri.night_overview, so night_overview.prerequisites is correct when night_overview is called from seviri.py. [ras]
- Cloudtop in seviri.py now same arguments as cloudtop in visir.py. [Lars Orum Rasmussen]
- Fix saving as netcdf. [Martin Raspaud]
- Fix floating point tiff saving. [Martin Raspaud]
- Make pillow a requirement only if PIL is missing. [Martin Raspaud]
- Add some modules to mock in the documentation. [Martin Raspaud]
- Add pyorbital to the list of packages to install in travis. [Martin Raspaud]
- Merge branch 'feature-travis' into unstable. [Martin Raspaud]
- Test_projector doesn't pass. [Martin Raspaud]
- Test_projector? [Martin Raspaud]
- Fix travis. [Martin Raspaud]
- Adding test_geoimage. [Martin Raspaud]
- Test_channel passes, test_image next. [Martin Raspaud]
- Test_pp_core crashes, test_channel on. [Martin Raspaud]
- Commenting out tests to find out the culprit. [Martin Raspaud]
- Ok, last try for travis-ci. [Martin Raspaud]
- What is happening with travis? [Martin Raspaud]
[Martin Raspaud] - More fiddling to find out why travis-ci complains. [Martin Raspaud] - Testing the simple test way (not coverage) [Martin Raspaud] - Trying to add the tests package for travis-ci. [Martin Raspaud] - Add the tests package. [Martin Raspaud] - Prepare for travis-ci. [Martin Raspaud] - Support 16-bit images (geotiff only at the moment). [Martin Raspaud] - Merge pull request #8 from pnuu/pre-master. [Martin Raspaud] Sun zenith angle correction added. - A section on mpop.tools added to documentation. [Panu Lahtinen] - Extra tests for sun_zen_corr(). [Panu Lahtinen] - Typo. [Panu Lahtinen] - Channel descriptions added. [Panu Lahtinen] - Channel descriptions are added. [Panu Lahtinen] - Clarification to help sunzen_corr_cos() description. [Panu Lahtinen] - Test cases for channel.sunzen_corr(). [Panu Lahtinen] - Sun zenith angle correction split into two functions. [Panu Lahtinen] - Revert to original version. [Panu Lahtinen] - Initial commit of mpop.tools (with Sun zenith angle correction). [Panu Lahtinen] - Sun zenith angle correction added. [Panu Lahtinen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [ras] - Solve the multiple channel resolution with automatic resampling radius. [Martin Raspaud] - Add the "nprocs" option to projector objects and scene's project method. [Martin Raspaud] - Now saving orbit number (if available) as global attribute. [ras] - Adding more files to be ignored. [ras] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [ras] - New reader for hrpt level0 format. [Martin Raspaud] - Fix no calibration reading for aapp1b. [Martin Raspaud] - Add the product name to the image info. [Martin Raspaud] - Add some debugging info about missing pixels in viirs_sdr. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Corrected a comment. [Adam Dybbroe] - Fix for M13 load problem - reported by stefano.cerino@gmail.com. [Adam Dybbroe] - Use number of scans to load the right amount of data in compact viirs reader. [Martin Raspaud] - Fix hook to be able to record both filename and uri. [Martin Raspaud] - Protecting MPOP from netcdf4's unicode variables. [ras] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Adding a new convection RGB with co2 correction for SEVIRI. [Adam Dybbroe] - Temporary hack to solve for hdf5 files with more than one granule per file. [Adam Dybbroe] - Removing messaging code from saturn and added a more generic "hook" argument. [Martin Raspaud] - Bumped up version. [Martin Raspaud] - Make viirs_compact scan number independent. [Martin Raspaud] - Cleanup: marking some deprecated modules, removing unfinished file, improving documentation. [Martin Raspaud] - Adding the ears-viirs compact format reader. Untested. [Martin Raspaud] - Code cleanup. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] Conflicts: mpop/imageo/geo_image.py - Night_color (should have been called night_overview) is the same as cloudtop. [Lars Orum Rasmussen] - Bug fix from Bocheng. [Lars Orum Rasmussen] - Night_overview is just like cloudtop. [Lars Orum Rasmussen] - Now also handling Polar satellites. [Lars Orum Rasmussen] - Cosmetic. [Lars Orum Rasmussen] - Fixed merge conflict. [Lars Orum Rasmussen] - Trying out a chlorophyll product. [Lars Orum Rasmussen] - Added a night overview composite.
[Lars Orum Rasmussen] - Better check for empty array. [Lars Orum Rasmussen] - Fix logging. [Martin Raspaud] - Fix backward compatibility in, and deprecate image.py. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Calling numpy percentile only once when doing left and right cut offs. [Adam Dybbroe] - Add support for identifying npp directories by time-date as well as orbit number. [Adam Dybbroe] - Fix histogram-equalization stretch test. [Adam Dybbroe] - Bugfix in histogram equalization function. [Adam Dybbroe] - Using percentile function to generate histogram with constant number of values in each bin. [Adam Dybbroe] - Using numpy.percentile function to cut the data in the linear stretch. [Adam Dybbroe] - Fix histogram stretch unit test. [Adam Dybbroe] - Correcting the histogram stretching. The com_histogram function was in error when asking for "normed" histograms. [Adam Dybbroe] - Added histogram method that makes a more populated histogram when the data are heavily skewed. Fixes problem seen by Bocheng in DNB imagery. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Don't remove GeolocationFlyweight _instances, but reset it. Allowing for multiple "loads" [Adam Dybbroe] - Add imageo.formats to installation. [Martin Raspaud] - AAPP loading bug fix. [Martin Raspaud] The aapp1b.py loader for aapp data was broken as it was loading both channels 3a and 3b each time, one of them being entirely masked. This of course created some problems further down. Fixed by setting the non-loadable channel to None. - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Bugfix in npp.cfg template. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Fixing bug concerning the identification of VIIRS geolocation files. Now the configuration specified in npp.cfg overwrites what is actually written in the metadata header of the band files. [Adam Dybbroe] - Make saturn posttroll capable. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Fixing test cases. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Remove dummy test to boost projection performance. [Martin Raspaud] Mpop was checking in 2 different places if the source and target areas were different, leading to pyresample expanding the area definitions to full lon/lat arrays when checking against a swath definition, and then running an allclose. This was inefficient, and the programming team decided that it was the user's task to know before projection if the source and target area were the same. In other words, the user should be at least a little smart. - Update channel list for modis lvl2. [Martin Raspaud] - Bump up version number: 1.0.0. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Added Ninjo tiff example area definitions.
[Lars Orum Rasmussen] - Cosmetic. [Lars Orum Rasmussen] - Ninjo tiff writer now handles singel channels. [Lars Orum Rasmussen] Ninjo tiff meta-data can now all be passed as arguments - Better documentation. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changing palette name to something more intuitive. Allow to have orbit number equals None. [Adam Dybbroe] - Fixing aqua/terra template config files for dual gain channels (13&14) [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Make overview consistent with the standard overview. [Adam Dybbroe] - Cleanup. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] Conflicts: etc/npp.cfg.template - Updated npp-template to fit the new viirs reader using the (new) plugin-loader system. [Adam Dybbroe] - Minor clean up. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] Conflicts: mpop/satin/viirs_sdr.py - Lunar stuff... [Adam Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Changed template to fit new npp reader. [krl] - Fix version stuff. [Martin Raspaud] - Merge branch 'feature-optimize_viirs' into unstable. [Martin Raspaud] - Make viirs_sdr a plugin of new format. [Martin Raspaud] - Finalize optimisation i new viirs reader. [Martin Raspaud] - Optimization ongoing. Mask issues. [Martin Raspaud] - Clarify failure to load hrit data. [Martin Raspaud] - Fix install requires. [Martin Raspaud] - Fix projector unit test. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Fixed (temporary ?) misuse of Image.SAVE. [Lars Orum Rasmussen] - Now config reader is a singleton. [Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Tmplate -> template. [Lars Orum Rasmussen] - Added support for saving in Ninjo tiff format. [Lars Orum Rasmussen] - Projector cleanup. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - New VIIRS reader. Better, faster, smarter (consumimg less memory) [Adam Dybbroe] - Fix area hashing. [Martin Raspaud] - Fix install dependency. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] Conflicts: doc/source/conf.py setup.py - Bump up version number for release. [Martin Raspaud] - Optimize. [Martin Raspaud] - Remove the optional ahamap requirement. [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Manage version number centrally. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Make old plugin an info instead of a warning. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Pep8. [Adam Dybbroe] - Merge branch 'aapp1b' into unstable. [Adam Dybbroe] - Don't mask out IR channel data where count equals zero. [Adam Dybbroe] - Fixing the masking of the ir calibrated Tbs - count=0 not allowed. 
[Adam Dybbroe] - Make also vis channels masked arrays. [Adam Dybbroe] - Checking if file format is post or pre v4 : If bandcor_2 < 0 we are at versions higher than 4 Masking a bit more strict. [Adam Dybbroe] - Now handle data without a mask and handling lons and lats without crashing. [Lars Orum Rasmussen] - Read signed instead of unsigned (aapp1b). [Martin Raspaud] - Style cleanup. [Martin Raspaud] - Adding calibration type as an option to the loader. So counts, radiances or tbs/refl can be returned. [Adam Dybbroe] - Better show and more cosmetic. [Lars Orum Rasmussen] - Making pylint more happy and some cosmetic. [Lars Orum Rasmussen] - No need to night_overview, use cloudtop with options. [Lars Orum Rasmussen] - Now IR calibration returns a masked array. [Lars Orum Rasmussen] - Added som options for overview image and added a night overview. [Lars Orum Rasmussen] - Finalize aapp1b python-only reader. [Martin Raspaud] - Working on a aapp l1b reader. [oananicola] - Starting a aapp1b branch for directly reading aapp's l1b files. [Lars Orum Rasmussen] - Adding a bit of debug info... [Adam Dybbroe] - Adding orbit number to the cloud mask object. [Adam Dybbroe] - Channel cleanup and tests. [Martin Raspaud] - Merge branch 'feature_plugins' into unstable. [Martin Raspaud] - Make orbit number an 5-character string (padding with '0') [Martin Raspaud] - New plugin implementation, backward compatible. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Add several cores for geoloc in eos. [Martin Raspaud] - Bugfix hdfeos. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Fix loading of terra aqua with multiple cores. [Martin Raspaud] - Add dust, fog, ash composites to VIIRS. [Martin Raspaud] - Enhance error messages. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Make orbit number an 5-character string (padding with '0') [Martin Raspaud] - New template files for regional EARS (AVHRR and NWC) file support. [Adam Dybbroe] - Minor cosmetics. [Adam Dybbroe] - Reverted to previous commit. [Lars Orum Rasmussen] - Correct green-snow. [Martin Raspaud] Use 0.6 instead on 0.8 - Merge branch 'fixrtd' into unstable. [Martin Raspaud] - Add pyresample to mock for doc building. [Martin Raspaud] - Get rid of the np.inf error in rtd. [Martin Raspaud] - Mock some import for the documentation. [Martin Raspaud] - Now, if specified in proj4 object, add EPGS code to tiff metadata. [Lars Orum Rasmussen] - Added, a poor man's version, of Adam's DNB RGB image. [Lars Orum Rasmussen] - Add symlink from README.rst to README. [Martin Raspaud] - Update download link and README. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Add template file for meteosat 10. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Support for calibrate option. [Adam Dybbroe] - Add debug messages to hdf-eos loader. [Martin Raspaud] - Support pnm image formats. [Martin Raspaud] - Introducing clip percentage for SAR average product. [Lars Orum Rasmussen] - The pps palette broke msg compatibility. Now there are two palettes, one for msg and one for pps. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] Conflicts: mpop/satin/viirs_sdr.py - Adapted viirs reader to handle aggregated granule files. 
[Adam Dybbroe] - Fixing nwcsaf-pps ctth height palette. [Adam Dybbroe] - Take better care of the path (was uri) argument. [Martin Raspaud] - Don't do url parsing in the hdfeos reader. [Martin Raspaud] - Fix unit tests. [Martin Raspaud] - Remove the deprecated append function in scene. [Martin Raspaud] - Return when not locating hdf eos file. [Martin Raspaud] - Remove raveling in kd_tree. [Martin Raspaud] - Make use of the new strftime in the viirs reader. [Martin Raspaud] - Add a custom strftime. [Martin Raspaud] This fixes a bug in windows that prevents running strftime on string that contain mapping keys conversion specifiers. - Catch the error if there is no file to load from. [Martin Raspaud] - Add a proper logger in hdfeos reader. [Martin Raspaud] - Get resolution from filename for eos data. [Martin Raspaud] - Introducing stretch argument for average product. [Lars Orum Rasmussen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Clean up. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Support passing a uri to hdfeos reader. [Martin Raspaud] - Fix the loading of BT for VIIRS M13 channel. [Martin Raspaud] Has no scale and offset - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Refactor the unsigned netcdf packing code. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Support packing data as unsigned in netcdf. [Martin Raspaud] - Replace auto mask and scale from netcdf4. [Martin Raspaud] Eats up too much memory. - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Feature: Added template for electro-l satellite. [Martin Raspaud] - Feature: taking care of missing data in the viirs reader, and allow for radiance retrieval. [Martin Raspaud] - Feature: last adjustments to new netcdf format. [Martin Raspaud] - Merge branch 'feature-netcdf-upgrade' into unstable. [Martin Raspaud] Conflicts: mpop/satout/cfscene.py mpop/satout/netcdf4.py - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] Conflicts: mpop/satin/mipp_xsar.py - Work on new netcdf format nearing completion. [Martin Raspaud] - Feature: wrapping up new netcdf format, cf-satellite 0.2. [Martin Raspaud] - Renamed some global attributes. [Martin Raspaud] - Netcdf: working towards better matching CF conventions. [Martin Raspaud] - WIP: NetCDF cleaning. [Martin Raspaud] - scale_factor and add_offset are now single values. - vertical_perspective to geos - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] - Group channels by unit and area. [Martin Raspaud] - Do not apply scale and offset when reading. [Martin Raspaud] - WIP: updating the netcdf interface. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changed handeling of "_FillValue"-attributes. Added find_FillValue_tags function to search for "_FillValue" attributes. The "_FillValue" attributes are used and set when variables are created. [Nina.Hakansson] - Cosmetics. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Fixing bug concerning viirs bandlist and the issue of preventing the loading of channels when only products are requested. [Adam Dybbroe] - Fixing VIIRS reader - does not try to read SDR data if you only want to load a product. 
Minor fixes in MODIS and AAPP1b readers. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Bugfix in viirs sdr reader. [Adam Dybbroe] - Added ir108 composite to Viirs. [Martin Raspaud] - RUN: add possibility to get prerequisites for a list of areas. [Martin Raspaud] - Updating area_id for the channel during viirs loading and assembling of segments. [Martin Raspaud] - Area handling in viirs and assembling segments. [Martin Raspaud] - Viirs true color should have a transparent background. [Martin Raspaud] - Added enhancements to the image.__call__ function. [Martin Raspaud] - Fixing runner to warn for missing functions (instead of crashing). [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satin/viirs_sdr.py - Bug fix green-snow RGB. [Adam Dybbroe] - Cleaning up a bit in viirs reader. [Adam Dybbroe] - Temporary fix to deal with scale-factors (in CLASS archive these are not tuples of 2 but 6). Taken from old fix in npp-support branch. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Support for bzip2 compressed NWCSAF products (EARS-NWC) [Adam Dybbroe] - More flexible viirs reading, and fixes to viirs composites. [Martin Raspaud] - Added a stereographic projection translation. [Lars Orum Rasmussen] - Added modist as valid name for 'eos1' [Lars Orum Rasmussen] - Added night_microphysics. [Lars Orum Rasmussen] - Added stretch option. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Feature: new function to create an image from a scene. [Martin Raspaud] - Fixed a new npp template config file, with geo_filename example. [Adam Dybbroe] - Adding 500meter scan area. [Adam Dybbroe] - Fixing bug in geolocation reading and removing old style viirs composite file. [Adam Dybbroe] - Using a template from configuration file to find the geolocation file to read - for all VIIRS bands. [Adam Dybbroe] - Fixed bug in hr_natural and added a dnb method. [Adam Dybbroe] - Fixing Bow-tie effects and geolocation for VIIRS when using Cloudtype. Needs to be generalised to all products! [Adam Dybbroe] - Support for tiepoint grids and interpolation + masking out no-data geolocation (handling VIIRS Bow-tie deletetion) [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Adding viirs composites and pps_odim reader for avhrr and viirs channel data in satellite projection (swath) [Adam Dybbroe] - Added a Geo Phys Product to modis level2. [Lars Orum Rasmussen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Adding support for ob_tran projection even though it is not cf- compatible yet. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Added the reading of geolocation data from the PPS formatet level1 file. [Adam Dybbroe] - Added Europe Mesan area to template. [Adam Dybbroe] - Feature: MSG hdf files are now used to determine the area. [Martin Raspaud] - Fixed error message. [Martin Raspaud] - Cleanup: clarified import error. [Martin Raspaud] - Cleanup: More descriptive message when plugin can't be loaded. [Martin Raspaud] - Raised version number. 
[Martin Raspaud] - More relevant messages in msg_hdf reading. [Martin Raspaud] - Adding a RGB for night condition. [Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Modis level-2 reader and netcdf writer can now handle scenes containing only geo-physical product (and no channels) [Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge pull request #2 from cheeseblok/FixViirsRedSnow. [Martin Raspaud] Fix typo in red_snow check_channels method - Fix typo in red_snow check_channels method. [Scott Macfarlane] - Feature: Pypi ready. [Martin Raspaud] - Bufix: updating to use python-geotiepoints. [Martin Raspaud] - Bumping up the version number for the next release. [Martin Raspaud] - Doc: updating add_overlay documentation. [Martin Raspaud] - Feature: adding interpolation to modis lon lats. [Martin Raspaud] - Use pynav to get lon/lats if no file can be read. [Martin Raspaud] - Hack to handle both level2 and granules. [Martin Raspaud] - Added the possibility to provide a filename to eps_l1b loader. [Martin Raspaud] - Updated npp confirg file template with geo_filename example. [Adam Dybbroe] - Merge branch 'feature_new_eps_reader' into unstable. [Martin Raspaud] - Added xml file to etc and setup.py. [Martin Raspaud] - Bugfix in geolocation assignment. [Martin Raspaud] - Allowing for both 3a and 3A. [Martin Raspaud] - Put xml file in etc. [Martin Raspaud] - New eps l1b is now feature complete. Comprehensive testing needed. [Martin Raspaud] - Added a new eps l1b reader based on xml description of the format. [Martin Raspaud] - Corrected longitude interpolation to work around datum shift line. [Martin Raspaud] - Cloudtype channel now called "CT". [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - SetProjCS is now correctly called after ImportFromProj4. [Lars Orum Rasmussen] Added SetWellKnownGeogCS if available - Merge branch 'pre-master' into unstable. [Martin Raspaud] Conflicts: mpop/satin/mipp_xsar.py - More correct 'new area' [Lars Orum Rasmussen] - Mipp restructure. [Lars Orum Rasmussen] - Merge branch 'pre-master' into area-hash. [Lars Orum Rasmussen] - Merge branch 'pre-master' into area-hash. [Lars Orum Rasmussen] - Now more unique projection filenames (using hash of areas) [Lars Orum Rasmussen] - Enhancements to pps hdf format readers. [Martin Raspaud] - Feature: added support for geotiff float format in geo_image. [Martin Raspaud] - Don't touch satscene.area if already present (mipp reading) [Martin Raspaud] - Feature: get best msg hdf file using area_extent. [Martin Raspaud] - Duck typing for channel assignation. [Martin Raspaud] - Fixed meteosat reading. [Martin Raspaud] - do not change the scene metadata when no channel is loaded - do not crash if no PGE is present - Added shapes in mpop.cfg.template for pycoast. [Martin Raspaud] - Cleanup. [Martin Raspaud] - New add_overlay function, using pycoast. [Martin Raspaud] - Added test for __setitem__ (scene) [Martin Raspaud] - Feature: add a global area if possible. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Fixing so thar also other products (than Channel data) can be assempled. [Adam.Dybbroe] - Adding data member to CloudType. [Adam.Dybbroe] - Added support for trucolor image from modis. [Adam.Dybbroe] - Cleaning up geo_image.py. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. 
[Martin Raspaud] Conflicts: mpop/satin/hdfeos_l1b.py - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Minor cosmetic/editorial stuff. [Adam.Dybbroe] - Small bugfix - viirs interface. [Adam.Dybbroe] - Feature: wrapping up hdfeos upgrade. [Martin Raspaud] - migrated data to float32 instead of float64 - support only geoloc a 1km resolution at the moment - adjust channel resolution to match loaded data - added template terra.cfg file. - Trimming out dead detectors. [Adam.Dybbroe] - WIP: hdf eos now reads only the needed channels, and can have several resolutions. Geoloc is missing though. [Martin Raspaud] - WIP: Started working on supporting halv/quarter files for modis. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changed MODIS HDF-EOS level 1b reader to accomodate both the thinned EUMETCasted data and Direct readout data. Changed name from thin_modis.py to hdfeos_l1b.py. Added filename pattern to config. [Adam.Dybbroe] - Fixing indexing bug: missing last line in Metop AVHRR granule. [Adam.Dybbroe] - Revert "Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable" [Martin Raspaud] This reverts commit 45809273f2f9670c8282c32197ef47071aecaa74, reversing changes made to 10ae6838131ae1b6e119e05e08496d1ec9018a4a. - Revert "Reapplying thin_modis cleaning" [Martin Raspaud] This reverts commit 52c63d6fbc9f12c03b645f29dd58250da943d24a. - Reapplying thin_modis cleaning. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Merge branch 'pre-master' into unstable. [Adam.Dybbroe] Conflicts: mpop/satin/eps_avhrr.py - Minor enhancements to nwcsaf pps cloud type reading: Adding support for phase and quality flags. [Adam.Dybbroe] - Fixing indexing bug: missing last line in Metop AVHRR granule. [Adam.Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] Conflicts: doc/source/conf.py mpop/instruments/mviri.py mpop/instruments/seviri.py mpop/instruments/test_mviri.py mpop/instruments/test_seviri.py mpop/instruments/test_visir.py mpop/instruments/visir.py mpop/satin/test_mipp.py mpop/satin/thin_modis.py mpop/saturn/runner.py mpop/scene.py setup.py version.py - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - Thin_modis Cleanup. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Style: Cleaning up. [Martin Raspaud] - Doc: added screenshots. [Martin Raspaud] - Cleanup, switch to compositer globaly. [Martin Raspaud] - Doc: added more documentation to polar_segments.py. [Martin Raspaud] - Cleanup: remove old unit test for assemble_swath. [Martin Raspaud] - Bugfix in assemble_segments. [Martin Raspaud] - Cleanup: removed old assemble_swath function. [Martin Raspaud] - Doc: update docstring for project. [Martin Raspaud] - Upgrade: assemble_segments now uses scene factory. [Martin Raspaud] - DOC: examples are now functional. [Martin Raspaud] - Cleanup: removed old plugins directory. [Martin Raspaud] - Merge branch 'new_plugins' into unstable. [Martin Raspaud] Conflicts: mpop/plugin_base.py - Init file for plugins initialization. [Adam.Dybbroe] - Merge branch 'new_plugins' of https://github.com/mraspaud/mpop into new_plugins. 
[Adam.Dybbroe] - Removing old deprecated and now buggy part - has been caught by the try-exception since long. Adding for plugins directory. [Adam.Dybbroe] - Corrected import bug. [Adam.Dybbroe] - Merge branch 'unstable' into new_plugins. [Adam.Dybbroe] - Bug correction - config file reading section 'format' [Adam.Dybbroe] - Removing old deprecated and now buggy part - has been caught by the try-exception since long. Adding for plugins directory. [Adam.Dybbroe] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - First time in git. [Adam.Dybbroe] - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - Meris level-2 reader - first commit. [Adam.Dybbroe] - Minor fixes. [Adam.Dybbroe] - Fixed typo. [Adam.Dybbroe] - Feature: updating mipp test to use factory. [Martin Raspaud] - Cleaning up an old print. [Martin Raspaud] - Merge branch 'v0.10.2-support' into unstable. [Martin Raspaud] - Feature: added support for new eumetsat names (modis) and terra. [Martin Raspaud] - Merge branch 'new_plugins' into unstable. [Martin Raspaud] - Moved mipp plugin back to satin. [Martin Raspaud] - Feature: all former plugins are adapted to newer format. [Martin Raspaud] - Style: finalizing plugin system. Now plugins directories loaded from mpop.cfg. [Martin Raspaud] - Cleanup: removing old stuff. [Martin Raspaud] - Feature: added reader plugins as attributes to the scene, called "_reader". [Martin Raspaud] - Feature: new plugin format, added a few getters and made scene reference weak. [Martin Raspaud] - New plugin system. [Martin Raspaud] Transfered the mipp plugin. - DOC: fixed path for examples. [Martin Raspaud] - DOC: Added documentation examples to the project. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Using LOG call instead of print. [Adam.Dybbroe] - Fixed missing LOG import. [Adam.Dybbroe] - Further improvements to MODIS level2 reader and processor. [Adam.Dybbroe] - Feature: Added projection to the pps_hdf channels. [Martin Raspaud] - DOC: added use examples in the documentation directory. [Martin Raspaud] - Merge branch 'master' into unstable. [Martin Raspaud] - Added posibility to have instrument_name in the filenames. [Adam.Dybbroe] - Making sure we pass on orbit number when projecting the scene. [Adam.Dybbroe] - Added colour map for Modis Chlorophyl-A product. [Adam.Dybbroe] - Taking away the alpha parameters for RGB modes. [Martin Raspaud] - Added areas in channels for test. [Martin Raspaud] - Added the radius parameter to runner. [Martin Raspaud] - Adding preliminary NWCSAF pps product reader. [Adam.Dybbroe] - Cleaning up. [Martin Raspaud] - Updated satpos file directories. [Martin Raspaud] - Cleaning up. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Updated copyright and version number. [Martin Raspaud] - Merge branch 'release-0.11' [Martin Raspaud] - Merge branch 'pre-master' into release-0.11. [Martin Raspaud] - Updated copyright dates in setup.py. [Martin Raspaud] - Bumped version number to 0.11.0. [Martin Raspaud] - Updating setup stuff. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Adding Day/Night band support. [Adam.Dybbroe] - Adding area for mapping sample data i-bands. [Adam.Dybbroe] - Scaling reflectances to percent (%) as required in mpop. [Adam.Dybbroe] - Adding support for I-bands. 
[Adam.Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam.Dybbroe] - Merge branch 'npp-support' into pre-master. [Adam.Dybbroe] - Renamed to npp1.cfg. [Adam.Dybbroe] - VIIRS composites - M-bands only so far. [Adam.Dybbroe] - Cleaning print statements. [Adam.Dybbroe] - NPP template. [Adam.Dybbroe] - Adding NPP/VIIRS test area for sample data: M-bands. [Adam.Dybbroe] - Adding I-band support. [Adam.Dybbroe] - Fixing for re-projection. [Adam.Dybbroe] - Various small corrections. [Adam.Dybbroe] - Corrected band widths - ned to be in microns not nm. [Adam.Dybbroe] - Support for NPP/JPSS VIIRS. [Adam.Dybbroe] - Updated copyright in sphinx doc. [Martin Raspaud] - Deprecating add_overlay in favor of pycoast. [Martin Raspaud] - Merge branch 'feature-new-nc-format' into unstable. [Martin Raspaud] - Added support for different ordering of dimensions in band data. [Martin Raspaud] Use the band_axis keyword argument. - NC reader support different dimension orderings for band-data. [Martin Raspaud] - NC: now band data is of shape (band, x, y). [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Now a channel can be added to a scene dynamically using dict notation. [esn] - Added units to aapp1b reader. [Martin Raspaud] - Deactivating mipp loading test. [Martin Raspaud] - Adjusted tests for compositer. [Martin Raspaud] - Merge branch 'feature-cleaning' into unstable. [Martin Raspaud] - Merge branch 'unstable' into feature-cleaning. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Added append function to scene.py. [Esben S. Nielsen] - New error message when no instrument-levelN section is there in the satellite config file. [Martin Raspaud] - Merge branch 'feature-radius-of-influence' into unstable. [Martin Raspaud] - Syntax bug fixed. [Martin Raspaud] - Made orbit number default to None for PolarFactory's create_scene. [Martin Raspaud] - Merge branch 'feature-radius-of-influence' into unstable. [Martin Raspaud] - Radius of influence is now a keyword parameter to the scene.project method. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Can now get reader plugin from PYTHONPATH. [Esben S. Nielsen] - Renamed asimage to as_image. [Martin Raspaud] - Wavelength and resolution are not requirements in config files anymore. [Martin Raspaud] - Merge branch 'feature-channel-to-image' into unstable. [Martin Raspaud] - Feature: added the asimage method to channels, to retrieve a black and white image from the channel data. [Martin Raspaud] - Merge branch 'feature-doc-examples' into unstable. [Martin Raspaud] - Doc: added more documentation to polar_segments.py. [Martin Raspaud] - DOC: examples are now functional. [Martin Raspaud] - DOC: fixed path for examples. [Martin Raspaud] - DOC: Added documentation examples to the project. [Martin Raspaud] - DOC: added use examples in the documentation directory. [Martin Raspaud] - Merge branch 'feature-project-mode' into unstable. [Martin Raspaud] - Doc: update docstring for project. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Switched seviri and mviri to compositer. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Style: Cleaning up. [Martin Raspaud] - Doc: added screenshots. [Martin Raspaud] - Cleanup, switch to compositer globaly. [Martin Raspaud] Conflicts: mpop/instruments/visir.py mpop/satin/hrpt.py mpop/saturn/runner.py - Cleanup: remove old unit test for assemble_swath. [Martin Raspaud] - Bugfix in assemble_segments. 
[Martin Raspaud] - Cleanup: removed old assemble_swath function. [Martin Raspaud] Conflicts: mpop/scene.py - Upgrade: assemble_segments now uses scene factory. [Martin Raspaud] - Fixed typo. [Adam.Dybbroe] - Feature: updating mipp test to use factory. [Martin Raspaud] - Cleaning up an old print. [Martin Raspaud] Conflicts: mpop/satin/mipp.py - Cleanup: removing old stuff. [Martin Raspaud] - Cleaned up and updated meteosat 9 cfg template further. [Martin Raspaud] - Updated templates to match pytroll MSG tutorial. [Esben S. Nielsen] - Simplified reading of log-level. [Lars Orum Rasmussen] - Proposal for reading loglevel from config file. [Lars Orum Rasmussen] - Cfscene now handles channels with all masked data. [Esben S. Nielsen] - Netcdf area fix. [Martin Raspaud] - Syle: copyright updates. [Martin Raspaud] - Modified the modis-lvl2 loader and extended a bit the cf-io interfaces. [Adam.Dybbroe] - First time in GIT A new reader for EOS-HDF Modis level-2 files from NASA. See http://oceancolor.gsfc.nasa.gov/DOCS/ocformats.html#3 for format description. [Adam.Dybbroe] - Added license. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Info needs to be an instance attribute. [Lars Orum Rasmussen] - Fix initialization of self.time_slot. [Lars Orum Rasmussen] - Merge branch 'v0.10.2-support' into unstable. [Martin Raspaud] - Added pyc and ~ files to gitignore. [Martin Raspaud] - Updated thin modis reader for new file name. [Martin Raspaud] - Merge branch 'v0.10.1-support' into unstable. [Martin Raspaud] - Compression and tiling as default for geotifs. [Martin Raspaud] - Merge branch 'v0.10.0-support' into unstable. [Martin Raspaud] - Feauture: support for qc_straylight. [Martin Raspaud] - Compression and tiling as default for geotifs. [Martin Raspaud] - WIP: attempting interrupt switch for sequential runner. [Martin Raspaud] - Feature: changing filewatcher from processes to threads. [Martin Raspaud] - Feauture: support for qc_straylight. [Martin Raspaud] - Compression and tiling as default for geotifs. [Martin Raspaud] - Update: modis enhancements. [Martin Raspaud] - Feature: filewatcher keeps arrival order. [Martin Raspaud] - Feature: concatenation loads channels. [Martin Raspaud] - Feature: use local tles instead of downloading systematically. [Martin Raspaud] - Feature: move pyaapp as single module. [Martin Raspaud] - Feature: added ana geoloc for hrpt and eps lvl 1a. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Added gatherer and two_line_elements. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Moved a parenthesis six characters to the left. [Lars Orum Rasmussen] - Feature: assemble_segments function, more clever and should replace assemble_swaths. [Martin Raspaud] - Feature: thin modis reader upgrade, with lonlat estimator and channel trimmer for broken sensors. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Netcdf bandname now only uses integer part of resolution. [Esben S. Nielsen] - Improvement: made resolution int in band names, for netcdf. [Martin Raspaud] - Cleaning. [Martin Raspaud] - WIP: ears. [Martin Raspaud] - Trying to revive the pynwclib module. [Martin Raspaud] - Cleaning. [Martin Raspaud] - Wip: polar hrpt 0 to 1b. [Martin Raspaud] - Feature: Added proj4 parameters for meteosat 7. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Cosmetic. [Esben S. 
Nielsen] - Now channels are read and saved in order. Optimized scaling during CF save. [Esben S. Nielsen] - Feature: Adding more factories. [Martin Raspaud] - Documentation: adding something on factories and area_extent. [Martin Raspaud] - Documentation: added needed files in setup.py. [Martin Raspaud] - Style: remove a print statement and an unused import. [Martin Raspaud] - Feature: Added natural composite to default composite list. [Martin Raspaud] - Feature: made compositer sensitive to custom composites. [Martin Raspaud] - Documentation: Upgraded documentation to 0.10.0. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - The RELEASE-VERSION file should not be checked into git. [Lars Orum Rasmussen] - Optimized parts of mpop. Fixed projector caching. [Esben S. Nielsen] - Optimized parts of mpop processing. Made projector caching functional. [Esben S. Nielsen] - Ignore build directory. [Lars Orum Rasmussen] - Check array in stretch_logarithmic. [Lars Orum Rasmussen] - Prevent adding unintended logging handlers. [Lars Orum Rasmussen] - Feature: Adding extra tags to the image allowed in local_runner. [Martin Raspaud] - Style: lines to 80 chars. [Martin Raspaud] - Merge branch 'unstable' [Martin Raspaud] - Feature: pps hdf loading and polar production update. [Martin Raspaud] - Style: cleanup. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satin/mipp.py - Fixed memory problems. Workaround for lazy import of pyresample. Now uses weakref for compositor. [Esben S. Nielsen] - Better logging in scene loading function. [Martin Raspaud] - Remove unneeded import. [Martin Raspaud] - New version. [Martin Raspaud] - Merge branch 'master' of github.com:mraspaud/mpop. [Lars Orum Rasmussen] - Feature: direct_readout chain in place. [Martin Raspaud] - Removing no longer needed avhrr.py. [Martin Raspaud] - Made scaling expression in cfscene.py nicer. [Esben S. Nielsen] - Corrected shallow copy problem with compositor. Simplyfied usage of GeostationaryFactory. [Esben S. Nielsen] - Feature: cleaner hdf reading for both pps and msg. [Martin Raspaud] - Stability: added failsafe in case no config file is there when loading. [Martin Raspaud] - Merge branch 'pps_hdf' into unstable. [Martin Raspaud] - Feature: Support area_extent in scene.load. [Martin Raspaud] - Feature: Cleaning and use the mipp area_extent and sublon. [Martin Raspaud] - Style: Allow to exclude all the *level? sections. [Martin Raspaud] - Redespached a few composites. [Martin Raspaud] - Style: cosmetics. [Martin Raspaud] - Feature: added the power operation to channels. [Martin Raspaud] - Removed the no longer needed meteosat09.py file. [Martin Raspaud] - Wip: iterative loading, untested. [Martin Raspaud] - More on versionning. [Martin Raspaud] - Merge branch 'unstable' into pps_hdf. [Martin Raspaud] - Feature: started working on the PPS support. [Martin Raspaud] - Spelling. [Martin Raspaud] - Added logarithmic enhancement. [Lars Orum Rasmussen] - Removed unneeded file. [Martin Raspaud] - Api: new version of mipp. [Martin Raspaud] - Added automatic version numbering. [Martin Raspaud] - Version update to 0.10.0alpha1. [Martin Raspaud] - Api: unload takes separate channels (not iterable) as input. [Martin Raspaud] - Doc: updated the meteosat 9 template config. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. 
[Martin Raspaud] Conflicts: mpop/satellites/meteosat09.py - Feature: Introduced compound satscene objects. [Martin Raspaud] This is done through the use of an "image" attribute, created by the factory in the "satellites" package. The image attribute holds all the compositing functions, while the satscene object remains solely a container for satellite data and metadata. - Feature: added the get_custom_composites function and a composites section in mpop.cfg to load custom made composites on the fly. [Martin Raspaud] - Feature: make use of mipp's area_extent function. [Martin Raspaud] - Style: cleanup channels_to_load after loading. [Martin Raspaud] - Doc: introduce mpop.cfg. [Martin Raspaud] - Feature: make use of the new mpop.cfg file to find the area file. Added the get_area_def helper function in projector. [Martin Raspaud] - Feature: Added the new pge02f product for met09. [Martin Raspaud] - Feature: New format keyword for images. [Martin Raspaud] - Update: new version of mipp, putting the image upright when slicing. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satout/netcdf4.py mpop/scene.py - Corrected mipp slicing in mipp.py. Added keyword for selecting datatype in cfscene.py. Corrected transformation for netCDF data type in cfscene.py. [Esben S. Nielsen] - New add_history function, and some changes in the netcdf handling. [Martin Raspaud] - Upgrade: Upgraded the assemble_segments module to use only one coordinate class. [Martin Raspaud] - Cosmetics: Added log message when slicing in mipp. [Martin Raspaud] - Move everything to a mpop folder, so that import mpop should be used. [Martin Raspaud] - WIP: Completing the nc4 reader. [Martin Raspaud] - Doc: Added credits. [Martin Raspaud] - Doc: updated build for github. [Martin Raspaud] - Feature: Started to support arithmetic operations on channels. [Martin Raspaud] - Feature: support for calibration flag for met 9. [Martin Raspaud] - Cosmetics: Added names to copyrigths. [Martin Raspaud] - Changed default logging. [Esben S. Nielsen] - Merge branch 'dmi_fix' into unstable. [Martin Raspaud] Conflicts: pp/scene.py - Added fill_valued as a keyworded argument. [Lars Orum Rasmussen] - Fixed oversampling error when pyresample is not present. Added compression as default option when writing netCDF files. [Esben S. Nielsen] - Moved pyresample and osgeo dependency in geo_image.py. [Esben S. Nielsen] - Feature: support umarf files for eps avhrr. [Martin Raspaud] - Feature: support the load_again flag for meteosat 9. [Martin Raspaud] - Feature: Allows passing arguments to reader plugins in SatelliteScene.load, and in particular "calibrate" to mipp. [Martin Raspaud] - Feature: added the fill_value argument to channel_image function. [Martin Raspaud] - Cosmetics: reorganized imports. [Martin Raspaud] - Cosmetics: Updated some template files. [Martin Raspaud] - Feature: Added the resave argument for saving projector objects. [Martin Raspaud] - Installation: Updated version number, removed obsolete file to install, and made the package non zip-safe. [Martin Raspaud] - Testing: Added tests for pp.satellites, and some cosmetics. [Martin Raspaud] - Feature: Handled the case of several instruments for get_satellite_class. [Martin Raspaud] - Cosmetics: changed the name of the satellite classes generated on the fly. [Martin Raspaud] - Testing: more on scene unit tests. [Martin Raspaud] - Testing: started integration testing of pp core parts. 
[Martin Raspaud] - Testing: completed seviri tests. [Martin Raspaud] - Testing: completed avhrr test. [Martin Raspaud] - Testing: Added tests for instruments : seviri, mviri, avhrr. [Martin Raspaud] - Testing: took away prerequisites tests for python 2.4 compatibility. [Martin Raspaud] - Testing: final adjustments for visir. [Martin Raspaud] - Testing: visir tests complete. [Martin Raspaud] - Testing: fixed nosetest running in test_visir. [Martin Raspaud] - Testing: corrected scene patching for visir tests. [Martin Raspaud] - Tests: started testing the visir instrument. [Martin Raspaud] - Cosmetics and documentation in the scene module. [Martin Raspaud] - Feature: better handling of tags and gdal options in geo_images. [Martin Raspaud] - Cleanup: removed uneeded hardcoded satellites and instruments. [Martin Raspaud] - Documentation: Updated readme, with link to the documentation. [Martin Raspaud] - Documentation: Added a paragraph on geolocalisation. [Martin Raspaud] - Refactoring: took away the precompute flag from the projector constructor, added the save method instead. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Feature: pyresample 0.7 for projector, and enhanced unittesting. [Martin Raspaud] - New template file for areas. [Martin Raspaud] - Feature: First draft for the hrpt reading (using aapp) and eps1a reading (using aapp and kai). [Martin Raspaud] - Cosmetics: cleaning up the etc directory. [Martin Raspaud] - Testing: Basic mipp testing. [Martin Raspaud] - Cosmetics: cfscene. [Martin Raspaud] - Feature: One mipp reader fits all :) [Martin Raspaud] - Feature: helper "debug_on" function. [Martin Raspaud] - Feature: save method for satscene. Supports only netcdf4 for now. [Martin Raspaud] - Feature: reload keyword for loading channels. [Martin Raspaud] - Documentation: better pp.satellites docstring. [Martin Raspaud] - Testing: updated the test_scene file to reflect scene changes. [Martin Raspaud] - Documentation: changed a couple of docstrings. [Martin Raspaud] - Feature: support pyresample areas in geo images. [Martin Raspaud] - Cosmetics: changing area_id to area. [Martin Raspaud] - Feature: adding metadata handling to channels. [Martin Raspaud] - Feature: now scene and channel accept a pyresample area as area attribute. [Martin Raspaud] - Enhancement: making a better mipp plugin. [Martin Raspaud] - Feature: Finished the netcdf writer. [Martin Raspaud] - Feature: updated the netcdf writer and added a proxy scene class for cf conventions. [Martin Raspaud] - Documentation: big update. [Martin Raspaud] - Documentation: quickstart now passes the doctest. [Martin Raspaud] - Documentation: reworking. [Martin Raspaud] - Feature: Moved get_satellite_class and build_satellite_class to pp.satellites. [Martin Raspaud] - Doc: starting documentation update. [Martin Raspaud] - Enhanced mipp reader. [Martin Raspaud] * Added metadata when loading scenes. * Added slicing when reading data from seviri * Added a draft generic reader - Cosmetics: enhanced error description and debug message in aapp1b, giving names to loaded/missing files. [Martin Raspaud] - Testing: updated test_scene. [Martin Raspaud] - Feature: Added automatic retreiving of product list for a given satellite. [Martin Raspaud] - Cleaning: remove class retrieving and building from runner.py. [Martin Raspaud] - Cosmetics: Better error message in scene when a reader is not found, plus some code enbelishment. [Martin Raspaud] - Feature: made scene object iteratable (channels are iterated). 
[Martin Raspaud] - Feature: Adding functions to retreive a satellite class from the satellites name and to build it on the fly from a configuration file. [Martin Raspaud] - Testing: more on channel. [Martin Raspaud] - Testing: added test for pp.scene.assemble_swaths. [Martin Raspaud] - Testing: scene loading tested. [Martin Raspaud] - Cleaning: test_scene is now more pylint friendly. [Martin Raspaud] - Feature: extended scene test. [Martin Raspaud] - Feature: more testing of scene.py. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: pp/test_scene.py - Feature: Enhanced unitests for scene. [Martin Raspaud] - Feature: Enhanced unitests for scene. [Martin Raspaud] - Tests: Improving unittests for channel classes. [Martin Raspaud] - Feature: Project function won't crash if pyresample can't be loaded. Returns the untouched scene instead. [Martin Raspaud] - Rewrote Filewatcher code. [Martin Raspaud] - Feature: added the refresh option to filewatcher to call the processing function even if no new file has come. [Martin Raspaud] - Refactoring: satellite, number, variant arguments to runner __init__ are now a single list argument. [Martin Raspaud] - Cleaning: Removing pylint errors from runner.py code. [Martin Raspaud] - Resolution can now be a floating point number. [Martin Raspaud] - Added the osgeo namespace when importing gdal. [Martin Raspaud] - Warning: Eps spline interpolation does not work around poles. [Martin Raspaud] - Added the "info" attribute to channel and scene as metadata holder. [Martin Raspaud] - Functionality: Automatically build satellite classes from config files. [Martin Raspaud] - Added copyright notices and updated version. [Martin Raspaud] - Changed channel names for seviri. [Martin Raspaud] - Added info stuff in mipp reader. [Martin Raspaud] - Added info.area_name update on projection. [Martin Raspaud] - Added quick mode for projecting fast and dirty. [Martin Raspaud] - Added single channel image building. [Martin Raspaud] - Added support for gdal_options when saving a geo_image. [Martin Raspaud] - Made satout a package. [Martin Raspaud] - Added a few information tags. [Martin Raspaud] - Added support for mipp reading of met 09. [Martin Raspaud] - Added reader and writer to netcdf format. [Martin Raspaud] - Added info object to the scene object in preparation for the netCDF/CF writer. [Adam Dybbroe] - Added support for FY3 satellite and MERSI instrument. [Adam Dybbroe] - Merge branch 'unstable' of git@github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: imageo/test_image.py Conflicts: imageo/test_image.py - Bugfix in image unit test: testing "almost equal" instead of "equal" for image inversion (floating point errors). [Martin Raspaud] - Bugfix in image unit test: testing "almost equal" instead of "equal" for image inversion (floating point errors). [Martin Raspaud] - Modified image inversion unit test to reflect new behaviour. [Martin Raspaud] - New rebase. [Martin Raspaud] satpy-0.20.0/doc/000077500000000000000000000000001362525524100135035ustar00rootroot00000000000000satpy-0.20.0/doc/Makefile000066400000000000000000000061051362525524100151450ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest help: @echo "Please use \`make <target>' where <target> is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/NWCSAFMSGPP.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/NWCSAFMSGPP.qhc" latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." satpy-0.20.0/doc/README000066400000000000000000000006111362525524100143610ustar00rootroot00000000000000With Sphinx and Satpy's dependencies installed, documentation can be generated by running: make html The generated HTML documentation pages are available in `build/html`. If Satpy's API has changed (new functions, modules, classes, etc.) then the API documentation should be regenerated before running the above make command.
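For example, run the following from the `doc` directory (the relative paths in the command assume that location):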
sphinx-apidoc -f -T -o source/api ../satpy ../satpy/tests satpy-0.20.0/doc/rtd_environment.yml000066400000000000000000000005251362525524100174450ustar00rootroot00000000000000name: readthedocs channels: - conda-forge dependencies: - python=3.7 - pip - dask - graphviz - numpy - pillow - pyresample - setuptools - setuptools_scm - setuptools_scm_git_archive - sphinx - sphinx_rtd_theme - trollsift - xarray - zarr - pip: - graphviz - .. # relative path to the satpy project satpy-0.20.0/doc/source/000077500000000000000000000000001362525524100150035ustar00rootroot00000000000000satpy-0.20.0/doc/source/_static/000077500000000000000000000000001362525524100164315ustar00rootroot00000000000000satpy-0.20.0/doc/source/_static/theme_overrides.css000066400000000000000000000005541362525524100223330ustar00rootroot00000000000000/* override table width restrictions */ @media screen and (min-width: 767px) { .wy-table-responsive table td { /* !important prevents the common CSS stylesheets from overriding this as on RTD they are loaded after this stylesheet */ white-space: normal !important; } .wy-table-responsive { overflow: visible !important; } }satpy-0.20.0/doc/source/api/000077500000000000000000000000001362525524100155545ustar00rootroot00000000000000satpy-0.20.0/doc/source/api/satpy.composites.rst000066400000000000000000000022041362525524100216300ustar00rootroot00000000000000satpy.composites package ======================== Submodules ---------- satpy.composites.abi module --------------------------- .. automodule:: satpy.composites.abi :members: :undoc-members: :show-inheritance: satpy.composites.ahi module --------------------------- .. automodule:: satpy.composites.ahi :members: :undoc-members: :show-inheritance: satpy.composites.cloud\_products module --------------------------------------- .. automodule:: satpy.composites.cloud_products :members: :undoc-members: :show-inheritance: satpy.composites.crefl\_utils module ------------------------------------ .. automodule:: satpy.composites.crefl_utils :members: :undoc-members: :show-inheritance: satpy.composites.sar module --------------------------- .. automodule:: satpy.composites.sar :members: :undoc-members: :show-inheritance: satpy.composites.viirs module ----------------------------- .. automodule:: satpy.composites.viirs :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: satpy.composites :members: :undoc-members: :show-inheritance: satpy-0.20.0/doc/source/api/satpy.demo.rst000066400000000000000000000002311362525524100203650ustar00rootroot00000000000000satpy.demo package ================== Module contents --------------- .. automodule:: satpy.demo :members: :undoc-members: :show-inheritance: satpy-0.20.0/doc/source/api/satpy.enhancements.rst000066400000000000000000000010051362525524100221110ustar00rootroot00000000000000satpy.enhancements package ========================== Submodules ---------- satpy.enhancements.abi module ----------------------------- .. automodule:: satpy.enhancements.abi :members: :undoc-members: :show-inheritance: satpy.enhancements.viirs module ------------------------------- .. automodule:: satpy.enhancements.viirs :members: :undoc-members: :show-inheritance: Module contents --------------- .. 
automodule:: satpy.enhancements :members: :undoc-members: :show-inheritance: satpy-0.20.0/doc/source/api/satpy.readers.rst000066400000000000000000000241201362525524100210710ustar00rootroot00000000000000satpy.readers package ===================== Submodules ---------- satpy.readers.aapp\_l1b module ------------------------------ .. automodule:: satpy.readers.aapp_l1b :members: :undoc-members: :show-inheritance: satpy.readers.abi\_base module ------------------------------ .. automodule:: satpy.readers.abi_base :members: :undoc-members: :show-inheritance: satpy.readers.abi\_l1b module ----------------------------- .. automodule:: satpy.readers.abi_l1b :members: :undoc-members: :show-inheritance: satpy.readers.abi\_l2\_nc module -------------------------------- .. automodule:: satpy.readers.abi_l2_nc :members: :undoc-members: :show-inheritance: satpy.readers.acspo module -------------------------- .. automodule:: satpy.readers.acspo :members: :undoc-members: :show-inheritance: satpy.readers.agri\_l1 module ----------------------------- .. automodule:: satpy.readers.agri_l1 :members: :undoc-members: :show-inheritance: satpy.readers.ahi\_hsd module ----------------------------- .. automodule:: satpy.readers.ahi_hsd :members: :undoc-members: :show-inheritance: satpy.readers.amsr2\_l1b module ------------------------------- .. automodule:: satpy.readers.amsr2_l1b :members: :undoc-members: :show-inheritance: satpy.readers.avhrr\_l1b\_gaclac module --------------------------------------- .. automodule:: satpy.readers.avhrr_l1b_gaclac :members: :undoc-members: :show-inheritance: satpy.readers.caliop\_l2\_cloud module -------------------------------------- .. automodule:: satpy.readers.caliop_l2_cloud :members: :undoc-members: :show-inheritance: satpy.readers.clavrx module --------------------------- .. automodule:: satpy.readers.clavrx :members: :undoc-members: :show-inheritance: satpy.readers.electrol\_hrit module ----------------------------------- .. automodule:: satpy.readers.electrol_hrit :members: :undoc-members: :show-inheritance: satpy.readers.eps\_l1b module ----------------------------- .. automodule:: satpy.readers.eps_l1b :members: :undoc-members: :show-inheritance: satpy.readers.eum\_base module ------------------------------ .. automodule:: satpy.readers.eum_base :members: :undoc-members: :show-inheritance: satpy.readers.fci\_l1c\_fdhsi module ------------------------------------ .. automodule:: satpy.readers.fci_l1c_fdhsi :members: :undoc-members: :show-inheritance: satpy.readers.file\_handlers module ----------------------------------- .. automodule:: satpy.readers.file_handlers :members: :undoc-members: :show-inheritance: satpy.readers.generic\_image module ----------------------------------- .. automodule:: satpy.readers.generic_image :members: :undoc-members: :show-inheritance: satpy.readers.geocat module --------------------------- .. automodule:: satpy.readers.geocat :members: :undoc-members: :show-inheritance: satpy.readers.ghrsst\_l3c\_sst module ------------------------------------- .. automodule:: satpy.readers.ghrsst_l3c_sst :members: :undoc-members: :show-inheritance: satpy.readers.goes\_imager\_hrit module --------------------------------------- .. automodule:: satpy.readers.goes_imager_hrit :members: :undoc-members: :show-inheritance: satpy.readers.goes\_imager\_nc module ------------------------------------- .. automodule:: satpy.readers.goes_imager_nc :members: :undoc-members: :show-inheritance: satpy.readers.grib module ------------------------- .. 
automodule:: satpy.readers.grib :members: :undoc-members: :show-inheritance: satpy.readers.hdf4\_utils module -------------------------------- .. automodule:: satpy.readers.hdf4_utils :members: :undoc-members: :show-inheritance: satpy.readers.hdf5\_utils module -------------------------------- .. automodule:: satpy.readers.hdf5_utils :members: :undoc-members: :show-inheritance: satpy.readers.hdfeos\_base module --------------------------------- .. automodule:: satpy.readers.hdfeos_base :members: :undoc-members: :show-inheritance: satpy.readers.hrit\_base module ------------------------------- .. automodule:: satpy.readers.hrit_base :members: :undoc-members: :show-inheritance: satpy.readers.hrit\_jma module ------------------------------ .. automodule:: satpy.readers.hrit_jma :members: :undoc-members: :show-inheritance: satpy.readers.hrpt module ------------------------- .. automodule:: satpy.readers.hrpt :members: :undoc-members: :show-inheritance: satpy.readers.hsaf\_grib module ------------------------------- .. automodule:: satpy.readers.hsaf_grib :members: :undoc-members: :show-inheritance: satpy.readers.iasi\_l2 module ----------------------------- .. automodule:: satpy.readers.iasi_l2 :members: :undoc-members: :show-inheritance: satpy.readers.li\_l2 module --------------------------- .. automodule:: satpy.readers.li_l2 :members: :undoc-members: :show-inheritance: satpy.readers.maia module ------------------------- .. automodule:: satpy.readers.maia :members: :undoc-members: :show-inheritance: satpy.readers.mersi2\_l1b module -------------------------------- .. automodule:: satpy.readers.mersi2_l1b :members: :undoc-members: :show-inheritance: satpy.readers.modis\_l1b module ------------------------------- .. automodule:: satpy.readers.modis_l1b :members: :undoc-members: :show-inheritance: satpy.readers.modis\_l2 module ------------------------------ .. automodule:: satpy.readers.modis_l2 :members: :undoc-members: :show-inheritance: satpy.readers.msi\_safe module ------------------------------ .. automodule:: satpy.readers.msi_safe :members: :undoc-members: :show-inheritance: satpy.readers.netcdf\_utils module ---------------------------------- .. automodule:: satpy.readers.netcdf_utils :members: :undoc-members: :show-inheritance: satpy.readers.nucaps module --------------------------- .. automodule:: satpy.readers.nucaps :members: :undoc-members: :show-inheritance: satpy.readers.nwcsaf\_nc module ------------------------------- .. automodule:: satpy.readers.nwcsaf_nc :members: :undoc-members: :show-inheritance: satpy.readers.olci\_nc module ----------------------------- .. automodule:: satpy.readers.olci_nc :members: :undoc-members: :show-inheritance: satpy.readers.omps\_edr module ------------------------------ .. automodule:: satpy.readers.omps_edr :members: :undoc-members: :show-inheritance: satpy.readers.safe\_sar\_l2\_ocn module --------------------------------------- .. automodule:: satpy.readers.safe_sar_l2_ocn :members: :undoc-members: :show-inheritance: satpy.readers.sar\_c\_safe module --------------------------------- .. automodule:: satpy.readers.sar_c_safe :members: :undoc-members: :show-inheritance: satpy.readers.scatsat1\_l2b module ---------------------------------- .. automodule:: satpy.readers.scatsat1_l2b :members: :undoc-members: :show-inheritance: satpy.readers.scmi module ------------------------- .. automodule:: satpy.readers.scmi :members: :undoc-members: :show-inheritance: satpy.readers.seviri\_base module --------------------------------- .. 
automodule:: satpy.readers.seviri_base :members: :undoc-members: :show-inheritance: satpy.readers.seviri\_l1b\_hrit module -------------------------------------- .. automodule:: satpy.readers.seviri_l1b_hrit :members: :undoc-members: :show-inheritance: satpy.readers.seviri\_l1b\_native module ---------------------------------------- .. automodule:: satpy.readers.seviri_l1b_native :members: :undoc-members: :show-inheritance: satpy.readers.seviri\_l1b\_native\_hdr module --------------------------------------------- .. automodule:: satpy.readers.seviri_l1b_native_hdr :members: :undoc-members: :show-inheritance: satpy.readers.seviri\_l1b\_nc module ------------------------------------ .. automodule:: satpy.readers.seviri_l1b_nc :members: :undoc-members: :show-inheritance: satpy.readers.slstr\_l1b module ------------------------------- .. automodule:: satpy.readers.slstr_l1b :members: :undoc-members: :show-inheritance: satpy.readers.tropomi\_l2 module -------------------------------- .. automodule:: satpy.readers.tropomi_l2 :members: :undoc-members: :show-inheritance: satpy.readers.utils module -------------------------- .. automodule:: satpy.readers.utils :members: :undoc-members: :show-inheritance: satpy.readers.vaisala\_gld360 module ------------------------------------ .. automodule:: satpy.readers.vaisala_gld360 :members: :undoc-members: :show-inheritance: satpy.readers.viirs\_compact module ----------------------------------- .. automodule:: satpy.readers.viirs_compact :members: :undoc-members: :show-inheritance: satpy.readers.viirs\_edr\_active\_fires module ---------------------------------------------- .. automodule:: satpy.readers.viirs_edr_active_fires :members: :undoc-members: :show-inheritance: satpy.readers.viirs\_edr\_flood module -------------------------------------- .. automodule:: satpy.readers.viirs_edr_flood :members: :undoc-members: :show-inheritance: satpy.readers.viirs\_l1b module ------------------------------- .. automodule:: satpy.readers.viirs_l1b :members: :undoc-members: :show-inheritance: satpy.readers.viirs\_sdr module ------------------------------- .. automodule:: satpy.readers.viirs_sdr :members: :undoc-members: :show-inheritance: satpy.readers.virr\_l1b module ------------------------------ .. automodule:: satpy.readers.virr_l1b :members: :undoc-members: :show-inheritance: satpy.readers.xmlformat module ------------------------------ .. automodule:: satpy.readers.xmlformat :members: :undoc-members: :show-inheritance: satpy.readers.yaml\_reader module --------------------------------- .. automodule:: satpy.readers.yaml_reader :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: satpy.readers :members: :undoc-members: :show-inheritance: satpy-0.20.0/doc/source/api/satpy.rst000066400000000000000000000024461362525524100174540ustar00rootroot00000000000000satpy package ============= Subpackages ----------- .. toctree:: satpy.composites satpy.demo satpy.enhancements satpy.readers satpy.writers Submodules ---------- satpy.config module ------------------- .. automodule:: satpy.config :members: :undoc-members: :show-inheritance: satpy.dataset module -------------------- .. automodule:: satpy.dataset :members: :undoc-members: :show-inheritance: satpy.multiscene module ----------------------- .. automodule:: satpy.multiscene :members: :undoc-members: :show-inheritance: satpy.node module ----------------- .. automodule:: satpy.node :members: :undoc-members: :show-inheritance: satpy.plugin\_base module ------------------------- .. 
automodule:: satpy.plugin_base :members: :undoc-members: :show-inheritance: satpy.resample module --------------------- .. automodule:: satpy.resample :members: :undoc-members: :show-inheritance: satpy.scene module ------------------ .. automodule:: satpy.scene :members: :undoc-members: :show-inheritance: satpy.utils module ------------------ .. automodule:: satpy.utils :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: satpy :members: :undoc-members: :show-inheritance: satpy-0.20.0/doc/source/api/satpy.writers.rst000066400000000000000000000023571362525524100211530ustar00rootroot00000000000000satpy.writers package ===================== Submodules ---------- satpy.writers.cf\_writer module ------------------------------- .. automodule:: satpy.writers.cf_writer :members: :undoc-members: :show-inheritance: satpy.writers.geotiff module ---------------------------- .. automodule:: satpy.writers.geotiff :members: :undoc-members: :show-inheritance: satpy.writers.mitiff module --------------------------- .. automodule:: satpy.writers.mitiff :members: :undoc-members: :show-inheritance: satpy.writers.ninjotiff module ------------------------------ .. automodule:: satpy.writers.ninjotiff :members: :undoc-members: :show-inheritance: satpy.writers.scmi module ------------------------- .. automodule:: satpy.writers.scmi :members: :undoc-members: :show-inheritance: satpy.writers.simple\_image module ---------------------------------- .. automodule:: satpy.writers.simple_image :members: :undoc-members: :show-inheritance: satpy.writers.utils module -------------------------- .. automodule:: satpy.writers.utils :members: :undoc-members: :show-inheritance: Module contents --------------- .. automodule:: satpy.writers :members: :undoc-members: :show-inheritance: satpy-0.20.0/doc/source/composites.rst000066400000000000000000000407421362525524100177310ustar00rootroot00000000000000========== Composites ========== Built-in Compositors ==================== .. py:currentmodule:: satpy.composites There are several built-in compositors available in SatPy. All of them use the :class:`GenericCompositor` base class which handles various image modes (`L`, `LA`, `RGB`, and `RGBA` at the moment) and updates attributes. The below sections summarize the composites that come with SatPy and show basic examples of creating and using them with an existing :class:`~satpy.scene.Scene` object. It is recommended that any composites that are used repeatedly be configured in YAML configuration files. General-use compositor code dealing with visible or infrared satellite data can be put in a configuration file called ``visir.yaml``. Composites that are specific to an instrument can be placed in YAML config files named accordingly (e.g., ``seviri.yaml`` or ``viirs.yaml``). See the `satpy repository `_ for more examples. GenericCompositor ----------------- :class:`GenericCompositor` class can be used to create basic single channel and RGB composites. For example, building an overview composite can be done manually within Python code with:: >>> from satpy.composites import GenericCompositor >>> compositor = GenericCompositor("overview") >>> composite = compositor([local_scene[0.6], ... local_scene[0.8], ... local_scene[10.8]]) One important thing to notice is that there is an internal difference between a composite and an image. A composite is defined as a special dataset which may have several bands (like `R`, `G` and `B` bands). 
However, the data isn't stretched, clipped, or gamma filtered until an image is generated. To get an image out of the above composite:: >>> from satpy.writers import to_image >>> img = to_image(composite) >>> img.invert([False, False, True]) >>> img.stretch("linear") >>> img.gamma(1.7) >>> img.show() This part is called `enhancement`, and is covered in more detail in :doc:`enhancements`.
DifferenceCompositor -------------------- :class:`DifferenceCompositor` calculates the difference of two datasets:: >>> from satpy.composites import DifferenceCompositor >>> compositor = DifferenceCompositor("diffcomp") >>> composite = compositor([local_scene[10.8], local_scene[12.0]])
FillingCompositor ----------------- :class:`FillingCompositor` fills the missing values in three datasets with the values of another dataset:: >>> from satpy.composites import FillingCompositor >>> compositor = FillingCompositor("fillcomp") >>> filler = local_scene[0.6] >>> data_with_holes_1 = local_scene['ch_a'] >>> data_with_holes_2 = local_scene['ch_b'] >>> data_with_holes_3 = local_scene['ch_c'] >>> composite = compositor([filler, data_with_holes_1, data_with_holes_2, ... data_with_holes_3])
PaletteCompositor ------------------ :class:`PaletteCompositor` creates a color version of a single-channel categorical dataset using a colormap:: >>> from satpy.composites import PaletteCompositor >>> compositor = PaletteCompositor("palcomp") >>> composite = compositor([local_scene['cma'], local_scene['cma_pal']]) The palette should have a single entry for each (possible) value in the dataset, mapping the value to an RGB triplet. Typically the palette comes with the categorical (e.g. cloud mask) product that is being visualized.
DayNightCompositor ------------------ :class:`DayNightCompositor` merges two different composites. The first composite will be placed on the day side of the scene, and the second one on the night side. The transition from day to night is done by calculating a solar zenith angle (SZA) weighted average of the two composites. The SZA can optionally be given as a third dataset; if not given, the angles will be calculated. The width of the blending zone can be defined when initializing the compositor (default values shown in the example below). >>> from satpy.composites import DayNightCompositor >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88.) >>> composite = compositor([local_scene['true_color'], ... local_scene['night_fog']])
RealisticColors --------------- The :class:`RealisticColors` compositor is a special compositor used to create a realistic near-true-color composite from MSG/SEVIRI data:: >>> from satpy.composites import RealisticColors >>> compositor = RealisticColors("realcols", lim_low=85., lim_high=95.) >>> composite = compositor([local_scene['VIS006'], ... local_scene['VIS008'], ... local_scene['HRV']])
CloudCompositor --------------- :class:`CloudCompositor` can be used to threshold the data so that "only" clouds are visible. These composites can be used as an overlay on top of, e.g., static terrain images to give a rough idea of where there are clouds. The data are thresholded using three variables: - `transition_min`: values below or equal to this are clouds -> opaque white - `transition_max`: values above this are cloud free -> transparent - `transition_gamma`: gamma correction applied to clarify the clouds Usage (with default values):: >>> from satpy.composites import CloudCompositor >>> compositor = CloudCompositor("clouds", transition_min=258.15, ... transition_max=298.15, ... transition_gamma=3.0) >>> composite = compositor([local_scene[10.8]]) Support for using this compositor with VIS data, where the values for high/thick clouds tend to be in reverse order compared to brightness temperatures, has yet to be added.
RatioSharpenedRGB ----------------- :class:`RatioSharpenedRGB`
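sharpens a low-resolution RGB composite with a co-located higher-resolution band: roughly, the ratio of the high-resolution band to its low-resolution counterpart is used to scale the bands of the RGB. A minimal usage sketch, assuming the three low-resolution bands and a matching high-resolution band have already been loaded (the variable names here are hypothetical; the ``high_resolution_band`` keyword and ``optional_datasets`` argument follow the class API)::

    >>> from satpy.composites import RatioSharpenedRGB
    >>> compositor = RatioSharpenedRGB("sharpened_rgb", high_resolution_band="red")
    >>> composite = compositor([low_res_red, low_res_green, low_res_blue],
    ...                        optional_datasets=[high_res_red])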
SelfSharpenedRGB ---------------- :class:`SelfSharpenedRGB` sharpens the RGB with the ratio of a band to a strided version of itself.
LuminanceSharpeningCompositor ----------------------------- :class:`LuminanceSharpeningCompositor` replaces the luminance from an RGB composite with luminance created from reflectance data. If the resolutions of the reflectance data *and* of the target area definition are higher than those of the base RGB, more details can be retrieved. This compositor can also be useful with matching resolutions, e.g. to highlight shadowing at cloud tops in a colorized infrared composite. >>> from satpy.composites import LuminanceSharpeningCompositor >>> compositor = LuminanceSharpeningCompositor("vis_sharpened_ir") >>> vis_data = local_scene['HRV'] >>> colorized_ir_clouds = local_scene['colorized_ir_clouds'] >>> composite = compositor([vis_data, colorized_ir_clouds])
SandwichCompositor ------------------ Similar to :class:`LuminanceSharpeningCompositor`, :class:`SandwichCompositor` uses reflectance data to bring out more detail in infrared or low-resolution composites. :class:`SandwichCompositor` multiplies the RGB channels with (scaled) reflectance. >>> from satpy.composites import SandwichCompositor >>> compositor = SandwichCompositor("ir_sandwich") >>> vis_data = local_scene['HRV'] >>> colorized_ir_clouds = local_scene['colorized_ir_clouds'] >>> composite = compositor([vis_data, colorized_ir_clouds])
StaticImageCompositor --------------------- :class:`StaticImageCompositor` can be used to read an image from disk and use it just like satellite data, including resampling it and using it as part of other composites. >>> from satpy.composites import StaticImageCompositor >>> compositor = StaticImageCompositor("static_image", filename="image.tif") >>> composite = compositor()
BackgroundCompositor -------------------- :class:`BackgroundCompositor` can be used to stack two composites together. If the composites don't have `alpha` channels, the `background` is used where `foreground` has no data. If `foreground` has an alpha channel, the `alpha` values are used as weights when blending the two composites. >>> from satpy import Scene >>> from satpy.composites import BackgroundCompositor >>> compositor = BackgroundCompositor("clouds_with_background") >>> clouds = local_scene['ir_cloud_day'] >>> background = local_scene['overview'] >>> composite = compositor([clouds, background])
Creating composite configuration files ====================================== To save a custom composite, the following procedure can be used: 1. Create a custom directory for your custom configs. 2. Set the environment variable ``PPP_CONFIG_DIR`` to this path. 3. Write config files with your changes only (see examples below), pointing to the (custom) module containing your composites. Generic compositors can be placed in ``$PPP_CONFIG_DIR/composites/visir.yaml`` and instrument-specific ones in ``$PPP_CONFIG_DIR/composites/<sensor>.yaml``. Don't forget to add changes to the ``enhancement/generic.yaml`` file too. 4. If custom compositing code was used then it must be importable by python. If the code is not installed in your python environment then another option is to add it to your ``PYTHONPATH``.
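As a sketch of the whole procedure, assume the hypothetical directory ``/path/to/my_config_dir`` contains your configs and that they define a composite named ``my_composite``; a session could then look like this (``PPP_CONFIG_DIR`` can also be exported in the shell before starting Python)::

    >>> import os
    >>> os.environ["PPP_CONFIG_DIR"] = "/path/to/my_config_dir"
    >>> from satpy import Scene
    >>> scn = Scene(filenames=my_files, reader="seviri_l1b_hrit")
    >>> scn.load(["my_composite"])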
With that, you should be able to load your new composite directly. Example configuration files can be found in the satpy repository as well as in a few simple examples below.
Simple RGB composite -------------------- This is the overview composite shown in the first code example above using :class:`GenericCompositor`:: sensor_name: visir composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.6 - 0.8 - 10.8 standard_name: overview For an instrument-specific version (here MSG/SEVIRI), we should use the channel *names* instead of wavelengths. Note also that the sensor_name is now a combination of visir and seviri, which means that it extends the generic visir composites:: sensor_name: visir/seviri composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - VIS006 - VIS008 - IR_108 standard_name: overview In the following examples only the composite recipes are shown; the header information (sensor_name, composites) and indentation need to be added.
Using modifiers --------------- In many cases the basic datasets need to be adjusted, e.g. for solar zenith angle normalization. These modifiers can be applied in the following way:: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS006 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - IR_108 standard_name: overview Here we see two changes: 1. channels with modifiers need to have either `name` or `wavelength` added in front of the channel name or wavelength, respectively 2. a list of modifiers attached to the dictionary defining the channel The modifier above is a built-in that normalizes the data as if the Sun were directly at the zenith.
Using other composites ---------------------- Often it is handy to use other composites as part of another composite. In this example we have one composite that relies on solar channels on the day side, and another for the night side:: natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - natural_color - night_fog standard_name: natural_with_night_fog This compositor has two additional keyword arguments that can be defined (shown here with their default values, giving a result identical to the above):: natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - natural_color - night_fog lim_low: 85.0 lim_high: 95.0 standard_name: natural_with_night_fog
Defining other composites in-line --------------------------------- It is also possible to define sub-composites in-line. This example is the built-in airmass composite:: airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 6.2 - wavelength: 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 9.7 - wavelength: 10.8 - wavelength: 6.2 standard_name: airmass
Using a pre-made image as a background -------------------------------------- Below is an example composite config using :class:`StaticImageCompositor`, :class:`DayNightCompositor`, :class:`CloudCompositor` and :class:`BackgroundCompositor` to show how to create a composite with blended day/night imagery as a background for clouds. As the images are in PNG format, and thus not georeferenced, the name of the area definition for the background images is given.
When using GeoTIFF images the `area` parameter can be left out. .. note:: The background blending uses the current time if there are no timestamps in the image filenames. :: clouds_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: clouds_with_background prerequisites: - ir_cloud_day - compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - static_day - static_night static_day: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: static_day filename: /path/to/day_image.png area: euro4 static_night: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: static_night filename: /path/to/night_image.png area: euro4 To ensure that the images aren't auto-stretched and possibly altered, the following should be added to the enhancement config (assuming an 8-bit image) for both of the static images:: static_day: standard_name: static_day operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [255, 255, 255]
Enhancing the images ==================== .. todo:: Explain how composite names, composite standard_name, enhancement names, and enhancement standard_name are related to each other Explain what happens when no enhancement is configured for a product (= use the default enhancement). Explain that the methods are often just a wrapper for XRImage methods, but can also be something completely custom. List and explain in detail the built-in enhancements: - stretch - gamma - invert - crefl_scaling - cira_stretch - lookup - colorize - palettize - three_d_effect - btemp_threshold .. todo:: Should this be in another file/page? After the composite is defined and created, it needs to be converted to an image. To do this, it is necessary to describe how the data values are mapped to values stored in the image format. This procedure is called ``stretching``, and in SatPy it is implemented by ``enhancements``. The first step is to convert the composite to an :class:`~trollimage.xrimage.XRImage` object:: >>> from satpy.writers import to_image >>> img = to_image(composite) Now it is possible to apply enhancements available in the class:: >>> img.invert([False, False, True]) >>> img.stretch("linear") >>> img.gamma(1.7) And finally either show or save the image:: >>> img.show() >>> img.save('image.tif') As pointed out in the composite section, it is better to define frequently used enhancements in configuration files under ``$PPP_CONFIG_DIR/enhancements/``. The enhancements can either be in ``generic.yaml`` or an instrument-specific file (e.g., ``seviri.yaml``). The above enhancement can be written (with the headers necessary for the file) as:: enhancements: overview: standard_name: overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [False, False, True] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.7, 1.7, 1.7] More examples can be found in the SatPy source code directory ``satpy/etc/enhancements/generic.yaml``. See the :doc:`enhancements` documentation for more information on available built-in enhancements. satpy-0.20.0/doc/source/conf.py000066400000000000000000000221131362525524100163010ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # NWCSAF/MSG PP documentation build configuration file, created by # sphinx-quickstart on Fri Sep 25 16:58:28 2009.
# # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # To generate apidoc modules: # sphinx-apidoc -f -T -o source/api ../satpy ../satpy/tests """Sphinx documentation configuration and setup.""" import os import sys from datetime import datetime from pkg_resources import get_distribution # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.append(os.path.abspath('../../')) sys.path.append(os.path.abspath(os.path.dirname(__file__))) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # get version using setuptools-scm release = get_distribution('satpy').version # The full version, including alpha/beta/rc tags. # for example take major/minor version = '.'.join(release.split('.')[:2]) class Mock(object): # noqa """Mock class for mocking module instances.""" def __init__(self, *args, **kwargs): """Mask any arguments to mock object.""" pass def __call__(self, *args, **kwargs): """Mock a function and class object when accessed from mocked module.""" return Mock() @classmethod def __getattr__(cls, name): """Mock common module attributes used in documentation.""" if name in ('__file__', '__path__'): return '/dev/null' elif name[0] == name[0].upper(): mockType = type(name, (), {}) mockType.__module__ = __name__ return mockType elif name == "inf": return 0 else: return Mock() # https://github.com/sphinx-doc/sphinx/issues/3920 MOCK_MODULES = ['h5py'] for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() autodoc_mock_imports = ['cf', 'glymur', 'h5netcdf', 'imageio', 'mipp', 'netCDF4', 'pygac', 'pygrib', 'pyhdf', 'pyninjotiff', 'pyorbital', 'pyspectral', 'rasterio', 'trollimage', 'zarr'] autoclass_content = 'both' # append class __init__ docstring to the class docstring # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.autosummary', 'doi_role', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Satpy' copyright = u'2009-{}, The PyTroll Team'.format(datetime.utcnow().strftime("%Y")) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. 
# unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = [] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] html_context = { 'css_files': [ '_static/theme_overrides.css', # override wide tables in RTD theme ], } # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_use_modindex = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'NWCSAFMSGPPdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4').
# latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'satpy.tex', u'satpy documentation', u'SMHI', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_use_modindex = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { 'dask': ('https://docs.dask.org/en/latest', None), 'geoviews': ('http://geoviews.org', None), 'jobqueue': ('https://jobqueue.dask.org/en/latest', None), 'numpy': ('https://docs.scipy.org/doc/numpy', None), 'pydecorate': ('https://pydecorate.readthedocs.io/en/stable', None), 'pyorbital': ('https://pyorbital.readthedocs.io/en/stable', None), 'pyproj': ('https://pyproj4.github.io/pyproj/dev', None), 'pyresample': ('https://pyresample.readthedocs.io/en/stable', None), 'python': ('https://docs.python.org/3', None), 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None), 'trollimage': ('https://trollimage.readthedocs.io/en/stable', None), 'trollsift': ('https://trollsift.readthedocs.io/en/stable', None), 'xarray': ('https://xarray.pydata.org/en/stable', None) } satpy-0.20.0/doc/source/data_download.rst000066400000000000000000000056171362525524100203460ustar00rootroot00000000000000Downloading Data ================ One of the main features of Satpy is its ability to read various satellite data formats. However, it currently only provides limited methods for downloading data from remote sources and these methods are limited to demo data for `Pytroll examples `_. See the examples and the :mod:`~satpy.demo` API documentation for details. Otherwise, Satpy assumes all data is available through the local system, either as a local directory or a network-mounted file system. Certain readers that use ``xarray`` to open data files may be able to load files from remote systems by using OpenDAP or similar protocols. As a user there are two options for getting access to data: 1. Download data to your local machine. 2. Connect to a remote system that already has access to data. The most common case of a remote system having access to data is with a cloud computing service like Google Cloud Platform (GCP) or Amazon Web Services (AWS). Another possible case is an organization having direct broadcast antennas where they receive data directly from the satellite or satellite mission organization (NOAA, NASA, EUMETSAT, etc.). In these cases data is usually available as a mounted network file system and can be accessed like a normal local path (with the added latency of network communications). Below are some data sources that provide data that can be read by Satpy. If you know of others, please let us know by either creating a GitHub issue or pull request.
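As an illustration of the OpenDAP case mentioned above, ``xarray`` can open a dataset directly from a remote server that supports the protocol; whether a particular Satpy reader can make use of this depends on the file format, and the URL below is only a hypothetical placeholder::

    >>> import xarray as xr
    >>> ds = xr.open_dataset("https://example.com/opendap/path/to/granule.nc")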
NOAA GOES on Amazon Web Services -------------------------------- * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` In addition to the pages above, Brian Blaylock has prepared some instructions for using the ``rclone`` tool for downloading AWS data to a local machine. The instructions can be found `here `_. NOAA GOES on Google Cloud Platform ---------------------------------- GOES-16 ^^^^^^^ * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` GOES-17 ^^^^^^^ * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` NOAA CLASS ---------- * `Data Ordering `__ * Associated Readers: ``viirs_sdr`` NASA VIIRS Atmosphere SIPS -------------------------- * `Resource Description `__ * Associated Readers: ``viirs_l1b`` EUMETSAT Data Center -------------------- * `Data Ordering `__ satpy-0.20.0/doc/source/dev_guide/000077500000000000000000000000001362525524100167365ustar00rootroot00000000000000satpy-0.20.0/doc/source/dev_guide/CONTRIBUTING.rst000077700000000000000000000000001362525524100246742../../../CONTRIBUTING.rstustar00rootroot00000000000000satpy-0.20.0/doc/source/dev_guide/custom_reader.rst000066400000000000000000000537241362525524100223370ustar00rootroot00000000000000================================= Adding a Custom Reader to Satpy ================================= In order to add a reader to satpy, you will need to create two files: - a YAML file for describing the files to read and the datasets that are available - a python file implementing the actual reading of the datasets and metadata Satpy implements readers by defining a single "reader" object that pulls information from one or more file handler objects. The base reader class provided by Satpy is enough for most cases and does not need to be modified. The individual file handler classes do need to be created due to the small differences between file formats. The below documentation will walk through each part of making a reader in detail. To do this we will implement a reader for the EUMETSAT NetCDF format for SEVIRI data. .. _reader_naming: Naming your reader ------------------ Satpy tries to follow a standard scheme for naming its readers. These names are used in filenames, but are also used by users so it is important that the name be recognizable and clear. Although some special cases exist, most fit into the following naming scheme: .. parsed-literal:: <sensor>[_<processing level>[_<level detail>]][_<file format>] All components of the name should be lowercase and use underscores as the main separator between fields. Hyphens should be used as an intra-field separator if needed (e.g. goes-imager). :sensor: The first component of the name represents the sensor or instrument that observed the data stored in the files being read. If the files are the output of a specific processing software or a certain algorithm implementation that supports multiple sensors then a lowercase version of that software's name should be used (e.g. clavrx for CLAVR-x, nucaps for NUCAPS). The ``sensor`` field is the only required field of the naming scheme. If it is actually an instrument name then the reader name should include one of the other optional fields. If sensor is a software package then that may be enough without any additional information to uniquely identify the reader. :processing level: This field marks the specific level of processing or calibration that has been performed to produce the data in the files being read.
Common values of this field include: ``sdr`` for Sensor Data Record (SDR), ``edr`` for Environmental Data Record (EDR), ``l1b`` for Level 1B, and ``l2`` for Level 2. :level detail: In cases where the processing level is not enough to completely define the reader this field can be used to provide a little more context. For example, some VIIRS EDR products are specific to a particular field of study or type of scientific event, like a flood or cloud product. In these cases the detail field can be added to produce a name like ``viirs_edr_flood``. This field shouldn't be used unless processing level is also specified. :file format: If the file format of the files is informative to the user or can distinguish one reader from another then this field should be specified. Common format names should be abbreviated following existing abbreviations like `nc` for NetCDF3 or NetCDF4, `hdf` for HDF4, `h5` for HDF5. The existing :ref:`reader's table ` can be used for reference. When in doubt, reader names can be discussed in the GitHub pull request when the reader is added to Satpy, or in a GitHub issue. The YAML file ------------- The yaml file is composed of three sections: - the :ref:`reader ` section, that provides basic parameters for the reader - the :ref:`file_types ` section, that gives the patterns of the files this reader can handle - the :ref:`datasets ` section, that describes the datasets available from this reader .. _custom_reader_reader_section: The ``reader`` section ~~~~~~~~~~~~~~~~~~~~~~ The ``reader`` section provides basic parameters for the overall reader. The parameters to provide in this section are: - name: This is the name of the reader, it should be the same as the filename (without the .yaml extension). The naming convention for this is described in the :ref:`reader_naming` section above. - short_name (optional): Human-readable version of the reader 'name'. If not provided, applications using this can default to taking the 'name', replacing ``_`` with spaces and uppercasing every letter. - long_name: Human-readable title for the reader. This may be used as a section title on a website or in GUI applications using Satpy. Default naming scheme is ``<program> <sensor> Level <level> [<format>]``. For example, for the ``abi_l1b`` reader this is ``"GOES-R ABI Level 1b"`` where "GOES-R" is the name of the program and **not** the name of the platform/satellite. This scheme may not work for all readers, but in general should be followed. See existing readers for more examples. - description: General description of the reader. This may include any `restructuredtext `_ formatted text like links to PDFs or sites with more information on the file format. This can be multiline if formatted properly in YAML (see example below). - sensors: The list of sensors this reader will support. This must be all lowercase letters for full support throughout Satpy. - reader: The main python reader class to use, in most cases ``FileYAMLReader`` is a good choice. .. code:: yaml reader: name: seviri_l1b_nc short_name: SEVIRI L1b NetCDF4 long_name: MSG SEVIRI Level 1b (NetCDF4) description: > NetCDF4 reader for EUMETSAT MSG SEVIRI Level 1b files. sensors: [seviri] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader .. _custom_reader_file_types_section: The ``file_types`` section ~~~~~~~~~~~~~~~~~~~~~~~~~~ Each file type needs to provide: - ``file_reader``, the class that will handle the files for this reader, that you will implement in the corresponding python file. See the :ref:`custom_reader_python` section below.
- ``file_patterns``, the patterns to match to find files this reader can handle. The syntax to use is basically the same as Python's ``format``, with the addition of time fields. See the `trollsift package documentation `__ for more details. - Optionally, a file type can have a ``requires`` field: it is a list of file types that the current file type needs to function. For example, the HRIT MSG format segment files each need a prologue and epilogue file to be read properly, hence in this case we have added ``requires: [HRIT_PRO, HRIT_EPI]`` to the file type definition. .. code:: yaml file_types: nc_seviri_l1b: file_reader: !!python/name:satpy.readers.nc_seviri_l1b.NCSEVIRIFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,VIS+IR+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{processing_time:%Y%m%d%H%M%S}.nc'] nc_seviri_l1b_hrv: file_reader: !!python/name:satpy.readers.nc_seviri_l1b.NCSEVIRIHRVFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,HRV+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{processing_time:%Y%m%d%H%M%S}.nc'] .. _custom_reader_datasets_section: The ``datasets`` section ~~~~~~~~~~~~~~~~~~~~~~~~ The datasets section describes each dataset available in the files. The parameters provided are made available to the methods of the implemented python class. If your input files contain all the necessary metadata or you have a lot of datasets to configure, look at the :ref:`custom_reader_available_datasets` section below. Implementing this will save you from having to write a lot of configuration in the YAML files. Parameters you can define, for example, are: - name - sensor - resolution - wavelength - polarization - standard\_name: The `CF standard name `_ for the dataset that will be used to determine the type of data. See existing readers for common standard names in Satpy or the CF standard name documentation for other available names or how to define your own. Satpy does not currently have a hard requirement on these names being completely CF compliant, but consistency across readers is important. - units: The units of the data when returned by the file handler. Although not technically a requirement, it is common for Satpy datasets to use "%" for reflectance fields and "K" for brightness temperature fields. - modifiers: The modification(s) that have already been applied to the data when it is returned by the file handler. Only a few of these have been standardized across Satpy, but are based on the names of the modifiers configured in the "composites" YAML files. Examples include ``sunz_corrected`` or ``rayleigh_corrected``. See the `metadata wiki `_ for more information. - file\_type: Name of file type (see above). - coordinates: An optional two-element list with the names of the longitude and latitude datasets describing the location of this dataset. This is optional if the data being read is gridded already. Swath data, for example data from polar-orbiting satellites, should have these defined or no geolocation information will be available when the data is loaded. For gridded datasets a `get_area_def` function will be implemented in python (see below) to define geolocation information. - Any other field that is relevant for the reader or could be useful metadata provided to the user. This section can simply be copied and adapted from existing SEVIRI readers, like for example the ``msg_native`` reader. ..
code:: yaml datasets: HRV: name: HRV resolution: 1000.134348869 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b_hrv IR_016: name: IR_016 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b nc_key: 'ch3' IR_039: name: IR_039 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b nc_key: 'ch4' IR_087: name: IR_087 resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_097: name: IR_097 resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_108: name: IR_108 resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_120: name: IR_120 resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_134: name: IR_134 resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b VIS006: name: VIS006 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b VIS008: name: VIS008 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b WV_062: name: WV_062 resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: 
counts units: count file_type: nc_seviri_l1b WV_073: name: WV_073 resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b The YAML file is now ready and you can move on to writing your python code. .. _custom_reader_available_datasets: Dynamic Dataset Configuration ----------------------------- The above "datasets" section for reader configuration is the most explicit method for specifying metadata about possible data that can be loaded from input files. It is also the easiest way for people with little python experience to customize or add new datasets to a reader. However, some file formats may have 10s or even 100s of datasets or variations of datasets. Writing the metadata and access information for every one of these datasets can easily become a problem. To help in these cases the :meth:`~satpy.readers.file_handlers.BaseFileHandler.available_datasets` file handler interface can be used. This method, if needed, should be implemented in your reader's file handler classes. The best information for what this method does and how to use it is available in the :meth:`API documentation `. This method is good when you want to: 1. Define datasets dynamically without needing to define them in the YAML. 2. Supplement metadata from the YAML file with information from the file content (e.g. `resolution`). 3. Determine if a dataset is available by the file contents. This differs from the default behavior of a dataset being considered loadable if its "file_type" is loaded. Note that this is considered an advanced interface and involves more advanced Python concepts like generators. If you need help with anything feel free to ask questions in your pull request or on the :ref:`Pytroll Slack `.
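For illustration, below is a minimal sketch of such an implementation for a hypothetical file handler that keeps an opened ``xarray`` dataset on a ``self.nc`` attribute. It passes through the datasets already configured in the YAML file and then advertises every variable found in the file contents:

.. code:: python

    def available_datasets(self, configured_datasets=None):
        """Report YAML-configured datasets and datasets found in the file."""
        # Pass through everything Satpy (or another file handler) already
        # knows about without changing its availability status.
        for is_avail, ds_info in (configured_datasets or []):
            yield is_avail, ds_info
        # Dynamically advertise the variables discovered in the file.
        for var_name in self.nc.data_vars:
            ds_info = {
                'file_type': self.filetype_info['file_type'],
                'name': var_name,
            }
            yield True, ds_info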
.. _custom_reader_python: The python file --------------- The python file needs to implement a file handler class for each file type that we want to read. Such a class needs to implement a few methods: - the ``__init__`` method, that takes as arguments - the filename (string) - the filename info (dict) that we get by parsing the filename using the pattern defined in the yaml file - the filetype info that we get from the filetype definition in the yaml file This method can also receive other file handler instances as parameters if the filetype at hand has requirements. (See the explanation in the YAML file_types section above.) - the ``get_dataset`` method, which takes as arguments - the dataset ID of the dataset to load - the dataset info that is the description of the channel in the YAML file This method has to return an xarray.DataArray instance if the loading is successful, containing the data and :ref:`metadata ` of the loaded dataset, or return None if the loading was unsuccessful. The DataArray should at least have a ``y`` dimension. For data covering a 2D region on the Earth, there should be at least a ``y`` and an ``x`` dimension. This applies to non-gridded data like that of a polar-orbiting satellite instrument. The latitude dimension is typically named ``y`` and the longitude dimension ``x``. This may require renaming dimensions from the file; see the :meth:`xarray.DataArray.rename` method for more information and its use in the example below. - the ``get_area_def`` method, that takes as its single argument the :class:`~satpy.dataset.DatasetID` for which we want the area. It should return a :class:`~pyresample.geometry.AreaDefinition` object. For data that cannot be geolocated with an area definition, the pixel coordinates will be loaded using the ``get_dataset`` method for the resulting scene to be navigated. The names of the datasets to be loaded should be specified as a special ``coordinates`` attribute in the YAML file. For example, by specifying ``coordinates: [longitude_dataset, latitude_dataset]`` in the YAML, Satpy will call ``get_dataset`` twice, once to load the dataset named ``longitude_dataset`` and once to load ``latitude_dataset``. Satpy will then create a :class:`~pyresample.geometry.SwathDefinition` with this coordinate information and assign it to the dataset's ``.attrs['area']`` attribute. - Optionally, the ``get_bounding_box`` method can be implemented if filtering files by area is desirable for this data type. On top of that, two attributes need to be defined: ``start_time`` and ``end_time``, which define the start and end times of the sensing. If you are writing a file handler for more common formats like HDF4, HDF5, or NetCDF4 you may want to consider using the utility base classes for each: :class:`satpy.readers.hdf4_utils.HDF4FileHandler`, :class:`satpy.readers.hdf5_utils.HDF5FileHandler`, and :class:`satpy.readers.netcdf_utils.NetCDF4FileHandler`. These were added as a convenience and are not required to read these formats. In many cases using the :func:`xarray.open_dataset` function in a custom file handler is a much better idea. One way of implementing a file handler is shown below:

.. code:: python

    # this is seviri_l1b_nc.py
    import xarray as xr

    from pyresample.geometry import AreaDefinition
    from satpy import CHUNK_SIZE
    from satpy.readers.file_handlers import BaseFileHandler


    class NCSEVIRIFileHandler(BaseFileHandler):
        def __init__(self, filename, filename_info, filetype_info):
            super(NCSEVIRIFileHandler, self).__init__(filename, filename_info,
                                                      filetype_info)
            self.nc = None

        def get_dataset(self, dataset_id, dataset_info):
            if dataset_id.calibration != 'radiance':
                # TODO: implement calibration to reflectance or brightness
                # temperature
                return
            if self.nc is None:
                self.nc = xr.open_dataset(self.filename,
                                          decode_cf=True,
                                          mask_and_scale=True,
                                          chunks={'num_columns_vis_ir': CHUNK_SIZE,
                                                  'num_rows_vis_ir': CHUNK_SIZE})
                self.nc = self.nc.rename({'num_columns_vis_ir': 'x',
                                          'num_rows_vis_ir': 'y'})
            dataset = self.nc[dataset_info['nc_key']]
            dataset.attrs.update(dataset_info)
            return dataset

        def get_area_def(self, dataset_id):
            return AreaDefinition(
                "some_area_name",
                "on-the-fly area",
                "geos",
                "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos",
                3636,
                3636,
                [-5456233.41938636, -5453233.01608472,
                 5453233.01608472, 5456233.41938636])


    class NCSEVIRIHRVFileHandler(BaseFileHandler):
        # left as an exercise to the reader :)
        pass
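Once the YAML file and this module are in place, the new reader can be exercised like any other Satpy reader. A small sketch (the filename below is a made-up example matching the pattern defined earlier, and this particular handler only implements the ``radiance`` calibration)::

    >>> from satpy import Scene
    >>> filenames = ['W_XX-EUMETSAT-Darmstadt,VIS+IR+IMAGERY,MSG4+SEVIRI_C_EUMG_20200101120000.nc']
    >>> scn = Scene(filenames=filenames, reader='seviri_l1b_nc')
    >>> scn.load(['IR_108'], calibration='radiance')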
.. toctree::
    :maxdepth: 1

    CONTRIBUTING
    xarray_migration
    custom_reader

Coding guidelines
=================

Satpy is part of `PyTroll `_, and all code should follow the
`PyTroll coding guidelines and best practices `_.

Satpy currently supports Python 3.4+. All code should be written to be
compatible with these versions.

Development installation
========================

See the :doc:`../install` section for basic installation instructions. When
it comes time to install Satpy it should be installed from a clone of the git
repository and in development mode so that local file changes are
automatically reflected in the python environment. We highly recommend making
a separate conda environment or virtualenv for development.

First, if you plan on contributing back to the project you should
`fork the repository `_ and clone your fork. The package can then be
installed in development mode by doing::

    pip install -e .

Running tests
=============

Satpy tests are written using the python :mod:`unittest` module and the tests
can be executed by running::

    python setup.py test

Documentation
=============

Satpy's documentation is built using Sphinx. All documentation lives in the
``doc/`` directory of the project repository. After editing the source files
there the documentation can be generated locally::

    cd doc
    make html

The output of the make command should be checked for warnings and errors. If
code has been changed (new functions or classes) then the API documentation
files should be regenerated before running the above command::

    sphinx-apidoc -f -T -o source/api ../satpy ../satpy/tests

satpy-0.20.0/doc/source/dev_guide/xarray_migration.rst

============================
Migrating to xarray and dask
============================

Many python developers dealing with meteorological satellite data begin with
using NumPy arrays directly. This work usually involves masked arrays,
boolean masks, index arrays, and reshaping. Due to the libraries used by
Satpy these operations can't always be done in the same way. This guide acts
as a starting point for new Satpy developers in transitioning from NumPy's
array operations to Satpy's operations, although they are very similar.

To provide the most functionality for users, Satpy uses the `xarray `_
library's :class:`~xarray.DataArray` object as the main representation for
its data. DataArray objects can also benefit from the `dask `_ library. The
combination of these libraries allows Satpy to easily distribute operations
over multiple workers, lazily evaluate operations, and keep track of
additional metadata and coordinate information.

XArray
------

.. code-block:: python

    import xarray as xr

:class:`XArray's DataArray ` is now the standard data structure for arrays
in satpy. They allow the array to define dimensions, coordinates, and
attributes (that we use for metadata).

To create such an array, you can do for example

.. code-block:: python

    my_dataarray = xr.DataArray(my_data, dims=['y', 'x'],
                                coords={'x': np.arange(...)},
                                attrs={'sensor': 'olci'})

where ``my_data`` can be a regular numpy array, a numpy memmap, or, if you
want to keep things lazy, a dask array (more on dask later). Satpy uses dask
arrays with all of its DataArrays.

Dimensions
**********

In satpy, the dimensions of the arrays should include:

- `x` for the x or column or pixel dimension
- `y` for the y or row or line dimension
- `bands` for composites
- `time` can also be provided, but we have limited support for it at the moment.
  Use metadata for common cases (`start_time`, `end_time`)

Dimensions are accessible through :attr:`my_dataarray.dims `. To get the
size of a given dimension, use :attr:`~xarray.DataArray.sizes`:

.. code-block:: python

    my_dataarray.sizes['x']

Coordinates
***********

Coordinates can be defined for those dimensions when it makes sense:

- `x` and `y`: Usually defined when the data's area is an
  :class:`~pyresample.geometry.AreaDefinition`, and they contain the
  projection coordinates in x and y.
- `bands`: Contain the letter of the color they represent, e.g.
  ``['R', 'G', 'B']`` for an RGB composite.

This then allows you to select, for example, a single band like this:

.. code-block:: python

    red = my_composite.sel(bands='R')

or even multiple bands:

.. code-block:: python

    red_and_blue = my_composite.sel(bands=['R', 'B'])

To access the coordinates of the data array, use the following syntax:

.. code-block:: python

    x_coords = my_dataarray['x']
    my_dataarray['y'] = np.arange(...)

Most of the time, satpy will fill the coordinates for you, so you just need
to provide the dimension names.

Attributes
**********

To save metadata, we use the :attr:`~xarray.DataArray.attrs` dictionary.

.. code-block:: python

    my_dataarray.attrs['platform_name'] = 'Sentinel-3A'

Some metadata that should always be present in our dataarrays:

- ``area``: the area of the dataset. This should be handled in the reader.
- ``start_time``, ``end_time``
- ``sensor``

Operations on DataArrays
************************

DataArrays work with regular arithmetic operations as one would expect of
e.g. numpy arrays, with the exception that using an operator on two
DataArrays requires both arrays to share the same dimensions, and coordinates
if those are defined.

For mathematical functions like cos or log, you can use numpy functions
directly and they will return a DataArray object:

.. code-block:: python

    import numpy as np

    cos_zen = np.cos(zen_xarray)

Masking data
************

In DataArrays, masked data is represented with NaN values. Hence the default
type is ``float64``, but ``float32`` also works in this case. XArray can't
handle masked data for integer arrays, but in satpy we try to use the special
``_FillValue`` attribute (in ``.attrs``) to handle this case. If you come
across a case where this isn't handled properly, contact us.

Masking data from a condition can be done with:

.. code-block:: python

    result = my_dataarray.where(my_dataarray > 5)

The result is then analogous to ``my_dataarray``, with values lower than or
equal to 5 replaced by NaNs.

Further reading
***************

http://xarray.pydata.org/en/stable/generated/xarray.DataArray.html#xarray.DataArray

Dask
----

.. code-block:: python

    import dask.array as da

The data part of the DataArrays we use in satpy is mostly dask Arrays. That
allows lazy and chunked operations for efficient processing.

Creation
********

From a numpy array
++++++++++++++++++

To create a dask array from a numpy array, one can call the
:func:`~dask.array.from_array` function:

.. code-block:: python

    darr = da.from_array(my_numpy_array, chunks=4096)

The *chunks* keyword tells dask the size of a chunk of data. If the numpy
array is 3-dimensional, the chunk size provided above means that one chunk
will be 4096x4096x4096 elements. To prevent this, one can provide a tuple:

.. code-block:: python

    darr = da.from_array(my_numpy_array, chunks=(4096, 1024, 2))

meaning a chunk will be 4096x1024x2 elements in size. Even more detailed
sizes for the chunks can be provided if needed, see the
:doc:`dask documentation `.
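
To make the effect of the *chunks* keyword concrete, the short sketch below
creates both variants and inspects the resulting chunk layout (the array here
is a hypothetical stand-in; only its shape matters):

.. code-block:: python

    import dask.array as da
    import numpy as np

    # A hypothetical 3-dimensional array; the contents are arbitrary.
    my_numpy_array = np.zeros((8192, 8192, 3), dtype=np.float32)

    # A single integer applies to every dimension; dask caps each chunk at
    # the size of the array, so chunks here are 4096x4096x3 elements.
    darr = da.from_array(my_numpy_array, chunks=4096)
    print(darr.chunks)  # ((4096, 4096), (4096, 4096), (3,))

    # A tuple gives one chunk size per dimension: 4096x1024x2 element chunks.
    darr = da.from_array(my_numpy_array, chunks=(4096, 1024, 2))
    print(darr.chunks)  # ((4096, 4096), (1024, 1024, ..., 1024), (2, 1))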
From memmaps or other lazy objects
++++++++++++++++++++++++++++++++++

To avoid loading the data into memory when creating a dask array, other kinds
of arrays can be passed to :func:`~dask.array.from_array`. For example, a
numpy memmap allows dask to know where the data is, and the data will only be
loaded when the actual values need to be computed. Another example is an HDF5
variable read with h5py.

Procedural generation of data
+++++++++++++++++++++++++++++

Some procedural generation functions are available in dask, e.g.
:func:`~dask.array.meshgrid`, :func:`~dask.array.arange`, or
:func:`random.random `.

From XArray to Dask and back
****************************

Certain operations are easiest to perform on dask arrays by themselves,
especially when certain functions are only available from the dask library.
In these cases you can operate on the dask array beneath the DataArray and
create a new DataArray when done. Note dask arrays do not support in-place
operations. In-place operations on xarray DataArrays will reassign the dask
array automatically.

.. code-block:: python

    dask_arr = my_dataarray.data
    dask_arr = dask_arr + 1
    # ... other non-xarray operations ...
    new_dataarr = xr.DataArray(dask_arr,
                               dims=my_dataarray.dims,
                               attrs=my_dataarray.attrs.copy())

Or if the operation should be assigned back to the original DataArray (if and
only if the data is the same size):

.. code-block:: python

    my_dataarray.data = dask_arr

Operations and how to get actual results
****************************************

Regular arithmetic operations are provided, and generate another dask array.

>>> arr1 = da.random.uniform(0, 1000, size=(1000, 1000), chunks=100)
>>> arr2 = da.random.uniform(0, 1000, size=(1000, 1000), chunks=100)
>>> arr1 + arr2
dask.array<add, shape=(1000, 1000), dtype=float64, chunksize=(100, 100)>

In order to compute the actual data during testing, use the
:func:`~dask.compute` method. In normal Satpy operations you will want the
data to be evaluated as late as possible to improve performance, so `compute`
should only be used when needed.

>>> (arr1 + arr2).compute()
array([[  898.08811639,  1236.96107629,  1154.40255292, ...,
         1537.50752674,  1563.89278664,   433.92598566],
       [ 1657.43843608,  1063.82390257,  1265.08687916, ...,
         1103.90421234,  1721.73564104,  1276.5424228 ],
       [ 1620.11393216,   212.45816261,   771.99348555, ...,
         1675.6561068 ,   585.89123159,   935.04366354],
       ...,
       [ 1533.93265862,  1103.33725432,   191.30794159, ...,
          520.00434673,   426.49238283,  1090.61323471],
       [  816.6108554 ,  1526.36292498,   412.91953023, ...,
          982.71285721,   699.087645  ,  1511.67447362],
       [ 1354.6127365 ,  1671.24591983,  1144.64848757, ...,
         1247.37586051,  1656.50487092,   978.28184726]])

Dask also provides `cos`, `log` and other mathematical functions, that you
can use with :func:`da.cos ` and :func:`da.log `. However, since satpy uses
xarrays as its standard data structure, prefer the xarray functions when
possible (they in turn call the dask counterparts when possible).

Wrapping non-dask friendly functions
************************************

Some operations are not supported by dask yet or are difficult to convert to
take full advantage of dask's multithreaded operations. In these cases you
can wrap a function to run on an entire dask array when it is being computed
and pass on the result. Note that this requires fully computing all of the
dask inputs to the function; they are passed as numpy arrays, or in the case
of an XArray DataArray as a DataArray with a numpy array underneath. You
should *NOT* use dask functions inside the delayed function.
.. code-block:: python

    import dask
    import dask.array as da

    def _complex_operation(my_arr1, my_arr2):
        return my_arr1 + my_arr2

    delayed_result = dask.delayed(_complex_operation)(my_dask_arr1, my_dask_arr2)
    # to create a dask array to use in the future
    my_new_arr = da.from_delayed(delayed_result, dtype=my_dask_arr1.dtype, shape=my_dask_arr1.shape)

Dask Delayed objects can also be computed with ``delayed_result.compute()``
if the array is not needed or if the function doesn't return an array.

http://dask.pydata.org/en/latest/array-api.html#dask.array.from_delayed

Map dask blocks to non-dask friendly functions
**********************************************

If the complicated operation you need to perform can be vectorized and does
not need the entire data array to do its operations you can use
:func:`da.map_blocks ` to get better performance than creating a delayed
function. Similar to delayed functions, the inputs to the function are fully
computed DataArrays or numpy arrays, but contain only an individual chunk of
the dask array at a time. Note that ``map_blocks`` must be provided dask
arrays and won't function properly on XArray DataArrays. It is recommended
that the function object passed to ``map_blocks`` **not** be an internal
function (a function defined inside another function) or it may be
unserializable and can cause issues in some environments.

.. code-block:: python

    my_new_arr = da.map_blocks(_complex_operation, my_dask_arr1, my_dask_arr2, dtype=my_dask_arr1.dtype)

Helpful functions
*****************

- :func:`~dask.array.core.map_blocks`
- :func:`~dask.array.map_overlap`
- :func:`~dask.array.core.atop`
- :func:`~dask.array.store`
- :func:`~dask.array.tokenize`
- :func:`~dask.compute`
- :doc:`delayed`
- :func:`~dask.array.rechunk`
- :attr:`~dask.array.Array.vindex`

satpy-0.20.0/doc/source/doi_role.py

# -*- coding: utf-8 -*-
"""
    doilinks
    ~~~~~~~~~~~~~~~~~~~
    Extension to add links to DOIs. With this extension you can use e.g.
    :doi:`10.1016/S0022-2836(05)80360-2` in your documents. This will create
    a link to a DOI resolver (``https://doi.org/10.1016/S0022-2836(05)80360-2``).
    The link caption will be the raw DOI. You can also give an explicit
    caption, e.g. :doi:`Basic local alignment search tool <10.1016/S0022-2836(05)80360-2>`.

    :copyright: Copyright 2015 Jon Lund Steffensen. Based on extlinks by
        the Sphinx team.
    :license: BSD.
""" from docutils import nodes, utils from sphinx.util.nodes import split_explicit_title def doi_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) full_url = 'https://doi.org/' + part if not has_explicit_title: title = 'DOI:' + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def arxiv_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) full_url = 'https://arxiv.org/abs/' + part if not has_explicit_title: title = 'arXiv:' + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def setup_link_role(app): app.add_role('doi', doi_role) app.add_role('DOI', doi_role) app.add_role('arXiv', arxiv_role) app.add_role('arxiv', arxiv_role) def setup(app): app.connect('builder-inited', setup_link_role) return {'version': '0.1', 'parallel_read_safe': True} satpy-0.20.0/doc/source/enhancements.rst000066400000000000000000000040071362525524100202060ustar00rootroot00000000000000============ Enhancements ============ Built-in enhancement methods ============================ stretch ------- The most basic operation is to stretch the image so that the data fits to the output format. There are many different ways to stretch the data, which are configured by giving them in `kwargs` dictionary, like in the example above. The default, if nothing else is defined, is to apply a linear stretch. For more details, see below. linear ****** As the name suggests, linear stretch converts the input values to output values in a linear fashion. By default, 5% of the data is cut on both ends of the scale, but these can be overridden with ``cutoffs=(0.005, 0.005)`` argument:: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear cutoffs: (0.003, 0.005) .. note:: This enhancement is currently not optimized for dask because it requires getting minimum/maximum information for the entire data array. crude ***** The crude stretching is used to limit the input values to a certain range by clipping the data. This is followed by a linear stretch with no cutoffs specified (see above). Example:: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [100, 100, 100] It is worth noting that this stretch can also be used to _invert_ the data by giving larger values to the min_stretch than to max_stretch. histogram ********* gamma ----- invert ------ crefl_scaling ------------- cira_stretch ------------ lookup ------ colorize -------- palettize --------- three_d_effect -------------- The `three_d_effect` enhancement adds an 3D look to an image by convolving with a 3x3 kernel. User can adjust the strength of the effect by determining the weight (default: 1.0). Example:: - name: 3d_effect method: !!python/name:satpy.enhancements.three_d_effect kwargs: weight: 1.0 btemp_threshold --------------- satpy-0.20.0/doc/source/examples.rst000066400000000000000000000047041362525524100173600ustar00rootroot00000000000000Examples ======== Satpy examples are available as Jupyter Notebooks on the `pytroll-examples `_ git repository. They include python code, PNG images, and descriptions of what the example is doing. Below is a list of some of the examples and a brief summary. 
Additional examples can be found at the repository mentioned above or as
explanations in the various sections of this documentation.

.. list-table::
    :header-rows: 1

    * - Name
      - Description
    * - `Quickstart with MSG data `_
      - Satpy quickstart for loading and processing satellite data, with MSG
        data in this example
    * - `Cartopy Plot `_
      - Plot a single VIIRS SDR granule using Cartopy and matplotlib
    * - `Himawari-8 AHI True Color `_
      - Generate and resample a rayleigh corrected true color RGB from
        Himawari-8 AHI data
    * - `Sentinel-3 OLCI True Color `_
      - Reading OLCI data from Sentinel 3 with Pytroll/Satpy
    * - `Sentinel 2 MSI true color `_
      - Reading MSI data from Sentinel 2 with Pytroll/Satpy
    * - `Suomi-NPP VIIRS SDR True Color `_
      - Generate a rayleigh corrected true color RGB from VIIRS I- and M-bands
    * - `Aqua/Terra MODIS True Color `_
      - Generate and resample a rayleigh corrected true color RGB from MODIS
    * - `Sentinel 1 SAR-C False Color `_
      - Generate a false color composite RGB from SAR-C polarized datasets
    * - `Level 2 EARS-NWC cloud products `_
      - Reading Level 2 EARS-NWC cloud products
    * - `Level 2 MAIA cloud products `_
      - Reading Level 2 MAIA cloud products

satpy-0.20.0/doc/source/faq.rst

FAQ
===

Below you'll find frequently asked questions, performance tips, and other
topics that don't really fit into the rest of the Satpy documentation.

If you have any other questions that aren't answered here feel free to make
an issue on GitHub or talk to us on the Slack team or mailing list. See the
:ref:`contributing ` documentation for more information.

.. contents:: Topics
    :depth: 1
    :local:

Why is Satpy slow on my powerful machine?
-----------------------------------------

Satpy depends heavily on the dask library for its performance. However, on
some systems dask's default settings can actually hurt performance. By
default dask will create a "worker" for each logical core on your system. In
most systems you have twice as many logical cores (also known as threaded
cores) as physical cores. Managing and communicating with all of these
workers can slow down dask, especially when they aren't all being used by
most Satpy calculations. One option is to limit the number of workers by
doing the following at the **top** of your python code:

.. code-block:: python

    import dask
    from multiprocessing.pool import ThreadPool
    dask.config.set(pool=ThreadPool(8))

    # all other Satpy imports and code

This will limit dask to using 8 workers. Typically numbers between 4 and 8
are good starting points.

The number of workers can also be set with an environment variable before
running the python script, so no code modification is necessary:

.. code-block:: bash

    DASK_NUM_WORKERS=4 python myscript.py

Similarly, if you have many workers processing large chunks of data you may
be using much more memory than you expect. If you limit the number of workers
*and* the size of the data chunks being processed by each worker you can
reduce the overall memory usage. Default chunk size can be configured in
Satpy by setting the following environment variable:

.. code-block:: bash

    export PYTROLL_CHUNK_SIZE=2048

This could also be set inside python using ``os.environ``, but must be set
**before** Satpy is imported. This value defaults to 4096, meaning each chunk
of data will be 4096 rows by 4096 columns. In the future this value will be
easier to set from within python.

Why are multiple CPUs used even with one worker?
------------------------------------------------

Many of the underlying Python libraries use math libraries like BLAS and
LAPACK written in C or FORTRAN, and they are often compiled to be
multithreaded. If necessary, it is possible to force the number of threads
they use by setting an environment variable:

.. code-block:: bash

    OMP_NUM_THREADS=2 python myscript.py

What is the difference between number of workers and number of threads?
------------------------------------------------------------------------

The above questions handle two different stages of parallelization: Dask
workers and math library threading. The number of Dask workers affects how
many separate tasks are started, effectively determining how many chunks of
the data are processed at the same time. The more workers are in use, the
higher the memory usage will be. The number of threads determines how many
parallel computations are run for the chunk handled by each worker. This has
minimal effect on memory usage. The optimal setup is often a mix of these two
settings, for example

.. code-block:: bash

    DASK_NUM_WORKERS=2 OMP_NUM_THREADS=4 python myscript.py

would create two workers, and each of them would process its chunk of data
using 4 threads when calling the underlying math libraries.

How do I avoid memory errors?
-----------------------------

If your environment is using many dask workers, it may be using more memory
than it needs to. See the "Why is Satpy slow on my powerful machine?"
question above for more information on changing Satpy's memory usage.

Reducing GDAL output size?
--------------------------

Sometimes GDAL-based products, like geotiffs, can be much larger than
expected. This can be caused by GDAL's internal memory caching conflicting
with dask's chunking of the data arrays. Modern versions of GDAL default to
using 5% of available memory for holding on to data before compressing it and
writing it to disk. On more powerful systems (~128GB of memory) this is
usually not a problem. However, on low memory systems this may mean that GDAL
is only compressing a small amount of data before writing it to disk. This
results in poor compression and large overhead from the many small compressed
areas. One solution is to increase the chunk size used by dask, but this can
result in poor performance during computation. Another solution is to
increase ``GDAL_CACHEMAX``, an environment variable that GDAL uses. This
defaults to ``"5%"``, but can be increased::

    export GDAL_CACHEMAX="15%"

For more information see `GDAL's documentation `_.

How do I use multi-threaded compression when writing GeoTIFFs?
---------------------------------------------------------------

The GDAL library's GeoTIFF driver has a lot of options for changing how your
GeoTIFF is formatted and written. One of the most important ones when it
comes to writing GeoTIFFs is using multiple threads to compress your data. By
default Satpy will use DEFLATE compression, which can be slower to compress
than other options out there, but faster to read. GDAL gives us the option to
control the number of threads used during compression by specifying the
``num_threads`` option. This option defaults to ``1``, but it is recommended
to set this to at least the number of dask workers you use.
Do this by adding ``num_threads`` to your `save_dataset` or `save_datasets`
call::

    scn.save_datasets(base_dir='/tmp', tiled=True, num_threads=8)

Here we're also using the `tiled` option to store our data as "tiles" instead
of "stripes", which is another way to get more efficient compression of our
GeoTIFF image. See the `GDAL GeoTIFF documentation `_ for more information
on the creation options available, including other compression choices.

satpy-0.20.0/doc/source/index.rst

=====================
Satpy's Documentation
=====================

Satpy is a python library for reading, manipulating, and writing data from
remote-sensing earth-observing meteorological satellite instruments. Satpy
provides users with readers that convert geophysical parameters from various
file formats to the common Xarray :class:`~xarray.DataArray` and
:class:`~xarray.Dataset` classes for easier interoperability with other
scientific python libraries. Satpy also provides interfaces for creating RGB
(Red/Green/Blue) images and other composite types by combining data from
multiple instrument bands or products. Various atmospheric corrections and
visual enhancements are provided for improving the usefulness and quality of
output images. Output data can be written to multiple output file formats
such as PNG, GeoTIFF, and CF standard NetCDF files. Satpy also allows users
to resample data to geographic projected grids (areas).

Satpy is maintained by the open source `Pytroll `_ group.

The Satpy library acts as a high-level abstraction layer on top of other
libraries maintained by the Pytroll group including:

- `Pyresample `_
- `PySpectral `_
- `Trollimage `_
- `Pycoast `_
- `Pydecorate `_
- `python-geotiepoints `_
- `pyninjotiff `_

Go to the Satpy project_ page for source code and downloads.

Satpy is designed to be easily extendable to support any meteorological
satellite by the creation of plugins (readers, compositors, writers, etc).
The table at the bottom of this page shows the input formats supported by the
base Satpy installation.

.. note::

    Satpy's interfaces are not guaranteed stable and may change until version
    1.0 when backwards compatibility will be a main focus.

.. versionchanged:: 0.20.0

    Dropped Python 2 support.

.. _project: http://github.com/pytroll/satpy

.. toctree::
    :maxdepth: 2

    overview
    install
    data_download
    examples
    quickstart
    readers
    composites
    resample
    enhancements
    writers
    multiscene
    dev_guide/index

.. toctree::
    :maxdepth: 1

    Satpy API
    faq

.. _reader_table:

.. list-table:: Satpy Readers
    :header-rows: 1
    :widths: 45 25 30

    * - Description
      - Reader name
      - Status
    * - MSG (Meteosat 8 to 11) SEVIRI data in HRIT format
      - `seviri_l1b_hrit`
      - Nominal
    * - MSG (Meteosat 8 to 11) SEVIRI data in native format
      - `seviri_l1b_native`
      - HRV full disk data cannot be remapped.
    * - MSG (Meteosat 8 to 11) SEVIRI data in netCDF format
      - `seviri_l1b_nc`
      - | HRV channel not supported, incomplete metadata
        | in the files. EUMETSAT has been notified.
    * - MSG (Meteosat 8 to 11) L2 products in BUFR format
      - `seviri_l2_bufr`
      - AMV BUFR products not available yet.
    * - Himawari 8 and 9 AHI data in HSD format
      - `ahi_hsd`
      - Nominal
    * - Himawari 8 and 9 AHI data in HRIT format
      - `ahi_hrit`
      - Nominal
    * - MTSAT-1R JAMI data in JMA HRIT format
      - `jami_hrit`
      - Beta
    * - MTSAT-2 Imager data in JMA HRIT format
      - `mtsat2-imager_hrit`
      - Beta
    * - GOES-R imager data in netcdf format
      - `abi_l1b`
      - Nominal
    * - NOAA GOES-R ABI L2+ products in netcdf format
      - `abi_l2_nc`
      - Beta
    * - GOES 11 to 15 imager data in HRIT format
      - `goes-imager_hrit`
      - Nominal
    * - GOES 8 to 15 imager data in netCDF format (from NOAA CLASS)
      - `goes-imager_nc`
      - Beta
    * - Electro-L N2 MSU-GS data in HRIT format
      - `electrol_hrit`
      - Nominal
    * - NOAA 15 to 19, Metop A to C AVHRR data in AAPP format
      - `avhrr_l1b_aapp`
      - Nominal
    * - Metop A to C AVHRR in native level 1 format
      - `avhrr_l1b_eps`
      - Nominal
    * - Tiros-N, NOAA 7 to 19 AVHRR data in GAC and LAC format
      - `avhrr_l1b_gaclac`
      - Nominal
    * - NOAA 15 to 19 AVHRR data in raw HRPT format
      - `avhrr_l1b_hrpt`
      - In development
    * - GCOM-W1 AMSR2 data in HDF5 format
      - `amsr2_l1b`
      - Nominal
    * - MTG FCI Level 1C data for Full Disk High Spectral Imagery (FDHSI) in netcdf format
      - `fci_l1c_fdhsi`
      - In development
    * - CALIPSO CALIOP Level 2 Cloud Layer data (v3) in EOS-hdf4 format
      - `caliop_l2_cloud`
      - In development
    * - Terra and Aqua MODIS data in EOS-hdf4 level-1 format as produced by IMAPP and IPOPP or downloaded from LAADS
      - `modis_l1b`
      - Nominal
    * - NWCSAF GEO 2016 products in netCDF4 format (limited to SEVIRI)
      - `nwcsaf-geo`
      - In development
    * - NWCSAF PPS 2014, 2018 products in netCDF4 format
      - `nwcsaf-pps_nc`
      - | No support yet for remapped netCDF products.
        | Only the standard swath based output is supported.
        | CPP products not supported yet
    * - Sentinel-1 A and B SAR-C data in SAFE format
      - `sar-c_safe`
      - Nominal
    * - Sentinel-2 A and B MSI data in SAFE format
      - `msi_safe`
      - Nominal
    * - Sentinel-3 A and B OLCI Level 1B data in netCDF4 format
      - `olci_l1b`
      - Nominal
    * - Sentinel-3 A and B OLCI Level 2 data in netCDF4 format
      - `olci_l2`
      - Nominal
    * - Sentinel-3 A and B SLSTR data in netCDF4 format
      - `slstr_l1b`
      - In development
    * - OSISAF SST data in GHRSST (netcdf) format
      - `ghrsst_l3c_sst`
      - In development
    * - NUCAPS EDR Retrieval in NetCDF4 format
      - `nucaps`
      - Nominal
    * - NOAA Level 2 ACSPO SST data in netCDF4 format
      - `acspo`
      - Nominal
    * - GEOstationary Cloud Algorithm Test-bed (GEOCAT)
      - `geocat`
      - Nominal
    * - The Clouds from AVHRR Extended (CLAVR-x)
      - `clavrx`
      - Nominal
    * - SNPP VIIRS data in HDF5 SDR format
      - `viirs_sdr`
      - Nominal
    * - SNPP VIIRS data in netCDF4 L1B format
      - `viirs_l1b`
      - Nominal
    * - SNPP VIIRS SDR data in HDF5 Compact format
      - `viirs_compact`
      - Nominal
    * - AAPP MAIA VIIRS and AVHRR products in hdf5 format
      - `maia`
      - Nominal
    * - VIIRS EDR Active Fires data in NetCDF4 & CSV .txt format
      - `viirs_edr_active_fires`
      - Beta
    * - VIIRS EDR Flood data in hdf4 format
      - `viirs_edr_flood`
      - Beta
    * - GRIB2 format
      - `grib`
      - Beta
    * - SCMI ABI L1B format
      - `abi_l1b_scmi`
      - Beta
    * - VIRR data in HDF5 format
      - `virr_l1b`
      - Beta
    * - MERSI-2 L1B data in HDF5 format
      - `mersi2_l1b`
      - Beta
    * - FY-4A AGRI L1 data in HDF5 format
      - `agri_l1`
      - Beta
    * - Vaisala Global Lightning Dataset GLD360 data in ASCII format
      - `vaisala_gld360`
      - Beta
    * - TROPOMI L2 data in NetCDF4 format
      - `tropomi_l2`
      - Beta
    * - Hydrology SAF products in GRIB format
      - `hsaf_grib`
      - | Beta
        | Only the h03, h03b, h05 and h05B products are supported at present
    * - GEO-KOMPSAT-2 AMI L1B data in NetCDF4 format
      - `ami_l1b`
      - Beta
    * - GOES-R GLM Gridded Level 2 in NetCDF4 format
      - `glm_l2`
      - Beta
    * - Sentinel-3 SLSTR SST data in NetCDF4 format
      - `slstr_l2`
      - Beta
    * - IASI level 2 SO2 in BUFR format
      - `iasi_l2_so2_bufr`
      - Beta

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

satpy-0.20.0/doc/source/install.rst

=========================
Installation Instructions
=========================

Pip-based Installation
======================

Satpy is available from the Python Packaging Index (PyPI). A sandbox
environment for `satpy` can be created using `Virtualenv `_.

To install the `satpy` package and the minimum amount of python dependencies:

.. code-block:: bash

    $ pip install satpy

Additional dependencies can be installed as "extras" and are grouped by
reader, writer, or feature added. Extras available can be found in the
`setup.py `_ file. They can be installed individually:

.. code-block:: bash

    $ pip install satpy[viirs_sdr]

Or all at once, although this isn't recommended due to the large number of
dependencies:

.. code-block:: bash

    $ pip install satpy[all]

Conda-based Installation
========================

Starting with version 0.9, Satpy is available from the conda-forge channel.
If you have not configured your conda environment to search conda-forge
already then do:

.. code-block:: bash

    $ conda config --add channels conda-forge

Then to install Satpy into your current environment run:

.. code-block:: bash

    $ conda install satpy

.. note::

    Satpy only automatically installs the dependencies needed to process the
    most common use cases. Additional dependencies may need to be installed
    with conda or pip if import errors are encountered.

Ubuntu System Python Installation
=================================

To install Satpy on an Ubuntu system we recommend using virtual environments
to separate Satpy and its dependencies from the rest of the system. Note that
these instructions require using "sudo" privileges which may not be available
to all users and can be very dangerous. The following instructions attempt to
install some Satpy dependencies using the Ubuntu `apt` package manager to
ease installation. Replace `/path/to/pytroll-env` with the environment to be
created.

.. code-block:: bash

    $ sudo apt-get install python-pip python-gdal
    $ sudo pip install virtualenv
    $ virtualenv /path/to/pytroll-env
    $ source /path/to/pytroll-env/bin/activate
    $ pip install satpy

satpy-0.20.0/doc/source/multiscene.rst

MultiScene (Experimental)
=========================

Scene objects in Satpy are meant to represent a single geographic region at a
specific single instant in time or range of time. This means they are not
suited for handling multiple orbits of polar-orbiting satellite data,
multiple time steps of geostationary satellite data, or other special data
cases. To handle these cases Satpy provides the `MultiScene` class. The below
examples will walk through some basic use cases of the MultiScene.

.. warning::

    These features are still early in development and may change over time as
    more user feedback is received and more features are added.

Blending Scenes in MultiScene
-----------------------------

Scenes contained in a MultiScene can be combined in different ways.

Stacking scenes
***************

The code below uses the :meth:`~satpy.multiscene.MultiScene.blend` method of
the ``MultiScene`` object to stack two separate orbits from a VIIRS sensor.
By default the ``blend`` method will use the :func:`~satpy.multiscene.stack`
function which uses the first dataset as the base of the image and then
iteratively overlays the remaining datasets on top.

>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> from pyresample.geometry import AreaDefinition
>>> my_area = AreaDefinition(...)
>>> scenes = [
...     Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')),
...     Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5'))
... ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['I04'])
>>> new_mscn = mscn.resample(my_area)
>>> blended_scene = new_mscn.blend()
>>> blended_scene.save_datasets()

Timeseries
**********

Using the :meth:`~satpy.multiscene.MultiScene.blend` method with the
:func:`~satpy.multiscene.timeseries` function will combine multiple scenes
from different time slots by time. A single `Scene` with each dataset/channel
extended by the time dimension will be returned. If used together with the
:meth:`~satpy.scene.Scene.to_geoviews` method, creation of interactive
timeseries Bokeh plots is possible.

>>> from satpy import Scene, MultiScene
>>> from satpy.multiscene import timeseries
>>> from glob import glob
>>> from pyresample.geometry import AreaDefinition
>>> my_area = AreaDefinition(...)
>>> scenes = [
...     Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')),
...     Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5'))
... ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['I04'])
>>> new_mscn = mscn.resample(my_area)
>>> blended_scene = new_mscn.blend(blend_function=timeseries)
>>> blended_scene['I04']
dask.array
Coordinates:
  * time     (time) datetime64[ns] 2012-02-25T18:01:24.570942 2012-02-25T18:02:49.975797
Dimensions without coordinates: y, x

Saving frames of an animation
-----------------------------

The MultiScene can take "frames" of data and join them together in a single
animation movie file. Saving animations requires the `imageio` python library
and for most available formats the ``ffmpeg`` command line tool suite should
also be installed. The below example saves a series of GOES-EAST ABI channel
1 and channel 2 frames to MP4 movie files.

We can use the :meth:`MultiScene.from_files ` class method to create a
`MultiScene` from a series of files. This uses the
:func:`~satpy.readers.group_files` utility function to group files by start
time.

>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b')
>>> mscn.load(['C01', 'C02'])
>>> mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2)

.. versionadded:: 0.12

    The ``from_files`` and ``group_files`` functions were added in Satpy 0.12.
    See below for an alternative solution.

This will compute one video frame (image) at a time and write it to the
MPEG-4 video file. For users with more powerful systems it is possible to use
the ``client`` and ``batch_size`` keyword arguments to compute multiple
frames in parallel using the dask ``distributed`` library (if installed). See
the :doc:`dask distributed ` documentation for information on creating a
``Client`` object. If working on a cluster you may want to use
:doc:`dask jobqueue ` to take advantage of multiple nodes at a time.

For older versions of Satpy we can manually create the `Scene` objects used.
The :func:`~glob.glob` function and for loops are used to group files into
Scene objects that, if used individually, could load the data we want.
The code below is equivalent to the ``from_files`` code above:

>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> scene_files = []
>>> for time_step in ['1800', '1810', '1820', '1830']:
...     scene_files.append(glob('/data/abi/day_1/*C0[12]*s???????{}*.nc'.format(time_step)))
>>> scenes = [
...     Scene(reader='abi_l1b', filenames=files) for files in sorted(scene_files)
... ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['C01', 'C02'])
>>> mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2)

.. warning::

    GIF images, although supported, are not recommended due to the large file
    sizes that can be produced from only a few frames.

Saving multiple scenes
----------------------

The ``MultiScene`` object includes a
:meth:`~satpy.multiscene.MultiScene.save_datasets` method for saving the data
from multiple Scenes to disk. By default this will operate on one Scene at a
time, but similar to the ``save_animation`` method above this method can
accept a dask distributed ``Client`` object via the ``client`` keyword
argument to compute scenes in parallel (see documentation above). Note
however that some writers, like the ``geotiff`` writer, do not support
multi-process operations at this time and will fail when used with dask
distributed. To save multiple Scenes use:

>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b')
>>> mscn.load(['C01', 'C02'])
>>> mscn.save_datasets(base_dir='/path/for/output')

satpy-0.20.0/doc/source/overview.rst

========
Overview
========

Satpy is designed to provide easy access to common operations for processing
meteorological remote sensing data. Any details needed to perform these
operations are configured internally to Satpy, meaning users should not have
to worry about *how* something is done, only ask for what they want. Most of
the features provided by Satpy can be configured by keyword arguments (see
the :doc:`API Documentation ` or other specific section for more details).
For more complex customizations or added features Satpy uses a set of
configuration files that can be modified by the user. The various components
and concepts of Satpy are described below. The :doc:`quickstart` guide also
provides simple example code for the available features of Satpy.

Scene
=====

Satpy provides most of its functionality through the
:class:`~satpy.scene.Scene` class. This acts as a container for the datasets
being operated on and provides methods for acting on those datasets. It
attempts to reduce the amount of low-level knowledge needed by the user while
still providing a pythonic interface to the functionality underneath.

A Scene object represents a single geographic region of data, typically at a
single continuous time range. It is possible to combine Scenes to form a
Scene with multiple regions or multiple time observations, but it is not
guaranteed that all functionality works in these situations.

DataArrays
==========

Satpy's lower-level container for data is the :class:`xarray.DataArray`. For
historical reasons DataArrays are often referred to as "Datasets" in Satpy.
These objects act similarly to normal numpy arrays, but add additional
metadata and attributes for describing the data. Metadata is stored in a
``.attrs`` dictionary and named dimensions can be accessed in a ``.dims``
attribute, along with other attributes.
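
As a small illustration, a DataArray resembling those produced by Satpy's
readers could be built by hand like this (a minimal sketch; the metadata
values are placeholders, and real Satpy DataArrays carry much more metadata):

.. code-block:: python

    import dask.array as da
    import xarray as xr

    # A dask-backed DataArray with named dimensions and metadata attributes.
    my_data_arr = xr.DataArray(
        da.zeros((512, 512), chunks=256),
        dims=('y', 'x'),
        attrs={'platform_name': 'Meteosat-11', 'sensor': 'seviri'},
    )
    print(my_data_arr.dims)                    # ('y', 'x')
    print(my_data_arr.attrs['platform_name'])  # Meteosat-11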
In most use cases these objects can be operated on like normal NumPy arrays
with special care taken to make sure the metadata dictionary contains
expected values. See the XArray documentation for more info on handling
:class:`xarray.DataArray` objects. Additionally, Satpy uses a special form of
DataArrays where data is stored in :class:`dask.array.Array` objects, which
allows Satpy to perform multi-threaded lazy operations, vastly improving the
performance of processing. For help on developing with dask and xarray see
:doc:`dev_guide/xarray_migration` or the documentation for the specific
project.

To uniquely identify ``DataArray`` objects Satpy uses `DatasetID`. A
``DatasetID`` consists of various pieces of available metadata. This usually
includes `name` and `wavelength` as identifying metadata, but also includes
`resolution`, `calibration`, `polarization`, and additional `modifiers` to
further distinguish one dataset from another.

.. warning::

    XArray includes other object types called "Datasets". These are different
    from the "Datasets" mentioned in Satpy.

Reading
=======

One of the biggest advantages of using Satpy is the large number of input
file formats that it can read. It encapsulates this functionality into
individual :doc:`readers`. Satpy Readers handle all of the complexity of
reading whatever format they represent. Meteorological satellite file formats
can be extremely complex and formats are rarely reused across satellites or
instruments. No matter the format, Satpy's Reader interface is meant to
provide a consistent data loading interface while still providing flexibility
to add new complex file formats.

Compositing
===========

Many users of satellite imagery combine multiple sensor channels to bring out
certain features of the data. This includes using one dataset to enhance
another, combining 3 or more datasets into an RGB image, or any other
combination of datasets. Satpy comes with a lot of common composite
combinations built-in and allows the user to request them like any other
dataset. Satpy also makes it possible to create your own custom composites
and have Satpy treat them like any other dataset. See :doc:`composites` for
more information.

Resampling
==========

Satellite imagery data comes in two forms when it comes to geolocation:
native satellite swath coordinates and uniform gridded projection
coordinates. It is also common to see the channels from a single sensor in
multiple resolutions, making it complicated to combine or compare the
datasets. Many use cases of satellite data require the data to be in a
certain projection other than the native projection or to have output imagery
cover a specific area of interest. Satpy makes it easy to resample datasets
to allow for users to combine them or grid them to these projections or areas
of interest. Satpy uses the PyTroll `pyresample` package to provide nearest
neighbor, bilinear, or elliptical weighted averaging resampling methods. See
:doc:`resample` for more information.

Enhancements
============

When making images from satellite data the data has to be manipulated to be
compatible with the output image format and still look good to the human eye.
Satpy calls this functionality "enhancing" the data, also commonly called
scaling or stretching the data. This process can become complicated not just
because of how subjective the quality of an image can be, but also because of
historical expectations of forecasters and other users for how the data
should look.
Satpy tries to hide the complexity of all the possible enhancement methods
from the user and just provide the best looking image by default. Satpy still
makes it possible to customize these procedures, but in most cases it
shouldn't be necessary. See the documentation on :doc:`writers` for more
information on what's possible for output formats and enhancing images.

Writing
=======

Satpy is designed to make data loading, manipulating, and analysis easy.
However, the best way to get satellite imagery data out to as many users as
possible is to make it easy to save it in multiple formats. Satpy allows
users to save data in image formats like PNG or GeoTIFF as well as data file
formats like NetCDF. Each format's complexity is hidden behind the interface
of individual Writer objects and includes keyword arguments for accessing
specific format features like compression and output data type. See the
:doc:`writers` documentation for the available writers and how to use them.

satpy-0.20.0/doc/source/quickstart.rst

==========
Quickstart
==========

Loading and accessing data
==========================

To work with weather satellite data you must create a
:class:`~satpy.scene.Scene` object. Satpy does not currently provide an
interface to download satellite data; it assumes that the data is on a local
hard disk already. In order for Satpy to get access to the data the Scene
must be told what files to read and what :ref:`Satpy Reader ` should read
them:

>>> from satpy import Scene
>>> from glob import glob
>>> filenames = glob("/home/a001673/data/satellite/Meteosat-10/seviri/lvl1.5/2015/04/20/HRIT/*201504201000*")
>>> global_scene = Scene(reader="seviri_l1b_hrit", filenames=filenames)

To load data from the files use the :meth:`Scene.load ` method. Printing the
Scene object will list each of the :class:`xarray.DataArray` objects
currently loaded:

>>> global_scene.load([0.6, 0.8, 10.8])
>>> print(global_scene)
dask.array
Coordinates:
  * x        (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ...
  * y        (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ...
Attributes:
    satellite_longitude:  0.0
    sensor:               seviri
    satellite_altitude:   35785831.0
    platform_name:        Meteosat-11
    standard_name:        brightness_temperature
    units:                K
    wavelength:           (9.8, 10.8, 11.8)
    satellite_latitude:   0.0
    start_time:           2018-02-28 15:00:10.814000
    end_time:             2018-02-28 15:12:43.956000
    area:                 Area ID: some_area_name\nDescription: On-the-fly ar...
    name:                 IR_108
    resolution:           3000.40316582
    calibration:          brightness_temperature
    polarization:         None
    level:                None
    modifiers:            ()
    ancillary_variables:  []
dask.array
Coordinates:
  * x        (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ...
  * y        (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ...
Attributes:
    satellite_longitude:  0.0
    sensor:               seviri
    satellite_altitude:   35785831.0
    platform_name:        Meteosat-11
    standard_name:        toa_bidirectional_reflectance
    units:                %
    wavelength:           (0.74, 0.81, 0.88)
    satellite_latitude:   0.0
    start_time:           2018-02-28 15:00:10.814000
    end_time:             2018-02-28 15:12:43.956000
    area:                 Area ID: some_area_name\nDescription: On-the-fly ar...
    name:                 VIS008
    resolution:           3000.40316582
    calibration:          reflectance
    polarization:         None
    level:                None
    modifiers:            ()
    ancillary_variables:  []
dask.array
Coordinates:
  * x        (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ...
  * y        (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ...
Attributes:
    satellite_longitude:  0.0
    sensor:               seviri
    satellite_altitude:   35785831.0
    platform_name:        Meteosat-11
    standard_name:        toa_bidirectional_reflectance
    units:                %
    wavelength:           (0.56, 0.635, 0.71)
    satellite_latitude:   0.0
    start_time:           2018-02-28 15:00:10.814000
    end_time:             2018-02-28 15:12:43.956000
    area:                 Area ID: some_area_name\nDescription: On-the-fly ar...
    name:                 VIS006
    resolution:           3000.40316582
    calibration:          reflectance
    polarization:         None
    level:                None
    modifiers:            ()
    ancillary_variables:  []

Satpy allows loading file data by wavelengths in micrometers (shown above) or
by channel name::

    >>> global_scene.load(["VIS006", "VIS008", "IR_108"])

To have a look at the available channels for loading from your
:class:`~satpy.scene.Scene` object use the
:meth:`~satpy.scene.Scene.available_dataset_names` method:

>>> global_scene.available_dataset_names()
['HRV', 'IR_108', 'IR_120', 'VIS006', 'WV_062', 'IR_039', 'IR_134', 'IR_097', 'IR_087', 'VIS008', 'IR_016', 'WV_073']

To access the loaded data use the wavelength or name:

>>> print(global_scene[0.6])

For more information on loading datasets by resolution, calibration, or other
advanced loading methods see the :doc:`readers` documentation.

Calculating measurement values and navigation coordinates
==========================================================

Once loaded, measurement values can be calculated from a DataArray within a
scene, using ``.values`` to get a fully calculated numpy array:

>>> vis006 = global_scene["VIS006"]
>>> vis006_meas = vis006.values

Note that for very large images, such as half-kilometer geostationary
imagery, calculated measurement arrays may require multiple gigabytes of
memory; using deferred computation and/or subsetting of datasets may be
preferred in such cases.

The 'area' attribute of the DataArray, if present, can be converted to
latitude and longitude arrays. For some instruments (typically
polar-orbiters), the ``get_lonlats()`` call may result in arrays needing an
additional ``.compute()`` call or ``.values`` extraction.

>>> vis006_lon, vis006_lat = vis006.attrs['area'].get_lonlats()

Visualizing data
================

To visualize loaded data in a pop-up window:

>>> global_scene.show(0.6)

Alternatively, if working in a Jupyter notebook the scene can be converted to
a `geoviews `_ object using the :meth:`~satpy.scene.Scene.to_geoviews`
method. The geoviews package is not a requirement of the base satpy install,
so in order to use this feature the user needs to install the geoviews
package separately.

>>> import holoviews as hv
>>> import geoviews as gv
>>> import geoviews.feature as gf
>>> gv.extension("bokeh", "matplotlib")
>>> %opts QuadMesh Image [width=600 height=400 colorbar=True] Feature [apply_ranges=False]
>>> %opts Image QuadMesh (cmap='RdBu_r')
>>> gview = global_scene.to_geoviews(vdims=[0.6])
>>> gview[::5,::5] * gf.coastline * gf.borders

Creating new datasets
=====================

Calculations based on loaded datasets/channels can easily be assigned to a
new dataset:

>>> global_scene["ndvi"] = (global_scene[0.8] - global_scene[0.6]) / (global_scene[0.8] + global_scene[0.6])
>>> global_scene.show("ndvi")

When doing calculations Xarray, by default, will drop all attributes, so
attributes need to be copied over by hand. The
:func:`~satpy.dataset.combine_metadata` function can assist with this task.
Assigning additional custom metadata is also possible.
>>> from satpy.dataset import combine_metadata
>>> scene['new_band'] = scene[0.8] / scene[0.6]
>>> scene['new_band'].attrs = combine_metadata(scene[0.8], scene[0.6])
>>> scene['new_band'].attrs['some_other_key'] = 'whatever_value_you_want'

Generating composites
=====================

Satpy comes with many composite recipes built-in and makes them loadable like
any other dataset:

>>> global_scene.load(['overview'])

To get a list of all available composites for the current scene:

>>> global_scene.available_composite_names()
['overview_sun', 'airmass', 'natural_color', 'night_fog', 'overview', 'green_snow', 'dust', 'fog', 'natural_color_raw', 'cloudtop', 'convection', 'ash']

Loading composites will load all necessary dependencies to make that
composite and unload them after the composite has been generated.

.. note::

    Some composites require datasets to be at the same resolution or shape.
    When this is the case the Scene object must be resampled before the
    composite can be generated (see below).

Resampling
==========

.. todo::

    Explain where and how to define new areas

In certain cases it may be necessary to resample datasets whether they come
from a file or are generated composites. Resampling is useful for mapping
data to a uniform grid, limiting input data to an area of interest, changing
from one projection to another, or for preparing datasets to be combined in a
composite (see above). For more details on resampling, different resampling
algorithms, and creating your own area of interest see the :doc:`resample`
documentation.

To resample a Satpy Scene:

>>> local_scene = global_scene.resample("eurol")

This creates a copy of the original ``global_scene`` with all loaded datasets
resampled to the built-in "eurol" area. Any composites that were requested,
but could not be generated, are automatically generated after resampling. The
new ``local_scene`` can now be used like the original ``global_scene`` for
working with datasets, saving them to disk or showing them on screen:

>>> local_scene.show('overview')
>>> local_scene.save_dataset('overview', './local_overview.tif')

Saving to disk
==============

To save all loaded datasets to disk as geotiff images:

>>> global_scene.save_datasets()

To save all loaded datasets to disk as PNG images:

>>> global_scene.save_datasets(writer='simple_image')

Or to save an individual dataset:

>>> global_scene.save_dataset('VIS006', 'my_nice_image.png')

Datasets are automatically scaled or "enhanced" to be compatible with the
output format and to provide the best looking image. For more information on
saving datasets and customizing enhancements see the documentation on
:doc:`writers`.

Slicing and subsetting scenes
=============================

Array slicing can be done at the scene level in order to get subsets with
consistent navigation throughout. Note that this does not take into account
scenes that may include channels at multiple resolutions, i.e. index slicing
does not account for dataset spatial resolution.

>>> scene_slice = global_scene[2000:2004, 2000:2004]
>>> vis006_slice = scene_slice['VIS006']
>>> vis006_slice_meas = vis006_slice.values
>>> vis006_slice_lon, vis006_slice_lat = vis006_slice.attrs['area'].get_lonlats()

To subset multi-resolution data consistently, use the
:meth:`~satpy.scene.Scene.crop` method.
>>> scene_llbox = global_scene.crop(ll_bbox=(-4.0, -3.9, 3.9, 4.0))
>>> vis006_llbox = scene_llbox['VIS006']
>>> vis006_llbox_meas = vis006_llbox.values
>>> vis006_llbox_lon, vis006_llbox_lat = vis006_llbox.attrs['area'].get_lonlats()

Troubleshooting
===============

When something goes wrong, a first step to take is to check that the latest
versions of satpy and its dependencies are installed. Satpy drags in a few
packages as dependencies by default, but each reader and writer has its own
dependencies, which can unfortunately be easy to miss when just doing a
regular `pip install`. To check for missing dependencies of the readers and
writers, a utility function called `check_satpy` can be used:

>>> from satpy.config import check_satpy
>>> check_satpy()

Due to the way Satpy works, producing as many datasets as possible, there are
times that behavior can be unexpected but with no exceptions raised. To help
troubleshoot these situations log messages can be turned on. To do this run
the following code before running any other Satpy code:

>>> from satpy.utils import debug_on
>>> debug_on()

satpy-0.20.0/doc/source/readers.rst

=======
Readers
=======

.. todo::

    How to read cloud products from NWCSAF software. (separate document?)

Satpy supports reading and loading data from many input file formats and
schemes. The :class:`~satpy.scene.Scene` object provides a simple interface
around all the complexity of these various formats through its ``load``
method. The following sections describe the different ways data can be
loaded, requested, or added to a Scene object.

Available Readers
=================

To get a list of available readers use the `available_readers` function::

    >>> from satpy import available_readers
    >>> available_readers()

Filter loaded files
===================

Coming soon...

Load data
=========

Datasets in Satpy are identified by certain pieces of metadata set during
data loading. These include `name`, `wavelength`, `calibration`,
`resolution`, `polarization`, and `modifiers`. Normally, once a ``Scene`` is
created, requesting datasets by `name` or `wavelength` is all that is
needed::

    >>> from satpy import Scene
    >>> scn = Scene(reader="seviri_l1b_hrit", filenames=filenames)
    >>> scn.load([0.6, 0.8, 10.8])
    >>> scn.load(['IR_120', 'IR_134'])

However, in many cases datasets are available in multiple spatial
resolutions, multiple calibrations (``brightness_temperature``,
``reflectance``, ``radiance``, etc), multiple polarizations, or have
corrections or other modifiers already applied to them. By default Satpy will
provide the version of the dataset with the highest resolution and the
highest level of calibration (brightness temperature or reflectance over
radiance). It is also possible to request one of these exact versions of a
dataset by using the :class:`~satpy.dataset.DatasetID` class::

    >>> from satpy import DatasetID
    >>> my_channel_id = DatasetID(name='IR_016', calibration='radiance')
    >>> scn.load([my_channel_id])
    >>> print(scn['IR_016'])

Or request multiple datasets at a specific calibration, resolution, or
polarization::

    >>> scn.load([0.6, 0.8], resolution=1000)

Or multiple calibrations::

    >>> scn.load([0.6, 10.8], calibration=['brightness_temperature', 'radiance'])

In the above case Satpy will load whatever dataset is available and matches
the specified parameters.
satpy-0.20.0/doc/source/readers.rst000066400000000000000000000216031362525524100171640ustar00rootroot00000000000000=======
Readers
=======

.. todo::

    How to read cloud products from NWCSAF software. (separate document?)

Satpy supports reading and loading data from many input file formats and
schemes. The :class:`~satpy.scene.Scene` object provides a simple interface
around all the complexity of these various formats through its ``load``
method. The following sections describe the different ways data can be
loaded, requested, or added to a Scene object.

Available Readers
=================

To get a list of available readers use the `available_readers` function::

    >>> from satpy import available_readers
    >>> available_readers()

Filter loaded files
===================

Coming soon...

Load data
=========

Datasets in Satpy are identified by certain pieces of metadata set during
data loading. These include `name`, `wavelength`, `calibration`,
`resolution`, `polarization`, and `modifiers`. Normally, once a ``Scene`` is
created, requesting datasets by `name` or `wavelength` is all that is
needed::

    >>> from satpy import Scene
    >>> scn = Scene(reader="seviri_l1b_hrit", filenames=filenames)
    >>> scn.load([0.6, 0.8, 10.8])
    >>> scn.load(['IR_120', 'IR_134'])

However, in many cases datasets are available in multiple spatial
resolutions, multiple calibrations (``brightness_temperature``,
``reflectance``, ``radiance``, etc.), multiple polarizations, or have
corrections or other modifiers already applied to them. By default Satpy
will provide the version of the dataset with the highest resolution and the
highest level of calibration (brightness temperature or reflectance over
radiance). It is also possible to request one of these exact versions of a
dataset by using the :class:`~satpy.dataset.DatasetID` class::

    >>> from satpy import DatasetID
    >>> my_channel_id = DatasetID(name='IR_016', calibration='radiance')
    >>> scn.load([my_channel_id])
    >>> print(scn['IR_016'])

Or request multiple datasets at a specific calibration, resolution, or
polarization::

    >>> scn.load([0.6, 0.8], resolution=1000)

Or multiple calibrations::

    >>> scn.load([0.6, 10.8], calibration=['brightness_temperature', 'radiance'])

In the above case Satpy will load whatever dataset is available and matches
the specified parameters. So the above ``load`` call would load the ``0.6``
(a visible/reflectance band) radiance data and the ``10.8`` (an IR band)
brightness temperature data.

For geostationary satellites that have the individual channel data separated
into several files (segments), the missing segments are padded by default to
the full disk area. This is done to simplify caching of resampling look-up
tables (see :doc:`resample` for more information). To disable this, the user
can pass the ``pad_data`` keyword argument when loading datasets::

    >>> scn.load([0.6, 10.8], pad_data=False)

.. note::

    If a dataset could not be loaded, no exception is raised. You must check
    the :meth:`scn.missing_datasets <satpy.scene.Scene.missing_datasets>`
    property for any ``DatasetID`` that could not be loaded.

To find out what datasets are available from a reader from the files that
were provided to the ``Scene`` use
:meth:`~satpy.scene.Scene.available_dataset_ids`::

    >>> scn.available_dataset_ids()

Or :meth:`~satpy.scene.Scene.available_dataset_names` for just the string
names of Datasets::

    >>> scn.available_dataset_names()

Search for local files
======================

Satpy provides a utility :func:`~satpy.readers.find_files_and_readers` for
searching for files in a base directory matching various search parameters.
This function discovers files based on filename patterns. It returns a
dictionary mapping reader name to a list of the filenames it supports. This
dictionary can be passed directly to the :class:`~satpy.scene.Scene`
initialization.

::

    >>> from satpy import find_files_and_readers, Scene
    >>> from datetime import datetime
    >>> my_files = find_files_and_readers(base_dir='/data/viirs_sdrs',
    ...                                   reader='viirs_sdr',
    ...                                   start_time=datetime(2017, 5, 1, 18, 1, 0),
    ...                                   end_time=datetime(2017, 5, 1, 18, 30, 0))
    >>> scn = Scene(filenames=my_files)

See the :func:`~satpy.readers.find_files_and_readers` documentation for more
information on the possible parameters.

Metadata
========

.. _dataset_metadata:

The datasets held by a scene also provide vital metadata such as dataset
name, units, observation time, etc. The following attributes are
standardized across all readers:

* ``name``, ``wavelength``, ``resolution``, ``polarization``,
  ``calibration``, ``level``, ``modifiers``: See
  :class:`satpy.dataset.DatasetID`.
* ``start_time``: Left boundary of the time interval covered by the dataset.
* ``end_time``: Right boundary of the time interval covered by the dataset.
* ``area``: :class:`~pyresample.geometry.AreaDefinition` or
  :class:`~pyresample.geometry.SwathDefinition` if data is geolocated. Areas
  are used for gridded projected data and Swaths when data must be described
  by individual longitude/latitude coordinates. See the Coordinates section
  below.
* ``orbital_parameters``: Dictionary of orbital parameters describing the
  satellite's position.

  * For *geostationary* satellites it is described using the following
    scalar attributes:

    * ``satellite_actual_longitude/latitude/altitude``: Current position of
      the satellite at the time of observation in geodetic coordinates
      (i.e. altitude is relative and normal to the surface of the
      ellipsoid).
    * ``satellite_nominal_longitude/latitude/altitude``: Center of the
      station keeping box (a confined area in which the satellite is
      actively maintained using maneuvers). In between major maneuvers, when
      the satellite is permanently moved, the nominal position is constant.
    * ``nadir_longitude/latitude``: Intersection of the instrument's nadir
      with the surface of the earth. May differ from the actual satellite
      position, if the instrument is pointing slightly off the axis
      (satellite, earth-center). If available, this should be used to
      compute viewing angles etc. Otherwise, use the actual satellite
      position.
    * ``projection_longitude/latitude/altitude``: Projection center of the
      re-projected data. This should be used to compute lat/lon coordinates.
      Note that the projection center can differ considerably from the
      actual satellite position. For example MSG-1 was at times positioned
      at 3.4 degrees west, while the image data was re-projected to 0
      degrees.
    * [DEPRECATED] ``satellite_longitude/latitude/altitude``: Current
      position of the satellite at the time of observation in geodetic
      coordinates.

    .. note:: Longitudes and latitudes are given in degrees, altitude in
       meters. For use in pyorbital, the altitude has to be converted to
       kilometers, see for example
       :func:`pyorbital.orbital.get_observer_look`.

  * For *polar orbiting* satellites the readers usually provide coordinates
    and viewing angles of the swath as ancillary datasets. Additional
    metadata related to the satellite position include:

    * ``tle``: Two-Line Element (TLE) set used to compute the satellite's
      orbit

* ``raw_metadata``: Raw, unprocessed metadata from the reader.

Note that the above attributes are not necessarily available for each
dataset.
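As a quick illustration, these attributes can be inspected directly on a
loaded dataset. The channel name below is only an example, and as noted
above the ``orbital_parameters`` keys depend on the reader::

    >>> ir108 = scn['IR_108']  # example channel name
    >>> print(ir108.attrs['start_time'], ir108.attrs['units'])
    >>> orb = ir108.attrs.get('orbital_parameters', {})
    >>> print(orb.get('satellite_nominal_longitude'))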
Coordinates
===========

Each :class:`~xarray.DataArray` produced by Satpy has several Xarray
coordinate variables added to it.

* ``x`` and ``y``: Projection coordinates for gridded and projected data.
  By default ``y`` and ``x`` are the preferred **dimensions** for all 2D
  data, but these **coordinates** are only added for gridded (non-swath)
  data. For 1D data only the ``y`` dimension may be specified.
* ``crs``: A :class:`~pyproj.crs.CRS` object defining the Coordinate
  Reference System for the data. Requires pyproj 2.0 or later to be
  installed. This is stored as a scalar array by Xarray so it must be
  accessed by doing ``crs = my_data_arr.coords['crs'].item()``. For swath
  data this defaults to a ``longlat`` CRS using the WGS84 datum.
* ``longitude``: Array of longitude coordinates for swath data.
* ``latitude``: Array of latitude coordinates for swath data.

Readers are free to define any coordinates in addition to the ones above
that are automatically added. Other possible coordinates you may see:

* ``acq_time``: Instrument data acquisition time per scan or row of data.
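For example, assuming a gridded dataset is loaded (the ``overview``
composite here is just a placeholder), these coordinates are accessed like
any other Xarray coordinate::

    >>> data_arr = scn['overview']
    >>> x_coords = data_arr.coords['x']      # projection x coordinates
    >>> y_coords = data_arr.coords['y']      # projection y coordinates
    >>> crs = data_arr.coords['crs'].item()  # scalar CRS object (pyproj 2.0+)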
Adding a Reader to Satpy
========================

This is described in the developer guide, see :doc:`dev_guide/custom_reader`.

Implemented readers
===================

xRIT-based readers
------------------

.. automodule:: satpy.readers.hrit_base

.. automodule:: satpy.readers.seviri_l1b_hrit

.. automodule:: satpy.readers.hrit_jma

.. automodule:: satpy.readers.goes_imager_hrit

.. automodule:: satpy.readers.electrol_hrit

hdf-eos based readers
---------------------

.. automodule:: satpy.readers.modis_l1b

.. automodule:: satpy.readers.modis_l2
satpy-0.20.0/doc/source/resample.rst000066400000000000000000000001011362525524100173350ustar00rootroot00000000000000==========
Resampling
==========

.. automodule:: satpy.resample
satpy-0.20.0/doc/source/writers.rst000066400000000000000000000106751362525524100172450ustar00rootroot00000000000000=======
Writers
=======

Satpy makes it possible to save datasets in multiple formats. For details on
additional arguments and features available for a specific Writer see the
table below. Most use cases will want to save datasets using the
:meth:`~satpy.scene.Scene.save_datasets` method::

    >>> scn.save_datasets(writer='simple_image')

The ``writer`` parameter defaults to using the ``geotiff`` writer. Two
common parameters across almost all Writers are ``filename`` and
``base_dir``, which help automate saving files with custom filenames::

    >>> scn.save_datasets(
    ...     filename='{name}_{start_time:%Y%m%d_%H%M%S}.tif',
    ...     base_dir='/tmp/my_output_dir')

.. versionchanged:: 0.10

    The `file_pattern` keyword argument was renamed to `filename` to match
    the `save_dataset` method's keyword argument.

.. _writer_table:

.. list-table:: Satpy Writers
    :header-rows: 1

    * - Description
      - Writer name
      - Status
      - Examples
    * - GeoTIFF
      - :class:`geotiff <satpy.writers.geotiff.GeoTIFFWriter>`
      - Nominal
      -
    * - Simple Image (PNG, JPEG, etc)
      - :class:`simple_image <satpy.writers.simple_image.PillowWriter>`
      - Nominal
      -
    * - NinJo TIFF (using ``pyninjotiff`` package)
      - :class:`ninjotiff <satpy.writers.ninjotiff.NinjoTIFFWriter>`
      - Nominal
      -
    * - NetCDF (Standard CF)
      - :class:`cf <satpy.writers.cf_writer.CFWriter>`
      - Pre-alpha
      - :mod:`Usage example <satpy.writers.cf_writer>`
    * - AWIPS II Tiled SCMI NetCDF4
      - :class:`scmi <satpy.writers.scmi.SCMIWriter>`
      - Beta
      -

Available Writers
=================

To get a list of available writers use the `available_writers` function::

    >>> from satpy import available_writers
    >>> available_writers()

Colorizing and Palettizing using user-supplied colormaps
========================================================

.. note::

    In the future this functionality will be added to the ``Scene`` object.

It is possible to create single channel "composites" that are then colorized
using users' own colormaps. The colormaps are Numpy arrays with shape
(num, 3); see the example below for how to create the mapping file(s).

This example creates a 2-color colormap, and the colors are interpolated
between the defined temperature ranges. Beyond those limits the image is
clipped to the specified colors.

    >>> import numpy as np
    >>> from satpy.composites import BWCompositor
    >>> from satpy.enhancements import colorize
    >>> from satpy.writers import to_image
    >>> arr = np.array([[0, 0, 0], [255, 255, 255]])
    >>> np.save("/tmp/binary_colormap.npy", arr)
    >>> compositor = BWCompositor("test", standard_name="colorized_ir_clouds")
    >>> composite = compositor((local_scene[10.8], ))
    >>> img = to_image(composite)
    >>> kwargs = {"palettes": [{"filename": "/tmp/binary_colormap.npy",
    ...                         "min_value": 223.15, "max_value": 303.15}]}
    >>> colorize(img, **kwargs)
    >>> img.show()

Similarly it is possible to use discrete values without color interpolation
using `palettize()` instead of `colorize()`. You can define several
colormaps and ranges in the `palettes` list and they are merged together.
See trollimage_ documentation for more information on how colormaps and
color ranges are merged.

The above example can be used in enhancements YAML config like this:

.. code-block:: yaml

  hot_or_cold:
    standard_name: hot_or_cold
    operations:
      - name: colorize
        method: &colorizefun !!python/name:satpy.enhancements.colorize ''
        kwargs:
          palettes:
            - {filename: /tmp/binary_colormap.npy, min_value: 223.15, max_value: 303.15}

.. _trollimage: http://trollimage.readthedocs.io/en/latest/

Saving multiple Scenes in one go
================================

As mentioned earlier, it is possible to save `Scene` datasets directly using
the :meth:`~satpy.scene.Scene.save_datasets` method. However, sometimes it
is beneficial to collect more `Scene`\ s together and process and save them
all at once. ::

    >>> from satpy.writers import compute_writer_results
    >>> res1 = scn.save_datasets(filename="/tmp/{name}.png",
    ...                          writer='simple_image',
    ...                          
compute=False) >>> res2 = scn.save_datasets(filename="/tmp/{name}.tif", ... writer='geotiff', ... compute=False) >>> results = [res1, res2] >>> compute_writer_results(results) satpy-0.20.0/satpy/000077500000000000000000000000001362525524100140765ustar00rootroot00000000000000satpy-0.20.0/satpy/__init__.py000066400000000000000000000037421362525524100162150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Satpy Package initializer. """ import os from pkg_resources import get_distribution, DistributionNotFound try: __version__ = get_distribution(__name__).version except DistributionNotFound: # package is not installed pass CHUNK_SIZE = int(os.getenv('PYTROLL_CHUNK_SIZE', 4096)) # Order of "highest" calibration from highest to lowest DEFAULT_CALIBRATION_ORDER = [ 'brightness_temperature', 'reflectance', 'radiance', 'counts', 'gamma', 'sigma_nought', 'beta_nought', ] CALIBRATION_ORDER = os.getenv('PYTROLL_CALIBRATION_ORDER', None) if CALIBRATION_ORDER is None: CALIBRATION_ORDER = DEFAULT_CALIBRATION_ORDER else: CALIBRATION_ORDER = [x.strip() for x in CALIBRATION_ORDER.split(' ')] # convert to a dictionary of priority for faster access (0 higher priority) CALIBRATION_ORDER = {cal: idx for idx, cal in enumerate(CALIBRATION_ORDER)} from satpy.utils import get_logger # noqa from satpy.dataset import DatasetID, DATASET_KEYS # noqa from satpy.readers import (DatasetDict, find_files_and_readers, # noqa available_readers) # noqa from satpy.writers import available_writers # noqa from satpy.scene import Scene # noqa from satpy.multiscene import MultiScene # noqa log = get_logger('satpy') satpy-0.20.0/satpy/composites/000077500000000000000000000000001362525524100162635ustar00rootroot00000000000000satpy-0.20.0/satpy/composites/__init__.py000066400000000000000000001756061362525524100204130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Base classes for composite objects.""" import logging import os import time import warnings from weakref import WeakValueDictionary import dask.array as da import numpy as np import six import xarray as xr import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader from satpy.config import CONFIG_PATH, config_search_paths, recursive_dict_update from satpy.config import get_environ_ancpath from satpy.dataset import DATASET_KEYS, DatasetID, MetadataObject, combine_metadata from satpy.readers import DatasetDict from satpy.utils import sunzen_corr_cos, atmospheric_path_length_correction, get_satpos from satpy.writers import get_enhanced_image try: from pyspectral.near_infrared_reflectance import Calculator except ImportError: Calculator = None try: from pyorbital.astronomy import sun_zenith_angle except ImportError: sun_zenith_angle = None LOG = logging.getLogger(__name__) NEGLIBLE_COORDS = ['time'] """Keywords identifying non-dimensional coordinates to be ignored during composite generation.""" class IncompatibleAreas(Exception): """Error raised upon compositing things of different shapes.""" pass class IncompatibleTimes(Exception): """Error raised upon compositing things from different times.""" pass class CompositorLoader(object): """Read composites using the configuration files on disk.""" def __init__(self, ppp_config_dir=None): """Initialize the compositor loader.""" if ppp_config_dir is None: ppp_config_dir = CONFIG_PATH self.modifiers = {} self.compositors = {} self.ppp_config_dir = ppp_config_dir def load_sensor_composites(self, sensor_name): """Load all compositor configs for the provided sensor.""" config_filename = sensor_name + ".yaml" LOG.debug("Looking for composites config file %s", config_filename) composite_configs = config_search_paths( os.path.join("composites", config_filename), self.ppp_config_dir, check_exists=True) if not composite_configs: LOG.debug("No composite config found called {}".format( config_filename)) return self._load_config(composite_configs) def get_compositor(self, key, sensor_names): """Get the modifier for *sensor_names*.""" for sensor_name in sensor_names: try: return self.compositors[sensor_name][key] except KeyError: continue raise KeyError("Could not find compositor '{}'".format(key)) def get_modifier(self, key, sensor_names): """Get the modifier for *sensor_names*.""" for sensor_name in sensor_names: try: return self.modifiers[sensor_name][key] except KeyError: continue raise KeyError("Could not find modifier '{}'".format(key)) def load_compositors(self, sensor_names): """Load all compositor configs for the provided sensors. Args: sensor_names (list of strings): Sensor names that have matching ``sensor_name.yaml`` config files. Returns: (comps, mods): Where `comps` is a dictionary: sensor_name -> composite ID -> compositor object And `mods` is a dictionary: sensor_name -> modifier name -> (modifier class, modifiers options) Note that these dictionaries are copies of those cached in this object. 
""" comps = {} mods = {} for sensor_name in sensor_names: if sensor_name not in self.compositors: self.load_sensor_composites(sensor_name) if sensor_name in self.compositors: comps[sensor_name] = DatasetDict( self.compositors[sensor_name].copy()) mods[sensor_name] = self.modifiers[sensor_name].copy() return comps, mods def _process_composite_config(self, composite_name, conf, composite_type, sensor_id, composite_config, **kwargs): compositors = self.compositors[sensor_id] modifiers = self.modifiers[sensor_id] try: options = conf[composite_type][composite_name] loader = options.pop('compositor') except KeyError: if composite_name in compositors or composite_name in modifiers: return conf raise ValueError("'compositor' missing or empty in {0}. Option keys = {1}".format( composite_config, str(options.keys()))) options['name'] = composite_name for prereq_type in ['prerequisites', 'optional_prerequisites']: prereqs = [] dep_num = 0 for item in options.get(prereq_type, []): if isinstance(item, dict): # Handle in-line composites if 'compositor' in item: # Create an unique temporary name for the composite sub_comp_name = '_' + composite_name + '_dep_{}'.format(dep_num) dep_num += 1 # Minimal composite config sub_conf = {composite_type: {sub_comp_name: item}} self._process_composite_config( sub_comp_name, sub_conf, composite_type, sensor_id, composite_config, **kwargs) else: # we want this prerequisite to act as a query with # 'modifiers' being None otherwise it will be an empty # tuple item.setdefault('modifiers', None) key = DatasetID.from_dict(item) prereqs.append(key) else: prereqs.append(item) options[prereq_type] = prereqs if composite_type == 'composites': options.update(**kwargs) key = DatasetID.from_dict(options) comp = loader(**options) compositors[key] = comp elif composite_type == 'modifiers': modifiers[composite_name] = loader, options def _load_config(self, composite_configs, **kwargs): if not isinstance(composite_configs, (list, tuple)): composite_configs = [composite_configs] conf = {} for composite_config in composite_configs: with open(composite_config) as conf_file: conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader)) try: sensor_name = conf['sensor_name'] except KeyError: LOG.debug('No "sensor_name" tag found in %s, skipping.', composite_config) return sensor_id = sensor_name.split('/')[-1] sensor_deps = sensor_name.split('/')[:-1] compositors = self.compositors.setdefault(sensor_id, DatasetDict()) modifiers = self.modifiers.setdefault(sensor_id, {}) for sensor_dep in reversed(sensor_deps): if sensor_dep not in self.compositors or sensor_dep not in self.modifiers: self.load_sensor_composites(sensor_dep) if sensor_deps: compositors.update(self.compositors[sensor_deps[-1]]) modifiers.update(self.modifiers[sensor_deps[-1]]) for composite_type in ['modifiers', 'composites']: if composite_type not in conf: continue for composite_name in conf[composite_type]: self._process_composite_config(composite_name, conf, composite_type, sensor_id, composite_config, **kwargs) def check_times(projectables): """Check that *projectables* have compatible times.""" times = [] for proj in projectables: try: if proj['time'].size and proj['time'][0] != 0: times.append(proj['time'][0].values) else: break # right? except KeyError: # the datasets don't have times break except IndexError: # time is a scalar if proj['time'].values != 0: times.append(proj['time'].values) else: break else: # Is there a more gracious way to handle this ? 
if np.max(times) - np.min(times) > np.timedelta64(1, 's'): raise IncompatibleTimes else: mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times) return mid_time def sub_arrays(proj1, proj2): """Substract two DataArrays and combine their attrs.""" attrs = combine_metadata(proj1.attrs, proj2.attrs) if (attrs.get('area') is None and proj1.attrs.get('area') is not None and proj2.attrs.get('area') is not None): raise IncompatibleAreas res = proj1 - proj2 res.attrs = attrs return res class CompositeBase(MetadataObject): """Base class for all compositors and modifiers.""" def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): """Initialise the compositor.""" # Required info kwargs["name"] = name kwargs["prerequisites"] = prerequisites or [] kwargs["optional_prerequisites"] = optional_prerequisites or [] super(CompositeBase, self).__init__(**kwargs) def __call__(self, datasets, optional_datasets=None, **info): """Generate a composite.""" raise NotImplementedError() def __str__(self): """Stringify the object.""" from pprint import pformat return pformat(self.attrs) def __repr__(self): """Represent the object.""" from pprint import pformat return pformat(self.attrs) def apply_modifier_info(self, origin, destination): """Apply the modifier info from *origin* to *destination*.""" o = getattr(origin, 'attrs', origin) d = getattr(destination, 'attrs', destination) for k in DATASET_KEYS: if k == 'modifiers': d[k] = self.attrs[k] elif d.get(k) is None: if self.attrs.get(k) is not None: d[k] = self.attrs[k] elif o.get(k) is not None: d[k] = o[k] def match_data_arrays(self, data_arrays): """Match data arrays so that they can be used together in a composite.""" self.check_geolocation(data_arrays) return self.drop_coordinates(data_arrays) def drop_coordinates(self, data_arrays): """Drop neglible non-dimensional coordinates.""" new_arrays = [] for ds in data_arrays: drop = [coord for coord in ds.coords if coord not in ds.dims and any([neglible in coord for neglible in NEGLIBLE_COORDS])] if drop: new_arrays.append(ds.drop(drop)) else: new_arrays.append(ds) return new_arrays def check_geolocation(self, data_arrays): """Check that the geolocations of the *data_arrays* are compatible.""" if len(data_arrays) == 1: return if 'x' in data_arrays[0].dims and \ not all(x.sizes['x'] == data_arrays[0].sizes['x'] for x in data_arrays[1:]): raise IncompatibleAreas("X dimension has different sizes") if 'y' in data_arrays[0].dims and \ not all(x.sizes['y'] == data_arrays[0].sizes['y'] for x in data_arrays[1:]): raise IncompatibleAreas("Y dimension has different sizes") areas = [ds.attrs.get('area') for ds in data_arrays] if all(a is None for a in areas): return elif any(a is None for a in areas): raise ValueError("Missing 'area' attribute") if not all(areas[0] == x for x in areas[1:]): LOG.debug("Not all areas are the same in " "'{}'".format(self.attrs['name'])) raise IncompatibleAreas("Areas are different") def check_areas(self, data_arrays): """Check that the areas of the *data_arrays* are compatible.""" warnings.warn('satpy.composites.CompositeBase.check_areas is deprecated, use ' 'satpy.composites.CompositeBase.match_data_arrays instead') return self.match_data_arrays(data_arrays) class SunZenithCorrectorBase(CompositeBase): """Base class for sun zenith correction.""" coszen = WeakValueDictionary() def __init__(self, max_sza=95.0, **kwargs): """Collect custom configuration values. Args: max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. 
Default 95.0. """ self.max_sza = max_sza self.max_sza_cos = np.cos(np.deg2rad(max_sza)) if max_sza is not None else None super(SunZenithCorrectorBase, self).__init__(**kwargs) def __call__(self, projectables, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) vis = projectables[0] if vis.attrs.get("sunz_corrected"): LOG.debug("Sun zen correction already applied") return vis area_name = hash(vis.attrs['area']) key = (vis.attrs["start_time"], area_name) tic = time.time() LOG.debug("Applying sun zen correction") coszen = self.coszen.get(key) if coszen is None and len(projectables) == 1: # we were not given SZA, generate SZA then calculate cos(SZA) from pyorbital.astronomy import cos_zen LOG.debug("Computing sun zenith angles.") lons, lats = vis.attrs["area"].get_lonlats(chunks=vis.data.chunks) coords = {} if 'y' in vis.coords and 'x' in vis.coords: coords['y'] = vis['y'] coords['x'] = vis['x'] coszen = xr.DataArray(cos_zen(vis.attrs["start_time"], lons, lats), dims=['y', 'x'], coords=coords) if self.max_sza is not None: coszen = coszen.where(coszen >= self.max_sza_cos) self.coszen[key] = coszen elif coszen is None: # we were given the SZA, calculate the cos(SZA) coszen = np.cos(np.deg2rad(projectables[1])) self.coszen[key] = coszen proj = self._apply_correction(vis, coszen) proj.attrs = vis.attrs.copy() self.apply_modifier_info(vis, proj) LOG.debug("Sun-zenith correction applied. Computation time: %5.1f (sec)", time.time() - tic) return proj def _apply_correction(self, proj, coszen): raise NotImplementedError("Correction method shall be defined!") class SunZenithCorrector(SunZenithCorrectorBase): """Standard sun zenith correction using ``1 / cos(sunz)``. In addition to adjusting the provided reflectances by the cosine of the solar zenith angle, this modifier forces all reflectances beyond a solar zenith angle of ``max_sza`` to 0. It also gradually reduces the amount of correction done between ``correction_limit`` and ``max_sza``. If ``max_sza`` is ``None`` then a constant correction is applied to zenith angles beyond ``correction_limit``. To set ``max_sza`` to ``None`` in a YAML configuration file use: .. code-block:: yaml sunz_corrected: compositor: !!python/name:satpy.composites.SunZenithCorrector max_sza: !!null optional_prerequisites: - solar_zenith_angle """ def __init__(self, correction_limit=88., **kwargs): """Collect custom configuration values. Args: correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Pixels beyond this limit have a constant correction applied. Default 88. max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. Default 95.0. """ self.correction_limit = correction_limit super(SunZenithCorrector, self).__init__(**kwargs) def _apply_correction(self, proj, coszen): LOG.debug("Apply the standard sun-zenith correction [1/cos(sunz)]") return sunzen_corr_cos(proj, coszen, limit=self.correction_limit, max_sza=self.max_sza) class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase): """Special sun zenith correction with the method proposed by Li and Shibata. (2006): https://doi.org/10.1175/JAS3682.1 In addition to adjusting the provided reflectances by the cosine of the solar zenith angle, this modifier forces all reflectances beyond a solar zenith angle of `max_sza` to 0 to reduce noise in the final data. It also gradually reduces the amount of correction done between ``correction_limit`` and ``max_sza``. 
If ``max_sza`` is ``None`` then a constant correction is applied to zenith angles beyond ``correction_limit``. To set ``max_sza`` to ``None`` in a YAML configuration file use: .. code-block:: yaml effective_solar_pathlength_corrected: compositor: !!python/name:satpy.composites.EffectiveSolarPathLengthCorrector max_sza: !!null optional_prerequisites: - solar_zenith_angle """ def __init__(self, correction_limit=88., **kwargs): """Collect custom configuration values. Args: correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Pixels beyond this limit have a constant correction applied. Default 88. max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. Default 95.0. """ self.correction_limit = correction_limit super(EffectiveSolarPathLengthCorrector, self).__init__(**kwargs) def _apply_correction(self, proj, coszen): LOG.debug("Apply the effective solar atmospheric path length correction method by Li and Shibata") return atmospheric_path_length_correction(proj, coszen, limit=self.correction_limit, max_sza=self.max_sza) class PSPRayleighReflectance(CompositeBase): """Pyspectral-based rayleigh corrector for visible channels.""" _rayleigh_cache = WeakValueDictionary() def get_angles(self, vis): """Get the sun and satellite angles from the current dataarray.""" from pyorbital.astronomy import get_alt_az, sun_zenith_angle from pyorbital.orbital import get_observer_look lons, lats = vis.attrs['area'].get_lonlats(chunks=vis.data.chunks) lons = da.where(lons >= 1e30, np.nan, lons) lats = da.where(lats >= 1e30, np.nan, lats) sunalt, suna = get_alt_az(vis.attrs['start_time'], lons, lats) suna = np.rad2deg(suna) sunz = sun_zenith_angle(vis.attrs['start_time'], lons, lats) sat_lon, sat_lat, sat_alt = get_satpos(vis) sata, satel = get_observer_look( sat_lon, sat_lat, sat_alt / 1000.0, # km vis.attrs['start_time'], lons, lats, 0) satz = 90 - satel return sata, satz, suna, sunz def __call__(self, projectables, optional_datasets=None, **info): """Get the corrected reflectance when removing Rayleigh scattering. Uses pyspectral. """ from pyspectral.rayleigh import Rayleigh if not optional_datasets or len(optional_datasets) != 4: vis, red = self.match_data_arrays(projectables) sata, satz, suna, sunz = self.get_angles(vis) red.data = da.rechunk(red.data, vis.data.chunks) else: vis, red, sata, satz, suna, sunz = self.match_data_arrays( projectables + optional_datasets) sata, satz, suna, sunz = optional_datasets # get the dask array underneath sata = sata.data satz = satz.data suna = suna.data sunz = sunz.data # First make sure the two azimuth angles are in the range 0-360: sata = sata % 360. suna = suna % 360. 
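        # Then compute the relative sun-satellite azimuth difference and fold
        # it into the 0-180 degree range expected by the Rayleigh correction.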
ssadiff = da.absolute(suna - sata) ssadiff = da.minimum(ssadiff, 360 - ssadiff) del sata, suna atmosphere = self.attrs.get('atmosphere', 'us-standard') aerosol_type = self.attrs.get('aerosol_type', 'marine_clean_aerosol') rayleigh_key = (vis.attrs['platform_name'], vis.attrs['sensor'], atmosphere, aerosol_type) LOG.info("Removing Rayleigh scattering with atmosphere '{}' and aerosol type '{}' for '{}'".format( atmosphere, aerosol_type, vis.attrs['name'])) if rayleigh_key not in self._rayleigh_cache: corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'], atmosphere=atmosphere, aerosol_type=aerosol_type) self._rayleigh_cache[rayleigh_key] = corrector else: corrector = self._rayleigh_cache[rayleigh_key] try: refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, vis.attrs['name'], red.data) except (KeyError, IOError): LOG.warning("Could not get the reflectance correction using band name: %s", vis.attrs['name']) LOG.warning("Will try use the wavelength, however, this may be ambiguous!") refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, vis.attrs['wavelength'][1], red.data) proj = vis - refl_cor_band proj.attrs = vis.attrs self.apply_modifier_info(vis, proj) return proj class NIRReflectance(CompositeBase): """Get the reflective part of NIR bands.""" def __call__(self, projectables, optional_datasets=None, **info): """Get the reflectance part of an NIR channel. Not supposed to be used for wavelength outside [3, 4] µm. """ self._init_refl3x(projectables) _nir, _ = projectables refl = self._get_reflectance(projectables, optional_datasets) * 100 proj = xr.DataArray(refl, dims=_nir.dims, coords=_nir.coords, attrs=_nir.attrs) proj.attrs['units'] = '%' self.apply_modifier_info(_nir, proj) return proj def _init_refl3x(self, projectables): """Initialize the 3.x reflectance derivations.""" if not Calculator: LOG.info("Couldn't load pyspectral") raise ImportError("No module named pyspectral.near_infrared_reflectance") _nir, _tb11 = projectables self._refl3x = Calculator(_nir.attrs['platform_name'], _nir.attrs['sensor'], _nir.attrs['name']) def _get_reflectance(self, projectables, optional_datasets): """Calculate 3.x reflectance with pyspectral.""" _nir, _tb11 = projectables LOG.info('Getting reflective part of %s', _nir.attrs['name']) da_nir = _nir.data da_tb11 = _tb11.data sun_zenith = None tb13_4 = None for dataset in optional_datasets: wavelengths = dataset.attrs.get('wavelength', [100., 0, 0]) if (dataset.attrs.get('units') == 'K' and wavelengths[0] <= 13.4 <= wavelengths[2]): tb13_4 = dataset.data elif ("standard_name" in dataset.attrs and dataset.attrs["standard_name"] == "solar_zenith_angle"): sun_zenith = dataset.data # Check if the sun-zenith angle was provided: if sun_zenith is None: if sun_zenith_angle is None: raise ImportError("No module named pyorbital.astronomy") lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks) sun_zenith = sun_zenith_angle(_nir.attrs['start_time'], lons, lats) return self._refl3x.reflectance_from_tbs(sun_zenith, da_nir, da_tb11, tb_ir_co2=tb13_4) class NIREmissivePartFromReflectance(NIRReflectance): """Get the emissive par of NIR bands.""" def __call__(self, projectables, optional_datasets=None, **info): """Get the emissive part an NIR channel after having derived the reflectance. Not supposed to be used for wavelength outside [3, 4] µm. """ self._init_refl3x(projectables) # Derive the sun-zenith angles, and use the nir and thermal ir # brightness tempertures and derive the reflectance using # PySpectral. 
The reflectance is stored internally in PySpectral and # needs to be derived first in order to get the emissive part. _ = self._get_reflectance(projectables, optional_datasets) _nir, _ = projectables proj = xr.DataArray(self._refl3x.emissive_part_3x(), attrs=_nir.attrs, dims=_nir.dims, coords=_nir.coords) proj.attrs['units'] = 'K' self.apply_modifier_info(_nir, proj) return proj class PSPAtmosphericalCorrection(CompositeBase): """Correct for atmospheric effects.""" def __call__(self, projectables, optional_datasets=None, **info): """Get the atmospherical correction. Uses pyspectral. """ from pyspectral.atm_correction_ir import AtmosphericalCorrection band = projectables[0] if optional_datasets: satz = optional_datasets[0] else: from pyorbital.orbital import get_observer_look lons, lats = band.attrs['area'].get_lonlats(chunks=band.data.chunks) sat_lon, sat_lat, sat_alt = get_satpos(band) try: dummy, satel = get_observer_look(sat_lon, sat_lat, sat_alt / 1000.0, # km band.attrs['start_time'], lons, lats, 0) except KeyError: raise KeyError( 'Band info is missing some meta data!') satz = 90 - satel del satel LOG.info('Correction for limb cooling') corrector = AtmosphericalCorrection(band.attrs['platform_name'], band.attrs['sensor']) atm_corr = corrector.get_correction(satz, band.attrs['name'], band) proj = band - atm_corr proj.attrs = band.attrs self.apply_modifier_info(band, proj) return proj class CO2Corrector(CompositeBase): """Correct for CO2.""" def __call__(self, projectables, optional_datasets=None, **info): """CO2 correction of the brightness temperature of the MSG 3.9um channel. .. math:: T4_CO2corr = (BT(IR3.9)^4 + Rcorr)^0.25 Rcorr = BT(IR10.8)^4 - (BT(IR10.8)-dt_CO2)^4 dt_CO2 = (BT(IR10.8)-BT(IR13.4))/4.0 """ (ir_039, ir_108, ir_134) = projectables LOG.info('Applying CO2 correction') dt_co2 = (ir_108 - ir_134) / 4.0 rcorr = ir_108**4 - (ir_108 - dt_co2)**4 t4_co2corr = (ir_039**4 + rcorr).clip(0.0) ** 0.25 t4_co2corr.attrs = ir_039.attrs.copy() self.apply_modifier_info(ir_039, t4_co2corr) return t4_co2corr class DifferenceCompositor(CompositeBase): """Make the difference of two data arrays.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) info['name'] = self.attrs['name'] proj = projectables[0] - projectables[1] proj.attrs = info return proj class SingleBandCompositor(CompositeBase): """Basic single-band composite builder. This preserves all the attributes of the dataset it is derived from. """ def __call__(self, projectables, nonprojectables=None, **attrs): """Build the composite.""" if len(projectables) != 1: raise ValueError("Can't have more than one band in a single-band composite") data = projectables[0] new_attrs = data.attrs.copy() new_attrs.update({key: val for (key, val) in attrs.items() if val is not None}) resolution = new_attrs.get('resolution', None) new_attrs.update(self.attrs) if resolution is not None: new_attrs['resolution'] = resolution return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) class GenericCompositor(CompositeBase): """Basic colored composite builder.""" modes = {1: 'L', 2: 'LA', 3: 'RGB', 4: 'RGBA'} def __init__(self, name, common_channel_mask=True, **kwargs): """Collect custom configuration values. 
Args: common_channel_mask (bool): If True, mask all the channels with a mask that combines all the invalid areas of the given data. """ self.common_channel_mask = common_channel_mask super(GenericCompositor, self).__init__(name, **kwargs) @classmethod def infer_mode(cls, data_arr): """Guess at the mode for a particular DataArray.""" if 'mode' in data_arr.attrs: return data_arr.attrs['mode'] if 'bands' not in data_arr.dims: return cls.modes[1] if 'bands' in data_arr.coords and isinstance(data_arr.coords['bands'][0], str): return ''.join(data_arr.coords['bands'].values) return cls.modes[data_arr.sizes['bands']] def _concat_datasets(self, projectables, mode): try: data = xr.concat(projectables, 'bands', coords='minimal') data['bands'] = list(mode) except ValueError as e: LOG.debug("Original exception for incompatible areas: {}".format(str(e))) raise IncompatibleAreas return data def _get_sensors(self, projectables): sensor = set() for projectable in projectables: current_sensor = projectable.attrs.get("sensor", None) if current_sensor: if isinstance(current_sensor, (str, bytes, six.text_type)): sensor.add(current_sensor) else: sensor |= current_sensor if len(sensor) == 0: sensor = None elif len(sensor) == 1: sensor = list(sensor)[0] return sensor def __call__(self, projectables, nonprojectables=None, **attrs): """Build the composite.""" num = len(projectables) mode = attrs.get('mode') if mode is None: # num may not be in `self.modes` so only check if we need to mode = self.modes[num] if len(projectables) > 1: projectables = self.match_data_arrays(projectables) data = self._concat_datasets(projectables, mode) # Skip masking if user wants it or a specific alpha channel is given. if self.common_channel_mask and mode[-1] != 'A': data = data.where(data.notnull().all(dim='bands')) else: data = projectables[0] # if inputs have a time coordinate that may differ slightly between # themselves then find the mid time and use that as the single # time coordinate value if len(projectables) > 1: time = check_times(projectables) if time is not None and 'time' in data.dims: data['time'] = [time] new_attrs = combine_metadata(*projectables) # remove metadata that shouldn't make sense in a composite new_attrs["wavelength"] = None new_attrs.pop("units", None) new_attrs.pop('calibration', None) new_attrs.pop('modifiers', None) new_attrs.update({key: val for (key, val) in attrs.items() if val is not None}) resolution = new_attrs.get('resolution', None) new_attrs.update(self.attrs) if resolution is not None: new_attrs['resolution'] = resolution new_attrs["sensor"] = self._get_sensors(projectables) new_attrs["mode"] = mode return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) class FillingCompositor(GenericCompositor): """Make a regular RGB, filling the RGB bands with the first provided dataset's values.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) projectables[1] = projectables[1].fillna(projectables[0]) projectables[2] = projectables[2].fillna(projectables[0]) projectables[3] = projectables[3].fillna(projectables[0]) return super(FillingCompositor, self).__call__(projectables[1:], **info) class Filler(GenericCompositor): """Fix holes in projectable 1 with data from projectable 2.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) filled_projectable = 
projectables[0].fillna(projectables[1]) return super(Filler, self).__call__([filled_projectable], **info) class RGBCompositor(GenericCompositor): """Make a composite from three color bands (deprecated).""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" warnings.warn("RGBCompositor is deprecated, use GenericCompositor instead.", DeprecationWarning) if len(projectables) != 3: raise ValueError("Expected 3 datasets, got %d" % (len(projectables),)) return super(RGBCompositor, self).__call__(projectables, **info) class ColormapCompositor(GenericCompositor): """A compositor that uses colormaps.""" @staticmethod def build_colormap(palette, dtype, info): """Create the colormap from the `raw_palette` and the valid_range.""" from trollimage.colormap import Colormap sqpalette = np.asanyarray(palette).squeeze() / 255.0 if hasattr(palette, 'attrs') and 'palette_meanings' in palette.attrs: meanings = palette.attrs['palette_meanings'] iterator = zip(meanings, sqpalette) else: iterator = enumerate(sqpalette[:-1]) if dtype == np.dtype('uint8'): tups = [(val, tuple(tup)) for (val, tup) in iterator] colormap = Colormap(*tups) elif 'valid_range' in info: tups = [(val, tuple(tup)) for (val, tup) in iterator] colormap = Colormap(*tups) sf = info.get('scale_factor', np.array(1)) colormap.set_range( *info['valid_range'] * sf + info.get('add_offset', 0)) else: raise AttributeError("Data needs to have either a valid_range or be of type uint8" + " in order to be displayable with an attached color-palette!") return colormap, sqpalette class ColorizeCompositor(ColormapCompositor): """A compositor colorizing the data, interpolating the palette colors when needed.""" def __call__(self, projectables, **info): """Generate the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) # TODO: support datasets with palette to delegate this to the image # writer. data, palette = projectables colormap, palette = self.build_colormap(palette, data.dtype, data.attrs) r, g, b = colormap.colorize(np.asanyarray(data)) r[data.mask] = palette[-1][0] g[data.mask] = palette[-1][1] b[data.mask] = palette[-1][2] raise NotImplementedError("This compositor wasn't fully converted to dask yet.") # r = Dataset(r, copy=False, mask=data.mask, **data.attrs) # g = Dataset(g, copy=False, mask=data.mask, **data.attrs) # b = Dataset(b, copy=False, mask=data.mask, **data.attrs) # # return super(ColorizeCompositor, self).__call__((r, g, b), **data.attrs) class PaletteCompositor(ColormapCompositor): """A compositor colorizing the data, not interpolating the palette colors.""" def __call__(self, projectables, **info): """Generate the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) # TODO: support datasets with palette to delegate this to the image # writer. 
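        # The first projectable is the data to be palettized, the second one
        # is the palette itself.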
data, palette = projectables colormap, palette = self.build_colormap(palette, data.dtype, data.attrs) channels, colors = colormap.palettize(np.asanyarray(data.squeeze())) channels = palette[channels] fill_value = data.attrs.get('_FillValue', np.nan) if np.isnan(fill_value): mask = data.notnull() else: mask = data != data.attrs['_FillValue'] r = xr.DataArray(channels[:, :, 0].reshape(data.shape), dims=data.dims, coords=data.coords, attrs=data.attrs).where(mask) g = xr.DataArray(channels[:, :, 1].reshape(data.shape), dims=data.dims, coords=data.coords, attrs=data.attrs).where(mask) b = xr.DataArray(channels[:, :, 2].reshape(data.shape), dims=data.dims, coords=data.coords, attrs=data.attrs).where(mask) res = super(PaletteCompositor, self).__call__((r, g, b), **data.attrs) res.attrs['_FillValue'] = np.nan return res class DayNightCompositor(GenericCompositor): """A compositor that blends a day data with night data.""" def __init__(self, name, lim_low=85., lim_high=88., **kwargs): """Collect custom configuration values. Args: lim_low (float): lower limit of Sun zenith angle for the blending of the given channels lim_high (float): upper limit of Sun zenith angle for the blending of the given channels """ self.lim_low = lim_low self.lim_high = lim_high super(DayNightCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite.""" projectables = self.match_data_arrays(projectables) day_data = projectables[0] night_data = projectables[1] lim_low = np.cos(np.deg2rad(self.lim_low)) lim_high = np.cos(np.deg2rad(self.lim_high)) try: coszen = np.cos(np.deg2rad(projectables[2])) except IndexError: from pyorbital.astronomy import cos_zen LOG.debug("Computing sun zenith angles.") # Get chunking that matches the data try: chunks = day_data.sel(bands=day_data['bands'][0]).chunks except KeyError: chunks = day_data.chunks lons, lats = day_data.attrs["area"].get_lonlats(chunks=chunks) coszen = xr.DataArray(cos_zen(day_data.attrs["start_time"], lons, lats), dims=['y', 'x'], coords=[day_data['y'], day_data['x']]) # Calculate blending weights coszen -= np.min((lim_high, lim_low)) coszen /= np.abs(lim_low - lim_high) coszen = coszen.clip(0, 1) # Apply enhancements to get images day_data = enhance2dataset(day_data) night_data = enhance2dataset(night_data) # Adjust bands so that they match # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA day_data = add_bands(day_data, night_data['bands']) night_data = add_bands(night_data, day_data['bands']) # Replace missing channel data with zeros day_data = zero_missing_data(day_data, night_data) night_data = zero_missing_data(night_data, day_data) # Get merged metadata attrs = combine_metadata(day_data, night_data) # Blend the two images together data = (1 - coszen) * night_data + coszen * day_data data.attrs = attrs # Split to separate bands so the mode is correct data = [data.sel(bands=b) for b in data['bands']] return super(DayNightCompositor, self).__call__(data, **kwargs) def enhance2dataset(dset): """Return the enhancemened to dataset *dset* as an array.""" attrs = dset.attrs img = get_enhanced_image(dset) # Clip image data to interval [0.0, 1.0] data = img.data.clip(0.0, 1.0) data.attrs = attrs # remove 'mode' if it is specified since it may have been updated data.attrs.pop('mode', None) # update mode since it may have changed (colorized/palettize) data.attrs['mode'] = GenericCompositor.infer_mode(data) return data def add_bands(data, bands): """Add bands so that they match *bands*.""" # Add R, G and B 
bands, remove L band if 'L' in data['bands'].data and 'R' in bands.data: lum = data.sel(bands='L') # Keep 'A' if it was present if 'A' in data['bands']: alpha = data.sel(bands='A') new_data = (lum, lum, lum, alpha) new_bands = ['R', 'G', 'B', 'A'] mode = 'RGBA' else: new_data = (lum, lum, lum) new_bands = ['R', 'G', 'B'] mode = 'RGB' data = xr.concat(new_data, dim='bands', coords={'bands': new_bands}) data['bands'] = new_bands data.attrs['mode'] = mode # Add alpha band if 'A' not in data['bands'].data and 'A' in bands.data: new_data = [data.sel(bands=band) for band in data['bands'].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() alpha.data = da.ones((data.sizes['y'], data.sizes['x']), chunks=new_data[0].chunks) # Rename band to indicate it's alpha alpha['bands'] = 'A' new_data.append(alpha) new_data = xr.concat(new_data, dim='bands') new_data.attrs['mode'] = data.attrs['mode'] + 'A' data = new_data return data def zero_missing_data(data1, data2): """Replace NaN values with zeros in data1 if the data is valid in data2.""" nans = np.logical_and(np.isnan(data1), np.logical_not(np.isnan(data2))) return data1.where(~nans, 0) class RealisticColors(GenericCompositor): """Create a realistic colours composite for SEVIRI.""" def __call__(self, projectables, *args, **kwargs): """Generate the composite.""" projectables = self.match_data_arrays(projectables) vis06 = projectables[0] vis08 = projectables[1] hrv = projectables[2] try: ch3 = 3 * hrv - vis06 - vis08 ch3.attrs = hrv.attrs except ValueError: raise IncompatibleAreas ndvi = (vis08 - vis06) / (vis08 + vis06) ndvi = np.where(ndvi < 0, 0, ndvi) ch1 = ndvi * vis06 + (1 - ndvi) * vis08 ch1.attrs = vis06.attrs ch2 = ndvi * vis08 + (1 - ndvi) * vis06 ch2.attrs = vis08.attrs res = super(RealisticColors, self).__call__((ch1, ch2, ch3), *args, **kwargs) return res class CloudCompositor(GenericCompositor): """Detect clouds based on thresholding and use it as a mask for compositing.""" def __init__(self, name, transition_min=258.15, transition_max=298.15, transition_gamma=3.0, **kwargs): """Collect custom configuration values. Args: transition_min (float): Values below or equal to this are clouds -> opaque white transition_max (float): Values above this are cloud free -> transparent transition_gamma (float): Gamma correction to apply at the end """ self.transition_min = transition_min self.transition_max = transition_max self.transition_gamma = transition_gamma super(CloudCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite.""" data = projectables[0] # Default to rough IR thresholds # Values below or equal to this are clouds -> opaque white tr_min = self.transition_min # Values above this are cloud free -> transparent tr_max = self.transition_max # Gamma correction gamma = self.transition_gamma slope = 1 / (tr_min - tr_max) offset = 1 - slope * tr_min alpha = data.where(data > tr_min, 1.) alpha = alpha.where(data <= tr_max, 0.) alpha = alpha.where((data <= tr_min) | (data > tr_max), slope * data + offset) # gamma adjustment alpha **= gamma res = super(CloudCompositor, self).__call__((data, alpha), **kwargs) return res class RatioSharpenedRGB(GenericCompositor): """Sharpen RGB bands with ratio of a high resolution band to a lower resolution version. Any pixels where the ratio is computed to be negative or infinity, it is reset to 1. 
Additionally, the ratio is limited to 1.5 on the high end to avoid high changes due to small discrepancies in instrument detector footprint. Note that the input data to this compositor must already be resampled so all data arrays are the same shape. Example:: R_lo - 1000m resolution - shape=(2000, 2000) G - 1000m resolution - shape=(2000, 2000) B - 1000m resolution - shape=(2000, 2000) R_hi - 500m resolution - shape=(4000, 4000) ratio = R_hi / R_lo new_R = R_hi new_G = G * ratio new_B = B * ratio """ def __init__(self, *args, **kwargs): """Instanciate the ration sharpener.""" self.high_resolution_band = kwargs.pop("high_resolution_band", "red") if self.high_resolution_band not in ['red', 'green', 'blue', None]: raise ValueError("RatioSharpenedRGB.high_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.high_resolution_band)) kwargs.setdefault('common_channel_mask', False) super(RatioSharpenedRGB, self).__init__(*args, **kwargs) def _get_band(self, high_res, low_res, color, ratio): """Figure out what data should represent this color.""" if self.high_resolution_band == color: ret = high_res else: ret = low_res * ratio ret.attrs = low_res.attrs.copy() return ret def __call__(self, datasets, optional_datasets=None, **info): """Sharpen low resolution datasets by multiplying by the ratio of ``high_res / low_res``.""" if len(datasets) != 3: raise ValueError("Expected 3 datasets, got %d" % (len(datasets), )) if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \ (optional_datasets and optional_datasets[0].shape != datasets[0].shape): raise IncompatibleAreas('RatioSharpening requires datasets of ' 'the same size. Must resample first.') new_attrs = {} if optional_datasets: datasets = self.match_data_arrays(datasets + optional_datasets) high_res = datasets[-1] p1, p2, p3 = datasets[:3] if 'rows_per_scan' in high_res.attrs: new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) new_attrs.setdefault('resolution', high_res.attrs['resolution']) colors = ['red', 'green', 'blue'] if self.high_resolution_band in colors: LOG.debug("Sharpening image with high resolution {} band".format(self.high_resolution_band)) low_res = datasets[:3][colors.index(self.high_resolution_band)] ratio = high_res / low_res # make ratio a no-op (multiply by 1) where the ratio is NaN or # infinity or it is negative. ratio = ratio.where(np.isfinite(ratio) & (ratio >= 0), 1.) # we don't need ridiculously high ratios, they just make bright pixels ratio = ratio.clip(0, 1.5) else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None ratio = 1. 
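            # Assemble the output channels: the configured high resolution
            # band is passed through unchanged, the other bands are
            # multiplied by the sharpening ratio (see _get_band above).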
r = self._get_band(high_res, p1, 'red', ratio) g = self._get_band(high_res, p2, 'green', ratio) b = self._get_band(high_res, p3, 'blue', ratio) else: datasets = self.match_data_arrays(datasets) r, g, b = datasets[:3] # combine the masks mask = ~(r.isnull() | g.isnull() | b.isnull()) r = r.where(mask) g = g.where(mask) b = b.where(mask) # Collect information that is the same between the projectables # we want to use the metadata from the original datasets since the # new r, g, b arrays may have lost their metadata during calculations info = combine_metadata(*datasets) info.update(new_attrs) # Update that information with configured information (including name) info.update(self.attrs) # Force certain pieces of metadata that we *know* to be true info.setdefault("standard_name", "true_color") return super(RatioSharpenedRGB, self).__call__((r, g, b), **info) def _mean4(data, offset=(0, 0), block_id=None): rows, cols = data.shape # we assume that the chunks except the first ones are aligned if block_id[0] == 0: row_offset = offset[0] % 2 else: row_offset = 0 if block_id[1] == 0: col_offset = offset[1] % 2 else: col_offset = 0 row_after = (row_offset + rows) % 2 col_after = (col_offset + cols) % 2 pad = ((row_offset, row_after), (col_offset, col_after)) rows2 = rows + row_offset + row_after cols2 = cols + col_offset + col_after av_data = np.pad(data, pad, 'edge') new_shape = (int(rows2 / 2.), 2, int(cols2 / 2.), 2) data_mean = np.nanmean(av_data.reshape(new_shape), axis=(1, 3)) data_mean = np.repeat(np.repeat(data_mean, 2, axis=0), 2, axis=1) data_mean = data_mean[row_offset:row_offset + rows, col_offset:col_offset + cols] return data_mean class SelfSharpenedRGB(RatioSharpenedRGB): """Sharpen RGB with ratio of a band with a strided-version of itself. Example:: R - 500m resolution - shape=(4000, 4000) G - 1000m resolution - shape=(2000, 2000) B - 1000m resolution - shape=(2000, 2000) ratio = R / four_element_average(R) new_R = R new_G = G * ratio new_B = B * ratio """ @staticmethod def four_element_average_dask(d): """Average every 4 elements (2x2) in a 2D array.""" try: offset = d.attrs['area'].crop_offset except (KeyError, AttributeError): offset = (0, 0) res = d.data.map_blocks(_mean4, offset=offset, dtype=d.dtype) return xr.DataArray(res, attrs=d.attrs, dims=d.dims, coords=d.coords) def __call__(self, datasets, optional_datasets=None, **attrs): """Generate the composite.""" colors = ['red', 'green', 'blue'] if self.high_resolution_band not in colors: raise ValueError("SelfSharpenedRGB requires at least one high resolution band, not " "'{}'".format(self.high_resolution_band)) high_res = datasets[colors.index(self.high_resolution_band)] high_mean = self.four_element_average_dask(high_res) red = high_mean if self.high_resolution_band == 'red' else datasets[0] green = high_mean if self.high_resolution_band == 'green' else datasets[1] blue = high_mean if self.high_resolution_band == 'blue' else datasets[2] return super(SelfSharpenedRGB, self).__call__((red, green, blue), optional_datasets=(high_res,), **attrs) class LuminanceSharpeningCompositor(GenericCompositor): """Create a high resolution composite by sharpening a low resolution using high resolution luminance. This is done by converting to YCbCr colorspace, replacing Y, and convertin back to RGB. """ def __call__(self, projectables, *args, **kwargs): """Generate the composite.""" from trollimage.image import rgb2ycbcr, ycbcr2rgb projectables = self.match_data_arrays(projectables) luminance = projectables[0].copy() luminance /= 100. 
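        # The luminance input is expected in percent, so scale it to the
        # 0-1 range used by the YCbCr conversion below.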
# Limit between min(luminance) ... 1.0 luminance = da.where(luminance > 1., 1., luminance) # Get the enhanced version of the composite to be sharpened rgb_img = enhance2dataset(projectables[1]) # This all will be eventually replaced with trollimage convert() method # ycbcr_img = rgb_img.convert('YCbCr') # ycbcr_img.data[0, :, :] = luminance # rgb_img = ycbcr_img.convert('RGB') # Replace luminance of the IR composite y__, cb_, cr_ = rgb2ycbcr(rgb_img.data[0, :, :], rgb_img.data[1, :, :], rgb_img.data[2, :, :]) r__, g__, b__ = ycbcr2rgb(luminance, cb_, cr_) y_size, x_size = r__.shape r__ = da.reshape(r__, (1, y_size, x_size)) g__ = da.reshape(g__, (1, y_size, x_size)) b__ = da.reshape(b__, (1, y_size, x_size)) rgb_img.data = da.vstack((r__, g__, b__)) return super(LuminanceSharpeningCompositor, self).__call__(rgb_img, *args, **kwargs) class SandwichCompositor(GenericCompositor): """Make a sandwich product.""" def __call__(self, projectables, *args, **kwargs): """Generate the composite.""" projectables = self.match_data_arrays(projectables) luminance = projectables[0] luminance /= 100. # Limit between min(luminance) ... 1.0 luminance = luminance.clip(max=1.) # Get the enhanced version of the RGB composite to be sharpened rgb_img = enhance2dataset(projectables[1]) rgb_img *= luminance return super(SandwichCompositor, self).__call__(rgb_img, *args, **kwargs) class StaticImageCompositor(GenericCompositor): """A compositor that loads a static image from disk. If the filename passed to this compositor is not valid then the SATPY_ANCPATH environment variable will be checked to see if the image is located there """ def __init__(self, name, filename=None, area=None, **kwargs): """Collect custom configuration values. Args: filename (str): Filename of the image to load area (str): Name of area definition for the image. Optional for images with built-in area definitions (geotiff) """ if filename is None: raise ValueError("No image configured for static image compositor") self.filename = filename self.area = None if area is not None: from satpy.resample import get_area_def self.area = get_area_def(area) super(StaticImageCompositor, self).__init__(name, **kwargs) def __call__(self, *args, **kwargs): """Call the compositor.""" from satpy import Scene # Check if filename exists, if not then try from SATPY_ANCPATH if not os.path.isfile(self.filename): tmp_filename = os.path.join(get_environ_ancpath(), self.filename) if os.path.isfile(tmp_filename): self.filename = tmp_filename scn = Scene(reader='generic_image', filenames=[self.filename]) scn.load(['image']) img = scn['image'] # use compositor parameters as extra metadata # most important: set 'name' of the image img.attrs.update(self.attrs) # Check for proper area definition. 
Non-georeferenced images
        # do not have `area` in the attributes
        if 'area' not in img.attrs:
            if self.area is None:
                raise AttributeError("Area definition needs to be configured")
            img.attrs['area'] = self.area
        img.attrs['sensor'] = None
        img.attrs['mode'] = ''.join(img.bands.data)
        img.attrs.pop('modifiers', None)
        img.attrs.pop('calibration', None)
        # Add start time if not present in the filename
        if 'start_time' not in img.attrs or not img.attrs['start_time']:
            import datetime as dt
            img.attrs['start_time'] = dt.datetime.utcnow()
        if 'end_time' not in img.attrs or not img.attrs['end_time']:
            import datetime as dt
            img.attrs['end_time'] = dt.datetime.utcnow()

        return img


class BackgroundCompositor(GenericCompositor):
    """A compositor that overlays one composite on top of another."""

    def __call__(self, projectables, *args, **kwargs):
        """Call the compositor."""
        projectables = self.match_data_arrays(projectables)

        # Get enhanced datasets
        foreground = enhance2dataset(projectables[0])
        background = enhance2dataset(projectables[1])

        # Adjust bands so that they match
        # L/RGB -> RGB/RGB
        # LA/RGB -> RGBA/RGBA
        # RGB/RGBA -> RGBA/RGBA
        foreground = add_bands(foreground, background['bands'])
        background = add_bands(background, foreground['bands'])

        # Get merged metadata
        attrs = combine_metadata(foreground, background)
        if attrs.get('sensor') is None:
            # sensor can be a set
            attrs['sensor'] = self._get_sensors(projectables)

        # Stack the images
        if 'A' in foreground.attrs['mode']:
            # Use alpha channel as weight and blend the two composites
            alpha = foreground.sel(bands='A')
            data = []
            # NOTE: there's no alpha band in the output image, it will
            # be added by the data writer
            for band in foreground.mode[:-1]:
                fg_band = foreground.sel(bands=band)
                bg_band = background.sel(bands=band)
                chan = (fg_band * alpha + bg_band * (1 - alpha))
                chan = xr.where(chan.isnull(), bg_band, chan)
                data.append(chan)
        else:
            data = xr.where(foreground.isnull(), background, foreground)
            # Split to separate bands so the mode is correct
            data = [data.sel(bands=b) for b in data['bands']]

        res = super(BackgroundCompositor, self).__call__(data, **kwargs)
        res.attrs.update(attrs)
        return res


class MaskingCompositor(GenericCompositor):
    """A compositor that masks e.g. IR 10.8 channel data using cloud products from NWC SAF."""

    def __init__(self, name, transparency=None, **kwargs):
        """Collect custom configuration values.

        Args:
            transparency (dict): transparency for each cloud type as
                                 key-value pairs in a dictionary.

        The keys in `transparency` can be either the numerical values in the
        data used as a mask, with the corresponding transparency (0...100 %)
        as the value, or, for NWC SAF products, the flag names in the dataset
        `flag_meanings` attribute.  A transparency value of `0` means that
        the composite being masked will be fully visible, and `100` means it
        will be completely transparent and not visible in the resulting
        image.  For mask values not listed in `transparency`, the data will
        be completely opaque (transparency = 0).

        Example::

          >>> transparency = {0: 100, 1: 80, 2: 0}
          >>> compositor = MaskingCompositor("masking compositor",
          ...                                transparency=transparency)
          >>> result = compositor([data, mask])

        This will set the transparency of `data` based on the values in the
        `mask` dataset.  Locations where `mask` has values of `0` will be
        fully transparent, locations with `1` will be semi-transparent, and
        locations with `2` will be fully visible in the resulting image.  All
        the unlisted locations will be visible.

        The transparency is implemented by adding an alpha layer to the composite.
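        The flag-name form works the same way; a minimal sketch (the flag
        names shown here are hypothetical and must match entries in the mask
        dataset's `flag_meanings` attribute)::

          >>> transparency = {'cloud_free_land': 100, 'cloud_free_sea': 100}
          >>> compositor = MaskingCompositor("masking compositor",
          ...                                transparency=transparency)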
If the input `data` contains an alpha channel, it will be discarded.
        """
        if transparency is None:
            raise ValueError("No transparency configured for simple masking compositor")
        self.transparency = transparency
        super(MaskingCompositor, self).__init__(name, **kwargs)

    def __call__(self, projectables, *args, **kwargs):
        """Call the compositor."""
        if len(projectables) != 2:
            raise ValueError("Expected 2 datasets, got %d" % (len(projectables),))
        projectables = self.match_data_arrays(projectables)

        cloud_mask = projectables[1]
        cloud_mask_data = cloud_mask.data
        data = projectables[0]
        alpha_attrs = data.attrs.copy()
        if 'bands' in data.dims:
            data = [data.sel(bands=b) for b in data['bands'] if b != 'A']
        else:
            data = [data]

        # Create alpha band
        alpha = da.ones((data[0].sizes['y'],
                         data[0].sizes['x']),
                        chunks=data[0].chunks)

        # Modify alpha based on transparency per class from yaml
        flag_meanings = cloud_mask.attrs['flag_meanings']
        flag_values = cloud_mask.attrs['flag_values']
        if isinstance(flag_meanings, str):
            flag_meanings = flag_meanings.split()

        for key, val in self.transparency.items():
            if isinstance(key, str):
                key_index = flag_meanings.index(key)
                key = flag_values[key_index]
            alpha_val = 1. - val / 100.
            alpha = da.where(cloud_mask_data == key, alpha_val, alpha)

        alpha = xr.DataArray(data=alpha, attrs=alpha_attrs,
                             dims=data[0].dims, coords=data[0].coords)
        data.append(alpha)

        res = super(MaskingCompositor, self).__call__(data, **kwargs)
        return res
satpy-0.20.0/satpy/composites/abi.py000066400000000000000000000045151362525524100173750ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Composite classes for the ABI instrument."""

import logging

from satpy.composites import GenericCompositor

LOG = logging.getLogger(__name__)


class SimulatedGreen(GenericCompositor):
    """A single-band dataset resembling a Green (0.55 µm) band.

    This compositor creates a single band product by combining three
    other bands in various amounts. The general formula with
    dependencies (d) and fractions (f) is::

        result = d1 * f1 + d2 * f2 + d3 * f3

    See the `fractions` keyword argument for more information.
    Commonly used fractions for ABI data with C01, C02, and C03 inputs include:

    - SatPy default (historical): (0.465, 0.465, 0.07)
    - `CIMSS (Kaba) `_: (0.45, 0.45, 0.10)
    - `EDC `_: (0.45706946, 0.48358168, 0.06038137)

    """

    def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs):
        """Initialize fractions for input channels.

        Args:
            name (str): Name of this composite
            fractions (iterable): Fractions of each input band to include in the result.
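        Example (a minimal sketch using the CIMSS fractions listed in the
        class docstring; ``c01``, ``c02`` and ``c03`` stand for already
        loaded ABI channel DataArrays)::

            >>> comp = SimulatedGreen("synthetic_green", fractions=(0.45, 0.45, 0.10))
            >>> green = comp((c01, c02, c03))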
""" self.fractions = fractions super(SimulatedGreen, self).__init__(name, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Generate the single band composite.""" c01, c02, c03 = self.match_data_arrays(projectables) res = c01 * self.fractions[0] + c02 * self.fractions[1] + c03 * self.fractions[2] res.attrs = c03.attrs.copy() return super(SimulatedGreen, self).__call__((res,), **attrs) satpy-0.20.0/satpy/composites/ahi.py000066400000000000000000000032751362525524100174050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the AHI instrument.""" import logging from satpy.composites import GenericCompositor LOG = logging.getLogger(__name__) class GreenCorrector(GenericCompositor): """Corrector of the AHI green band to compensate for the deficit of chlorophyll signal.""" def __init__(self, *args, **kwargs): """Set default keyword argument values.""" # XXX: Should this be 0.93 and 0.07 self.fractions = kwargs.pop('fractions', [0.85, 0.15]) super(GreenCorrector, self).__init__(*args, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Boost vegetation effect thanks to NIR (0.8µm) band.""" green, nir = self.match_data_arrays(projectables) LOG.info('Boosting vegetation on green band') new_green = green * self.fractions[0] + nir * self.fractions[1] new_green.attrs = green.attrs.copy() return super(GreenCorrector, self).__call__((new_green,), **attrs) satpy-0.20.0/satpy/composites/cloud_products.py000066400000000000000000000104761362525524100216760ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Compositors for cloud products. 
""" import numpy as np import xarray as xr from satpy.composites import ColormapCompositor from satpy.composites import GenericCompositor class CloudTopHeightCompositor(ColormapCompositor): """Colorize with a palette, put cloud-free pixels as black.""" @staticmethod def build_colormap(palette, info): """Create the colormap from the `raw_palette` and the valid_range.""" from trollimage.colormap import Colormap if 'palette_meanings' in palette.attrs: palette_indices = palette.attrs['palette_meanings'] else: palette_indices = range(len(palette)) sqpalette = np.asanyarray(palette).squeeze() / 255.0 tups = [(val, tuple(tup)) for (val, tup) in zip(palette_indices, sqpalette)] colormap = Colormap(*tups) if 'palette_meanings' not in palette.attrs: sf = info.get('scale_factor', np.array(1)) colormap.set_range( *(np.array(info['valid_range']) * sf + info.get('add_offset', 0))) return colormap, sqpalette def __call__(self, projectables, **info): """Create the composite.""" if len(projectables) != 3: raise ValueError("Expected 3 datasets, got %d" % (len(projectables), )) data, palette, status = projectables colormap, palette = self.build_colormap(palette, data.attrs) channels, colors = colormap.palettize(np.asanyarray(data.squeeze())) channels = palette[channels] mask_nan = data.notnull() mask_cloud_free = (status + 1) % 2 chans = [] for idx in range(channels.shape[-1]): chan = xr.DataArray(channels[:, :, idx].reshape(data.shape), dims=data.dims, coords=data.coords, attrs=data.attrs).where(mask_nan) # Set cloud-free pixels as black chans.append(chan.where(mask_cloud_free, 0).where(status != status.attrs['_FillValue'])) res = super(CloudTopHeightCompositor, self).__call__(chans, **data.attrs) res.attrs['_FillValue'] = np.nan return res class PrecipCloudsRGB(GenericCompositor): def __call__(self, projectables, *args, **kwargs): """Make an RGB image out of the three probability categories of the NWCSAF precip product.""" projectables = self.match_data_arrays(projectables) light = projectables[0] moderate = projectables[1] intense = projectables[2] status_flag = projectables[3] if np.bitwise_and(status_flag, 4).any(): # AMSU is used maxs1 = 70 maxs2 = 70 maxs3 = 100 else: # avhrr only maxs1 = 30 maxs2 = 50 maxs3 = 40 scalef3 = 1.0 / maxs3 - 1 / 255.0 scalef2 = 1.0 / maxs2 - 1 / 255.0 scalef1 = 1.0 / maxs1 - 1 / 255.0 p1data = (light*scalef1).where(light != 0) p1data = p1data.where(light != light.attrs['_FillValue']) p1data.attrs = light.attrs data = moderate*scalef2 p2data = data.where(moderate != 0) p2data = p2data.where(moderate != moderate.attrs['_FillValue']) p2data.attrs = moderate.attrs data = intense*scalef3 p3data = data.where(intense != 0) p3data = p3data.where(intense != intense.attrs['_FillValue']) p3data.attrs = intense.attrs res = super(PrecipCloudsRGB, self).__call__((p3data, p2data, p1data), *args, **kwargs) return res satpy-0.20.0/satpy/composites/crefl_utils.py000066400000000000000000000424231362525524100211550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Shared utilities for correcting reflectance data using the 'crefl' algorithm. Original code written by Ralph Kuehn with modifications by David Hoese and Martin Raspaud. Ralph's code was originally based on the C crefl code distributed for VIIRS and MODIS. """ import logging import numpy as np import xarray as xr import dask.array as da LOG = logging.getLogger(__name__) bUseV171 = False if bUseV171: UO3 = 0.319 UH2O = 2.93 else: UO3 = 0.285 UH2O = 2.93 MAXSOLZ = 86.5 MAXAIRMASS = 18 SCALEHEIGHT = 8000 FILL_INT16 = 32767 TAUSTEP4SPHALB_ABI = .0003 TAUSTEP4SPHALB = .0001 MAXNUMSPHALBVALUES = 4000 # with no aerosol taur <= 0.4 in all bands everywhere REFLMIN = -0.01 REFLMAX = 1.6 def csalbr(tau): # Previously 3 functions csalbr fintexp1, fintexp3 a = [-.57721566, 0.99999193, -0.24991055, 0.05519968, -0.00976004, 0.00107857] # xx = a[0] + a[1] * tau + a[2] * tau**2 + a[3] * tau**3 + a[4] * tau**4 + a[5] * tau**5 # xx = np.polyval(a[::-1], tau) # xx = a[0] # xftau = 1.0 # for i in xrange(5): # xftau = xftau*tau # xx = xx + a[i] * xftau fintexp1 = np.polyval(a[::-1], tau) - np.log(tau) fintexp3 = (np.exp(-tau) * (1.0 - tau) + tau**2 * fintexp1) / 2.0 return (3.0 * tau - fintexp3 * (4.0 + 2.0 * tau) + 2.0 * np.exp(-tau)) / (4.0 + 3.0 * tau) # From crefl.1.7.1 if bUseV171: aH2O = np.array([-5.60723, -5.25251, 0, 0, -6.29824, -7.70944, -3.91877, 0, 0, 0, 0, 0, 0, 0, 0, 0]) bH2O = np.array([0.820175, 0.725159, 0, 0, 0.865732, 0.966947, 0.745342, 0, 0, 0, 0, 0, 0, 0, 0, 0]) # const float aO3[Nbands]={ 0.0711, 0.00313, 0.0104, 0.0930, 0, # 0, 0, 0.00244, 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, # 0.00263};*/ aO3 = np.array( [0.0715289, 0, 0.00743232, 0.089691, 0, 0, 0, 0.001, 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263]) # const float taur0[Nbands] = { 0.0507, 0.0164, 0.1915, 0.0948, # 0.0036, 0.0012, 0.0004, 0.3109, 0.2375, 0.1596, 0.1131, 0.0994, # 0.0446, 0.0416, 0.0286, 0.0155};*/ taur0 = np.array( [0.05100, 0.01631, 0.19325, 0.09536, 0.00366, 0.00123, 0.00043, 0.3139, 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]) else: # From polar2grid cviirs.c # This number is what Ralph says "looks good" rg_fudge = .55 aH2O = np.array( [0.000406601, 0.0015933, 0, 1.78644e-05, 0.00296457, 0.000617252, 0.000996563, 0.00222253, 0.00094005, 0.000563288, 0, 0, 0, 0, 0, 0, 2.4111e-003, 7.8454e-003*rg_fudge, 7.9258e-3, 9.3392e-003, 2.53e-2]) bH2O = np.array([0.812659, 0.832931, 1., 0.8677850, 0.806816, 0.944958, 0.78812, 0.791204, 0.900564, 0.942907, 0, 0, 0, 0, 0, 0, # These are actually aO2 values for abi calculations 1.2360e-003, 3.7296e-003, 177.7161e-006, 10.4899e-003, 1.63e-2]) # /*const float aO3[Nbands]={ 0.0711, 0.00313, 0.0104, 0.0930, 0, 0, 0, 0.00244, # 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263};*/ aO3 = np.array([0.0433461, 0.0, 0.0178299, 0.0853012, 0, 0, 0, 0.0813531, 0, 0, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263, 4.2869e-003, 25.6509e-003*rg_fudge, 802.4319e-006, 0.0000e+000, 2e-5]) # /*const float taur0[Nbands] = { 0.0507, 0.0164, 0.1915, 0.0948, 0.0036, 0.0012, 0.0004, # 0.3109, 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155};*/ taur0 = np.array([0.04350, 0.01582, 0.16176, 0.09740, 0.00369, 0.00132, 0.00033, 0.05373, 0.01561, 0.00129, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155, 184.7200e-003, 52.3490e-003, 15.8450e-003, 1.3074e-003, 
311.2900e-006])  # add last 5 from bH2O to aO2
    aO2 = 0

# Map of pixel resolutions -> wavelength -> coefficient index
# Map of pixel resolutions -> band name -> coefficient index
# Index is used in aH2O, bH2O, aO3, and taur0 arrays above
MODIS_COEFF_INDEX_MAP = {
    1000: {
        (0.620, 0.6450, 0.670): 0,
        "1": 0,
        (0.841, 0.8585, 0.876): 1,
        "2": 1,
        (0.459, 0.4690, 0.479): 2,
        "3": 2,
        (0.545, 0.5550, 0.565): 3,
        "4": 3,
        (1.230, 1.2400, 1.250): 4,
        "5": 4,
        (1.628, 1.6400, 1.652): 5,
        "6": 5,
        (2.105, 2.1300, 2.155): 6,
        "7": 6,
    }
}
MODIS_COEFF_INDEX_MAP[500] = MODIS_COEFF_INDEX_MAP[1000]
MODIS_COEFF_INDEX_MAP[250] = MODIS_COEFF_INDEX_MAP[1000]

# resolution -> wavelength -> coefficient index
# resolution -> band name -> coefficient index
VIIRS_COEFF_INDEX_MAP = {
    1000: {
        (0.662, 0.6720, 0.682): 0,  # M05
        "M05": 0,
        (0.846, 0.8650, 0.885): 1,  # M07
        "M07": 1,
        (0.478, 0.4880, 0.498): 2,  # M03
        "M03": 2,
        (0.545, 0.5550, 0.565): 3,  # M04
        "M04": 3,
        (1.230, 1.2400, 1.250): 4,  # M08
        "M08": 4,
        (1.580, 1.6100, 1.640): 5,  # M10
        "M10": 5,
        (2.225, 2.2500, 2.275): 6,  # M11
        "M11": 6,
    },
    500: {
        (0.600, 0.6400, 0.680): 7,  # I01
        "I01": 7,
        (0.845, 0.8650, 0.884): 8,  # I02
        "I02": 8,
        (1.580, 1.6100, 1.640): 9,  # I03
        "I03": 9,
    },
}

# resolution -> wavelength -> coefficient index
# resolution -> band name -> coefficient index
ABI_COEFF_INDEX_MAP = {
    2000: {
        (0.450, 0.470, 0.490): 16,  # C01
        "C01": 16,
        (0.590, 0.640, 0.690): 17,  # C02
        "C02": 17,
        (0.8455, 0.865, 0.8845): 18,  # C03
        "C03": 18,
        # (1.3705, 1.378, 1.3855): None,  # C04
        # "C04": None,
        (1.580, 1.610, 1.640): 19,  # C05
        "C05": 19,
        (2.225, 2.250, 2.275): 20,  # C06
        "C06": 20
    },
}

COEFF_INDEX_MAP = {
    "viirs": VIIRS_COEFF_INDEX_MAP,
    "modis": MODIS_COEFF_INDEX_MAP,
    "abi": ABI_COEFF_INDEX_MAP,
}


def find_coefficient_index(sensor, wavelength_range, resolution=0):
    """Return index into coefficient arrays for this band's wavelength.

    This function searches through the `COEFF_INDEX_MAP` dictionary and
    finds the first key where the nominal wavelength of `wavelength_range`
    falls between the minimum wavelength and maximum wavelength of the key.
    `wavelength_range` can also be the standard name of the band. For
    example, "M05" for VIIRS or "1" for MODIS.

    :param sensor: sensor of band to be corrected
    :param wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength)
    :param resolution: resolution of the band to be corrected
    :return: index into coefficient arrays like `aH2O`, `aO3`, etc.
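       For example, ``find_coefficient_index('viirs', 'M05', resolution=1000)``
       resolves to index 0 via ``VIIRS_COEFF_INDEX_MAP`` above.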
None is returned if no matching wavelength is found.
    """
    index_map = COEFF_INDEX_MAP[sensor.lower()]

    # Find the best resolution of coefficients
    for res in sorted(index_map.keys()):
        if resolution <= res:
            index_map = index_map[res]
            break
    else:
        raise ValueError("Unrecognized data resolution: {}".format(resolution))

    # Find the best wavelength of coefficients
    if isinstance(wavelength_range, str):
        # wavelength range is actually a band name
        return index_map[wavelength_range]
    else:
        for k, v in index_map.items():
            if isinstance(k, str):
                # we are analyzing wavelengths and ignoring dataset names
                continue
            if k[0] <= wavelength_range[1] <= k[2]:
                return v


def get_coefficients(sensor, wavelength_range, resolution=0):
    """Get the CREFL correction coefficients for the given sensor and band.

    :param sensor: sensor of the band to be corrected
    :param wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength)
    :param resolution: resolution of the band to be corrected
    :return: aH2O, bH2O, aO3, taur0 coefficient values
    """
    idx = find_coefficient_index(sensor,
                                 wavelength_range,
                                 resolution=resolution)
    return aH2O[idx], bH2O[idx], aO3[idx], taur0[idx]


def chand(phi, muv, mus, taur):
    # FROM FUNCTION CHAND
    # phi: azimuthal difference between sun and observation in degree
    #      (phi=0 in backscattering direction)
    # mus: cosine of the sun zenith angle
    # muv: cosine of the observation zenith angle
    # taur: molecular optical depth
    # rhoray: molecular path reflectance
    # constant xdep: depolarization factor (0.0279)
    #          xfd = (1-xdep/(2-xdep)) / (1 + 2*xdep/(2-xdep)) = 2 * (1 - xdep) / (2 + xdep) = 0.958725775
    # */
    xfd = 0.958725775
    xbeta2 = 0.5
    #         float pl[5];
    #         double fs01, fs02, fs0, fs1, fs2;
    as0 = [0.33243832, 0.16285370, -0.30924818, -0.10324388, 0.11493334,
           -6.777104e-02, 1.577425e-03, -1.240906e-02, 3.241678e-02, -3.503695e-02]
    as1 = [0.19666292, -5.439061e-02]
    as2 = [0.14545937, -2.910845e-02]
    #         float phios, xcos1, xcos2, xcos3;
    #         float xph1, xph2, xph3, xitm1, xitm2;
    #         float xlntaur, xitot1, xitot2, xitot3;
    #         int i, ib;
    xph1 = 1.0 + (3.0 * mus * mus - 1.0) * (3.0 * muv * muv - 1.0) * xfd / 8.0
    xph2 = -xfd * xbeta2 * 1.5 * mus * muv * da.sqrt(
        1.0 - mus * mus) * da.sqrt(1.0 - muv * muv)
    xph3 = xfd * xbeta2 * 0.375 * (1.0 - mus * mus) * (1.0 - muv * muv)
    # pl[0] = 1.0
    # pl[1] = mus + muv
    # pl[2] = mus * muv
    # pl[3] = mus * mus + muv * muv
    # pl[4] = mus * mus * muv * muv
    fs01 = as0[0] + (mus + muv) * as0[1] + (mus * muv) * as0[2] + (
        mus * mus + muv * muv) * as0[3] + (mus * mus * muv * muv) * as0[4]
    fs02 = as0[5] + (mus + muv) * as0[6] + (mus * muv) * as0[7] + (
        mus * mus + muv * muv) * as0[8] + (mus * mus * muv * muv) * as0[9]
    #         for (i = 0; i < 5; i++) {
    #                 fs01 += (double) (pl[i] * as0[i]);
    #                 fs02 += (double) (pl[i] * as0[5 + i]);
    #         }
    # for refl, (ah2o, bh2o, ao3, tau) in zip(reflectance_bands, coefficients):
    #     ib = find_coefficient_index(center_wl)
    #     if ib is None:
    #         raise ValueError("Can't handle band with wavelength '{}'".format(center_wl))
    xlntaur = da.log(taur)
    fs0 = fs01 + fs02 * xlntaur
    fs1 = as1[0] + xlntaur * as1[1]
    fs2 = as2[0] + xlntaur * as2[1]
    del xlntaur, fs01, fs02
    trdown = da.exp(-taur / mus)
    trup = da.exp(-taur / muv)
    xitm1 = (1.0 - trdown * trup) / 4.0 / (mus + muv)
    xitm2 = (1.0 - trdown) * (1.0 - trup)
    xitot1 = xph1 * (xitm1 + xitm2 * fs0)
    xitot2 = xph2 * (xitm1 + xitm2 * fs1)
    xitot3 = xph3 * (xitm1 + xitm2 * fs2)
    del xph1, xph2, xph3, xitm1, xitm2, fs0, fs1, fs2
    phios = da.deg2rad(phi + 180.0)
    xcos1 = 1.0
    xcos2 = da.cos(phios)
    xcos3 = da.cos(2.0 * phios)
    del phios
    rhoray = xitot1 * xcos1 + xitot2 * xcos2 * 2.0 + xitot3 * xcos3 * 2.0
    return rhoray,
trdown, trup


def _sphalb_index(index_arr, sphalb0):
    """Select spherical albedo values from the precomputed lookup table."""
    # FIXME: if/when dask can support lazy index arrays then remove this
    return sphalb0[index_arr]


def atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, taustep4sphalb, tO2=1.0):
    """Compute the sphalb, rhoray, TtotraytH2O and tOG atmospheric terms."""
    tau_step = da.linspace(taustep4sphalb, MAXNUMSPHALBVALUES * taustep4sphalb,
                           MAXNUMSPHALBVALUES, chunks=int(MAXNUMSPHALBVALUES / 2))
    sphalb0 = csalbr(tau_step)
    taur = tau * da.exp(-height / SCALEHEIGHT)
    rhoray, trdown, trup = chand(phi, muv, mus, taur)
    if isinstance(height, xr.DataArray):
        sphalb = da.map_blocks(_sphalb_index, (taur / taustep4sphalb + 0.5).astype(np.int32).data,
                               sphalb0.compute(), dtype=sphalb0.dtype)
    else:
        sphalb = sphalb0[(taur / taustep4sphalb + 0.5).astype(np.int32)]
    Ttotrayu = ((2 / 3. + muv) + (2 / 3. - muv) * trup) / (4 / 3. + taur)
    Ttotrayd = ((2 / 3. + mus) + (2 / 3. - mus) * trdown) / (4 / 3. + taur)
    TtotraytH2O = Ttotrayu * Ttotrayd * tH2O
    tOG = tO3 * tO2
    return sphalb, rhoray, TtotraytH2O, tOG


def get_atm_variables(mus, muv, phi, height, ah2o, bh2o, ao3, tau):
    """Compute the atmospheric variables for the original VIIRS/MODIS CREFL algorithm."""
    air_mass = 1.0 / mus + 1 / muv
    air_mass = air_mass.where(air_mass <= MAXAIRMASS, -1.0)
    tO3 = 1.0
    tH2O = 1.0
    if ao3 != 0:
        tO3 = da.exp(-air_mass * UO3 * ao3)
    if bh2o != 0:
        if bUseV171:
            tH2O = da.exp(-da.exp(ah2o + bh2o * da.log(air_mass * UH2O)))
        else:
            tH2O = da.exp(-(ah2o * ((air_mass * UH2O) ** bh2o)))
    # Returns sphalb, rhoray, TtotraytH2O, tOG
    return atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, TAUSTEP4SPHALB)


def get_atm_variables_abi(mus, muv, phi, height, G_O3, G_H2O, G_O2, ah2o, ao2, ao3, tau):
    """Compute the atmospheric variables for the ABI variant of the CREFL algorithm."""
    tO3 = 1.0
    tH2O = 1.0
    if ao3 != 0:
        tO3 = da.exp(-G_O3 * ao3)
    if ah2o != 0:
        tH2O = da.exp(-G_H2O * ah2o)
    tO2 = da.exp(-G_O2 * ao2)
    # Returns sphalb, rhoray, TtotraytH2O, tOG.
    return atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, TAUSTEP4SPHALB_ABI, tO2=tO2)


def G_calc(zenith, a_coeff):
    """Compute the angular path-length (air mass) factor for a gas transmittance term."""
    return (da.cos(da.deg2rad(zenith))+(a_coeff[0]*(zenith**a_coeff[1])*(a_coeff[2]-zenith)**a_coeff[3]))**-1


def _avg_elevation_index(avg_elevation, row, col):
    """Index the average elevation array with the given row/col index arrays."""
    return avg_elevation[row, col]


def run_crefl(refl, coeffs,
              lon,
              lat,
              sensor_azimuth,
              sensor_zenith,
              solar_azimuth,
              solar_zenith,
              avg_elevation=None,
              percent=False,
              use_abi=False):
    """Run main crefl algorithm.

    All input parameters are per-pixel values meaning they are the same size
    and shape as the input reflectance data, unless otherwise stated.

    :param refl: reflectance band array
    :param coeffs: tuple of coefficients for this band (see `get_coefficients`)
    :param lon: input swath longitude array
    :param lat: input swath latitude array
    :param sensor_azimuth: input swath sensor azimuth angle array
    :param sensor_zenith: input swath sensor zenith angle array
    :param solar_azimuth: input swath solar azimuth angle array
    :param solar_zenith: input swath solar zenith angle array
    :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf)
    :param percent: True if input reflectances are on a 0-100 scale instead of 0-1 scale (default: False)
    :param use_abi: True to use the ABI variant of the atmospheric calculations (default: False)

    """
    # FUTURE: Find a way to compute the average elevation before hand
    # Get digital elevation map data for our granule, set ocean fill value to 0
    if avg_elevation is None:
        LOG.debug("No average elevation information provided in CREFL")
        # height = np.zeros(lon.shape, dtype=np.float)
        height = 0.
else:
        LOG.debug("Using average elevation information provided to CREFL")
        lat[(lat <= -90) | (lat >= 90)] = np.nan
        lon[(lon <= -180) | (lon >= 180)] = np.nan
        row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0).astype(np.int32)
        col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0).astype(np.int32)
        space_mask = da.isnull(lon) | da.isnull(lat)
        row[space_mask] = 0
        col[space_mask] = 0

        height = da.map_blocks(_avg_elevation_index, avg_elevation, row, col, dtype=avg_elevation.dtype)
        height = xr.DataArray(height, dims=['y', 'x'])
        # negative heights aren't allowed, clip to 0
        height = height.where((height >= 0.) & ~space_mask, 0.0)
        del lat, lon, row, col
    mus = da.cos(da.deg2rad(solar_zenith))
    mus = mus.where(mus >= 0)
    muv = da.cos(da.deg2rad(sensor_zenith))
    phi = solar_azimuth - sensor_azimuth

    if use_abi:
        LOG.debug("Using ABI CREFL algorithm")
        a_O3 = [268.45, 0.5, 115.42, -3.2922]
        a_H2O = [0.0311, 0.1, 92.471, -1.3814]
        a_O2 = [0.4567, 0.007, 96.4884, -1.6970]
        G_O3 = G_calc(solar_zenith, a_O3) + G_calc(sensor_zenith, a_O3)
        G_H2O = G_calc(solar_zenith, a_H2O) + G_calc(sensor_zenith, a_H2O)
        G_O2 = G_calc(solar_zenith, a_O2) + G_calc(sensor_zenith, a_O2)
        # Note: bh2o values are actually ao2 values for abi
        sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables_abi(mus, muv, phi, height,
                                                                 G_O3, G_H2O, G_O2, *coeffs)
    else:
        LOG.debug("Using original VIIRS CREFL algorithm")
        sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables(mus, muv, phi, height, *coeffs)
    del solar_azimuth, solar_zenith, sensor_zenith, sensor_azimuth

    # Note: Assume that fill/invalid values are either NaN or we are dealing
    #       with masked arrays
    if percent:
        corr_refl = ((refl / 100.) / tOG - rhoray) / TtotraytH2O
    else:
        corr_refl = (refl / tOG - rhoray) / TtotraytH2O
    corr_refl /= (1.0 + corr_refl * sphalb)
    return corr_refl.clip(REFLMIN, REFLMAX)
satpy-0.20.0/satpy/composites/sar.py000066400000000000000000000114731362525524100174300ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Composite classes for the SAR instruments."""

import logging

import numpy as np

from satpy.composites import GenericCompositor
from satpy.dataset import combine_metadata

LOG = logging.getLogger(__name__)


def overlay(top, bottom, maxval=None):
    """Blend two layers.
from: https://docs.gimp.org/en/gimp-concepts-layer-modes.html """ if maxval is None: maxval = np.maximum(top.max(), bottom.max()) res = ((2 * top / maxval - 1) * bottom + 2 * top) * bottom / maxval return res.clip(min=0) class SARIce(GenericCompositor): """The SAR Ice composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR Ice composite.""" (mhh, mhv) = projectables ch1attrs = mhh.attrs ch2attrs = mhv.attrs mhh = np.sqrt(mhh ** 2 + 0.002) - 0.04 mhv = np.sqrt(mhv ** 2 + 0.002) - 0.04 mhh.attrs = ch1attrs mhv.attrs = ch2attrs green = overlay(mhh, mhv, 30) * 1000 green.attrs = combine_metadata(mhh, mhv) return super(SARIce, self).__call__((mhv, green, mhh), *args, **kwargs) class SARIceLegacy(GenericCompositor): """The SAR Ice composite, legacy version with dynamic stretching.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR RGB composite.""" (mhh, mhv) = projectables green = overlay(mhh, mhv) green.attrs = combine_metadata(mhh, mhv) return super(SARIceLegacy, self).__call__((mhv, green, mhh), *args, **kwargs) class SARRGB(GenericCompositor): """The SAR RGB composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR RGB composite.""" (mhh, mhv) = projectables green = overlay(mhh, mhv) green.attrs = combine_metadata(mhh, mhv) return super(SARRGB, self).__call__((-mhv, -green, -mhh), *args, **kwargs) # (mhh, mhv) = projectables # green = 1 - (overlay(mhh, mhv) / .0044) # red = 1 - (mhv / .223) # blue = 1 - (mhh / .596) # import xarray as xr # from functools import reduce # # mask1 = reduce(np.logical_and, # [abs(green - blue) < 10 / 255., # red - blue >= 0, # np.maximum(green, blue) < 200 / 255.]) # # mask2 = np.logical_and(abs(green - blue) < 40 / 255., # red - blue > 40 / 255.) # # mask3 = np.logical_and(red - blue > 10 / 255., # np.maximum(green, blue) < 120 / 255.) # # mask4 = reduce(np.logical_and, # [red < 70 / 255., # green < 60 / 255., # blue < 60 / 255.]) # # mask5 = reduce(np.logical_and, # [red < 80 / 255., # green < 80 / 255., # blue < 80 / 255., # np.minimum(np.minimum(red, green), blue) < 30 / 255.]) # # mask6 = reduce(np.logical_and, # [red < 110 / 255., # green < 110 / 255., # blue < 110 / 255., # np.minimum(red, green) < 10 / 255.]) # # mask = reduce(np.logical_or, [mask1, mask2, mask3, mask4, mask5, mask6]) # # red = xr.where(mask, 230 / 255. - red, red).clip(min=0) # green = xr.where(mask, 1 - green, green) # blue = xr.where(mask, 1 - blue, blue) # # attrs = combine_metadata(mhh, mhv) # green.attrs = attrs # red.attrs = attrs # blue.attrs = attrs # # return super(SARRGB, self).__call__((mhv, green, mhh), *args, **kwargs) class SARQuickLook(GenericCompositor): """The SAR QuickLook composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR QuickLook composite.""" (mhh, mhv) = projectables blue = mhv / mhh blue.attrs = combine_metadata(mhh, mhv) return super(SARQuickLook, self).__call__((mhh, mhv, blue), *args, **kwargs) satpy-0.20.0/satpy/composites/viirs.py000066400000000000000000001433671362525524100200070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Composite classes for the VIIRS instrument."""

import logging
import os

import numpy as np
import dask
import dask.array as da
import xarray as xr

from satpy.composites import CompositeBase, GenericCompositor
from satpy.config import get_environ_ancpath
from satpy.dataset import combine_metadata
from satpy.utils import get_satpos

LOG = logging.getLogger(__name__)


class VIIRSFog(CompositeBase):
    """A simple temperature difference composite for showing fog."""

    def __call__(self, projectables, nonprojectables=None, **info):
        """Create the temperature difference DataArray."""
        import warnings
        warnings.warn("VIIRSFog compositor is deprecated, use DifferenceCompositor "
                      "instead.", DeprecationWarning)
        if len(projectables) != 2:
            raise ValueError("Expected 2 datasets, got %d" % (len(projectables), ))

        p1, p2 = projectables
        fog = p1 - p2
        fog.attrs.update(self.attrs)
        fog.attrs["area"] = p1.attrs["area"]
        fog.attrs["start_time"] = p1.attrs["start_time"]
        fog.attrs["end_time"] = p1.attrs["end_time"]
        fog.attrs["name"] = self.attrs["name"]
        fog.attrs["wavelength"] = None
        fog.attrs.setdefault("mode", "L")
        return fog


class ReflectanceCorrector(CompositeBase):
    """Corrected Reflectance (crefl) modifier.

    Uses a python rewrite of the C CREFL code written for VIIRS and MODIS.
    """

    def __init__(self, *args, **kwargs):
        """Initialize the compositor with values from the user or from the configuration file.

        If `dem_filename` can't be found or opened, the correction is done
        assuming a sea-level (0 m) terrain height.
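        When the DEM file is available, the terrain height is instead looked
        up per pixel (see `run_crefl` in `satpy.composites.crefl_utils`).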
        :param dem_filename: path to the ancillary 'averaged heights' file;
                             default: CMGDEM.hdf (or the CREFL_ANCFILENAME
                             environment variable if set), searched for under
                             SATPY_ANCPATH when the path does not exist directly
        :param dem_sds: variable name to load from the ancillary file
        """
        dem_filename = kwargs.pop("dem_filename",
                                  os.environ.get("CREFL_ANCFILENAME",
                                                 "CMGDEM.hdf"))
        if os.path.exists(dem_filename):
            self.dem_file = dem_filename
        else:
            self.dem_file = os.path.join(get_environ_ancpath(), dem_filename)
        self.dem_sds = kwargs.pop("dem_sds", "averaged elevation")
        super(ReflectanceCorrector, self).__init__(*args, **kwargs)

    def __call__(self, datasets, optional_datasets, **info):
        """Create modified DataArray object by applying the crefl algorithm."""
        if not optional_datasets or len(optional_datasets) != 4:
            vis = self.match_data_arrays([datasets[0]])[0]
            sensor_aa, sensor_za, solar_aa, solar_za = self.get_angles(vis)
        else:
            vis, sensor_aa, sensor_za, solar_aa, solar_za = self.match_data_arrays(
                datasets + optional_datasets)
            # get the dask array underneath
            sensor_aa = sensor_aa.data
            sensor_za = sensor_za.data
            solar_aa = solar_aa.data
            solar_za = solar_za.data

        # angles must be xarrays
        sensor_aa = xr.DataArray(sensor_aa, dims=['y', 'x'])
        sensor_za = xr.DataArray(sensor_za, dims=['y', 'x'])
        solar_aa = xr.DataArray(solar_aa, dims=['y', 'x'])
        solar_za = xr.DataArray(solar_za, dims=['y', 'x'])

        refl_data = datasets[0]
        if refl_data.attrs.get("rayleigh_corrected"):
            return refl_data

        if os.path.isfile(self.dem_file):
            LOG.debug("Loading CREFL averaged elevation information from: %s",
                      self.dem_file)
            from netCDF4 import Dataset as NCDataset
            # HDF4 file, NetCDF library needs to be compiled with HDF4 support
            nc = NCDataset(self.dem_file, "r")
            # average elevation is stored as a 16-bit signed integer but with
            # scale factor 1 and offset 0, convert it to float here
            avg_elevation = nc.variables[self.dem_sds][:].astype(np.float)
            if isinstance(avg_elevation, np.ma.MaskedArray):
                avg_elevation = avg_elevation.filled(np.nan)
        else:
            avg_elevation = None

        from satpy.composites.crefl_utils import run_crefl, get_coefficients

        percent = refl_data.attrs["units"] == "%"

        coefficients = get_coefficients(refl_data.attrs["sensor"],
                                        refl_data.attrs["wavelength"],
                                        refl_data.attrs["resolution"])

        use_abi = vis.attrs['sensor'] == 'abi'
        lons, lats = vis.attrs['area'].get_lonlats(chunks=vis.chunks)
        results = run_crefl(refl_data,
                            coefficients,
                            lons,
                            lats,
                            sensor_aa,
                            sensor_za,
                            solar_aa,
                            solar_za,
                            avg_elevation=avg_elevation,
                            percent=percent,
                            use_abi=use_abi)

        info.update(refl_data.attrs)
        info["rayleigh_corrected"] = True
        factor = 100. if percent else 1.
        results = results * factor
        results.attrs = info
        self.apply_modifier_info(refl_data, results)
        return results

    def get_angles(self, vis):
        """Get sun and satellite angles to use in crefl calculations."""
        from pyorbital.astronomy import get_alt_az, sun_zenith_angle
        from pyorbital.orbital import get_observer_look
        lons, lats = vis.attrs['area'].get_lonlats(chunks=vis.data.chunks)
        lons = da.where(lons >= 1e30, np.nan, lons)
        lats = da.where(lats >= 1e30, np.nan, lats)
        suna = get_alt_az(vis.attrs['start_time'], lons, lats)[1]
        suna = np.rad2deg(suna)
        sunz = sun_zenith_angle(vis.attrs['start_time'], lons, lats)
        sat_lon, sat_lat, sat_alt = get_satpos(vis)
        sata, satel = get_observer_look(
            sat_lon,
            sat_lat,
            sat_alt / 1000.0,  # km
            vis.attrs['start_time'],
            lons, lats, 0)
        satz = 90 - satel
        return sata, satz, suna, sunz


class HistogramDNB(CompositeBase):
    """Histogram equalized DNB composite.

    The logic for this code was taken from Polar2Grid and was originally
    developed by Eva Schiffer (SSEC).
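    The equalization itself is performed by ``histogram_equalization``
    defined later in this module.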
    This composite separates the DNB data into 3 main regions: Day, Night,
    and Mixed. Each region is equalized separately to bring out the most
    information from the region due to the high dynamic range of the DNB
    data. Optionally, the mixed region can be separated into multiple smaller
    regions by using the `mixed_degree_step` keyword.
    """

    def __init__(self, *args, **kwargs):
        """Initialize the compositor with values from the user or from the configuration file.

        :param high_angle_cutoff: solar zenith angle threshold in degrees,
                                  values above this are considered "night"
        :param low_angle_cutoff: solar zenith angle threshold in degrees,
                                 values below this are considered "day"
        :param mixed_degree_step: Step interval used to separate the "mixed"
                                  region into multiple parts; by default the
                                  whole mixed region is done at once
        """
        self.high_angle_cutoff = int(kwargs.pop("high_angle_cutoff", 100))
        self.low_angle_cutoff = int(kwargs.pop("low_angle_cutoff", 88))
        self.mixed_degree_step = int(kwargs.pop(
            "mixed_degree_step")) if "mixed_degree_step" in kwargs else None
        super(HistogramDNB, self).__init__(*args, **kwargs)

    def _run_dnb_normalization(self, dnb_data, sza_data):
        """Scale the DNB data using a histogram equalization method.

        Args:
            dnb_data (ndarray): Day/Night Band data array
            sza_data (ndarray): Solar Zenith Angle data array

        """
        # convert dask arrays to DataArray objects
        dnb_data = xr.DataArray(dnb_data, dims=('y', 'x'))
        sza_data = xr.DataArray(sza_data, dims=('y', 'x'))

        good_mask = ~(dnb_data.isnull() | sza_data.isnull())
        output_dataset = dnb_data.where(good_mask)
        # we only need the numpy array
        output_dataset = output_dataset.values.copy()
        dnb_data = dnb_data.values
        sza_data = sza_data.values

        day_mask, mixed_mask, night_mask = make_day_night_masks(
            sza_data,
            good_mask.values,
            self.high_angle_cutoff,
            self.low_angle_cutoff,
            stepsDegrees=self.mixed_degree_step)

        did_equalize = False
        if day_mask.any():
            LOG.debug("Histogram equalizing DNB day data...")
            histogram_equalization(dnb_data, day_mask, out=output_dataset)
            did_equalize = True
        if mixed_mask:
            for mask in mixed_mask:
                if mask.any():
                    LOG.debug("Histogram equalizing DNB mixed data...")
                    histogram_equalization(dnb_data, mask, out=output_dataset)
                    did_equalize = True
        if night_mask.any():
            LOG.debug("Histogram equalizing DNB night data...")
            histogram_equalization(dnb_data, night_mask, out=output_dataset)
            did_equalize = True

        if not did_equalize:
            raise RuntimeError("No valid data found to histogram equalize")

        return output_dataset

    def __call__(self, datasets, **info):
        """Create the composite by scaling the DNB data using a histogram equalization method.

        :param datasets: 2-element tuple (Day/Night Band data,
                         Solar Zenith Angle data)
        :param **info: Miscellaneous metadata for the newly produced composite
        """
        if len(datasets) != 2:
            raise ValueError("Expected 2 datasets, got %d" % (len(datasets), ))

        dnb_data = datasets[0]
        sza_data = datasets[1]
        delayed = dask.delayed(self._run_dnb_normalization)(dnb_data.data, sza_data.data)
        output_dataset = dnb_data.copy()
        output_data = da.from_delayed(delayed, dnb_data.shape, dnb_data.dtype)
        output_dataset.data = output_data.rechunk(dnb_data.data.chunks)

        info = dnb_data.attrs.copy()
        info.update(self.attrs)
        info["standard_name"] = "equalized_radiance"
        info["mode"] = "L"
        output_dataset.attrs = info
        return output_dataset


class AdaptiveDNB(HistogramDNB):
    """Adaptive histogram equalized DNB composite.

    The logic for this code was taken from Polar2Grid and was originally
    developed by Eva Schiffer (SSEC).
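    The adaptive equalization itself is performed by
    ``local_histogram_equalization`` defined later in this module.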
    This composite separates the DNB data into 3 main regions: Day, Night,
    and Mixed. Each region is equalized separately to bring out the most
    information from the region due to the high dynamic range of the DNB
    data. Optionally, the mixed region can be separated into multiple smaller
    regions by using the `mixed_degree_step` keyword.
    """

    def __init__(self, *args, **kwargs):
        """Initialize the compositor with values from the user or from the configuration file.

        Adaptive histogram equalization and regular histogram equalization
        can be configured independently for each region: day, night, or
        mixed. A region can be set to use adaptive equalization "always", or
        "never", or only when there are multiple regions in a single scene
        "multiple" via the `adaptive_X` keyword arguments (see below).

        :param adaptive_day: one of ("always", "multiple", "never") meaning
                             when adaptive equalization is used.
        :param adaptive_mixed: one of ("always", "multiple", "never") meaning
                               when adaptive equalization is used.
        :param adaptive_night: one of ("always", "multiple", "never") meaning
                               when adaptive equalization is used.
        """
        self.adaptive_day = kwargs.pop("adaptive_day", "always")
        self.adaptive_mixed = kwargs.pop("adaptive_mixed", "always")
        self.adaptive_night = kwargs.pop("adaptive_night", "always")
        self.day_radius_pixels = int(kwargs.pop("day_radius_pixels", 400))
        self.mixed_radius_pixels = int(kwargs.pop("mixed_radius_pixels", 100))
        self.night_radius_pixels = int(kwargs.pop("night_radius_pixels", 400))

        super(AdaptiveDNB, self).__init__(*args, **kwargs)

    def _run_dnb_normalization(self, dnb_data, sza_data):
        """Scale the DNB data using an adaptive histogram equalization method.

        Args:
            dnb_data (ndarray): Day/Night Band data array
            sza_data (ndarray): Solar Zenith Angle data array

        """
        # convert dask arrays to DataArray objects
        dnb_data = xr.DataArray(dnb_data, dims=('y', 'x'))
        sza_data = xr.DataArray(sza_data, dims=('y', 'x'))

        good_mask = ~(dnb_data.isnull() | sza_data.isnull())
        # good_mask = ~(dnb_data.mask | sza_data.mask)
        output_dataset = dnb_data.where(good_mask)
        # we only need the numpy array
        output_dataset = output_dataset.values.copy()
        dnb_data = dnb_data.values
        sza_data = sza_data.values

        day_mask, mixed_mask, night_mask = make_day_night_masks(
            sza_data,
            good_mask.values,
            self.high_angle_cutoff,
            self.low_angle_cutoff,
            stepsDegrees=self.mixed_degree_step)

        did_equalize = False
        has_multi_times = len(mixed_mask) > 0
        if day_mask.any():
            did_equalize = True
            if self.adaptive_day == "always" or (
                    has_multi_times and self.adaptive_day == "multiple"):
                LOG.debug("Adaptive histogram equalizing DNB day data...")
                local_histogram_equalization(
                    dnb_data,
                    day_mask,
                    valid_data_mask=good_mask.values,
                    local_radius_px=self.day_radius_pixels,
                    out=output_dataset)
            else:
                LOG.debug("Histogram equalizing DNB day data...")
                histogram_equalization(dnb_data, day_mask, out=output_dataset)
        if mixed_mask:
            for mask in mixed_mask:
                if mask.any():
                    did_equalize = True
                    if self.adaptive_mixed == "always" or (
                            has_multi_times and
                            self.adaptive_mixed == "multiple"):
                        LOG.debug(
                            "Adaptive histogram equalizing DNB mixed data...")
                        local_histogram_equalization(
                            dnb_data,
                            mask,
                            valid_data_mask=good_mask.values,
                            local_radius_px=self.mixed_radius_pixels,
                            out=output_dataset)
                    else:
                        LOG.debug("Histogram equalizing DNB mixed data...")
                        # equalize using this mixed-region mask
                        histogram_equalization(dnb_data, mask, out=output_dataset)
        if night_mask.any():
            did_equalize = True
            if self.adaptive_night == "always" or (
                    has_multi_times and self.adaptive_night == "multiple"):
                LOG.debug("Adaptive histogram equalizing 
DNB night data...") local_histogram_equalization( dnb_data, night_mask, valid_data_mask=good_mask.values, local_radius_px=self.night_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB night data...") histogram_equalization(dnb_data, night_mask, out=output_dataset) if not did_equalize: raise RuntimeError("No valid data found to histogram equalize") return output_dataset class ERFDNB(CompositeBase): """Equalized DNB composite using the error function (erf). The logic for this code was taken from Polar2Grid and was originally developed by Curtis Seaman and Steve Miller. The original code was written in IDL and is included as comments in the code below. """ def __init__(self, *args, **kwargs): """Initialize ERFDNB specific keyword arguments.""" self.saturation_correction = kwargs.pop("saturation_correction", False) super(ERFDNB, self).__init__(*args, **kwargs) def _saturation_correction(self, dnb_data, unit_factor, min_val, max_val): saturation_pct = float(np.count_nonzero(dnb_data > max_val)) / dnb_data.size LOG.debug("Dynamic DNB saturation percentage: %f", saturation_pct) while saturation_pct > 0.005: max_val *= 1.1 * unit_factor saturation_pct = float(np.count_nonzero( dnb_data > max_val)) / dnb_data.size LOG.debug("Dynamic DNB saturation percentage: %f", saturation_pct) inner_sqrt = (dnb_data - min_val) / (max_val - min_val) # clip negative values to 0 before the sqrt inner_sqrt[inner_sqrt < 0] = 0 return np.sqrt(inner_sqrt) def __call__(self, datasets, **info): """Create the composite DataArray object for ERFDNB.""" if len(datasets) != 4: raise ValueError("Expected 4 datasets, got %d" % (len(datasets), )) from scipy.special import erf dnb_data = datasets[0] sza_data = datasets[1] lza_data = datasets[2] output_dataset = dnb_data.where(~(dnb_data.isnull() | sza_data.isnull())) # this algorithm assumes units of "W cm-2 sr-1" so if there are other # units we need to adjust for that if dnb_data.attrs.get("units", "W m-2 sr-1") == "W m-2 sr-1": unit_factor = 10000. else: unit_factor = 1. # convert to decimal instead of % moon_illum_fraction = da.mean(datasets[3].data) * 0.01 # From Steve Miller and Curtis Seaman # maxval = 10.^(-1.7 - (((2.65+moon_factor1+moon_factor2))*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # minval = 10.^(-4. - ((2.95+moon_factor2)*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # scaled_radiance = (radiance - minval) / (maxval - minval) # radiance = sqrt(scaled_radiance) # Version 2: Update from Curtis Seaman # maxval = 10.^(-1.7 - (((2.65+moon_factor1+moon_factor2))*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # minval = 10.^(-4. 
- ((2.95+moon_factor2)*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # saturated_pixels = where(radiance gt maxval, nsatpx) # saturation_pct = float(nsatpx)/float(n_elements(radiance)) # print, 'Saturation (%) = ', saturation_pct # # while saturation_pct gt 0.005 do begin # maxval = maxval*1.1 # saturated_pixels = where(radiance gt maxval, nsatpx) # saturation_pct = float(nsatpx)/float(n_elements(radiance)) # print, saturation_pct # endwhile # # scaled_radiance = (radiance - minval) / (maxval - minval) # radiance = sqrt(scaled_radiance) moon_factor1 = 0.7 * (1.0 - moon_illum_fraction) moon_factor2 = 0.0022 * lza_data.data erf_portion = 1 + erf((sza_data.data - 95.0) / (5.0 * np.sqrt(2.0))) max_val = da.power( 10, -1.7 - (2.65 + moon_factor1 + moon_factor2) * erf_portion) * unit_factor min_val = da.power(10, -4.0 - (2.95 + moon_factor2) * erf_portion) * unit_factor # Update from Curtis Seaman, increase max radiance curve until less # than 0.5% is saturated if self.saturation_correction: delayed = dask.delayed(self._saturation_correction)(output_dataset.data, unit_factor, min_val, max_val) output_dataset.data = da.from_delayed(delayed, output_dataset.shape, output_dataset.dtype) output_dataset.data = output_dataset.data.rechunk(dnb_data.data.chunks) else: inner_sqrt = (output_dataset - min_val) / (max_val - min_val) # clip negative values to 0 before the sqrt inner_sqrt = inner_sqrt.where(inner_sqrt > 0, 0) output_dataset.data = np.sqrt(inner_sqrt).data info = dnb_data.attrs.copy() info.update(self.attrs) info["standard_name"] = "equalized_radiance" info["mode"] = "L" output_dataset.attrs = info return output_dataset def make_day_night_masks(solarZenithAngle, good_mask, highAngleCutoff, lowAngleCutoff, stepsDegrees=None): """Generate masks for day, night, and twilight regions. Masks are created from the provided solar zenith angle data. Optionally provide the highAngleCutoff and lowAngleCutoff that define the limits of the terminator region (if no cutoffs are given the DEFAULT_HIGH_ANGLE and DEFAULT_LOW_ANGLE will be used). Optionally provide the stepsDegrees that define how many degrees each "mixed" mask in the terminator region should be (if no stepsDegrees is given, the whole terminator region will be one mask). """ # if the caller passes None, we're only doing one step stepsDegrees = highAngleCutoff - lowAngleCutoff if stepsDegrees is None else stepsDegrees night_mask = (solarZenithAngle > highAngleCutoff) & good_mask day_mask = (solarZenithAngle <= lowAngleCutoff) & good_mask mixed_mask = [] steps = list(range(lowAngleCutoff, highAngleCutoff + 1, stepsDegrees)) if steps[-1] >= highAngleCutoff: steps[-1] = highAngleCutoff steps = zip(steps, steps[1:]) for i, j in steps: LOG.debug("Processing step %d to %d" % (i, j)) tmp = (solarZenithAngle > i) & (solarZenithAngle <= j) & good_mask if tmp.any(): LOG.debug("Adding step %d to %d" % (i, j)) # log.debug("Points to process in this range: " + str(np.sum(tmp))) mixed_mask.append(tmp) del tmp return day_mask, mixed_mask, night_mask def histogram_equalization( data, mask_to_equalize, number_of_bins=1000, std_mult_cutoff=4.0, do_zerotoone_normalization=True, valid_data_mask=None, # these are theoretically hooked up, but not useful with only one # equalization clip_limit=None, slope_limit=None, # these parameters don't do anything, they're just here to mirror those # in the other call do_log_scale=False, log_offset=None, local_radius_px=None, out=None): """Perform a histogram equalization on the data. Data is selected by the mask_to_equalize mask. 
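    A minimal usage sketch (assuming ``data`` is a 2D numpy array and
    ``good_mask`` is a boolean array of the same shape)::

        out = histogram_equalization(data, good_mask)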
    The data will be separated into number_of_bins levels for equalization
    and outliers beyond +/- std_mult_cutoff*std will be ignored.

    If do_zerotoone_normalization is True the data selected by
    mask_to_equalize will be returned in the 0 to 1 range. Otherwise the
    data selected by mask_to_equalize will be returned in the 0 to
    number_of_bins range.

    Note: the data will be changed in place.
    """
    out = out if out is not None else data.copy()
    mask_to_use = mask_to_equalize if valid_data_mask is None else valid_data_mask

    LOG.debug("determining DNB data range for histogram equalization")
    avg = np.mean(data[mask_to_use])
    std = np.std(data[mask_to_use])
    # limit our range to +/- std_mult_cutoff*std; e.g. the default
    # std_mult_cutoff is 4.0 so about 99.8% of the data
    conservative_mask = (data < (avg + std * std_mult_cutoff)) & (
        data > (avg - std * std_mult_cutoff)) & mask_to_use

    LOG.debug("running histogram equalization")
    cumulative_dist_function, temp_bins = _histogram_equalization_helper(
        data[conservative_mask],
        number_of_bins,
        clip_limit=clip_limit,
        slope_limit=slope_limit)

    # linearly interpolate using the distribution function to get the new
    # values
    out[mask_to_equalize] = np.interp(data[mask_to_equalize],
                                      temp_bins[:-1],
                                      cumulative_dist_function)

    # if we were asked to, normalize our data to be between zero and one,
    # rather than zero and number_of_bins
    if do_zerotoone_normalization:
        _linear_normalization_from_0to1(out, mask_to_equalize, number_of_bins)

    return out


def local_histogram_equalization(data, mask_to_equalize, valid_data_mask=None, number_of_bins=1000,
                                 std_mult_cutoff=3.0,
                                 do_zerotoone_normalization=True,
                                 local_radius_px=300,
                                 clip_limit=60.0,  # 20.0,
                                 slope_limit=3.0,  # 0.5,
                                 do_log_scale=True,
                                 # can't take the log of zero, so the offset
                                 # may be needed; pass 0.0 if your data doesn't
                                 # need it
                                 log_offset=0.00001,
                                 out=None
                                 ):
    """Equalize the provided data (in the mask_to_equalize) using adaptive histogram equalization.

    Tiles of width/height (2 * local_radius_px + 1) will be calculated and
    results for each pixel will be bilinearly interpolated from the nearest
    4 tiles. When pixels fall near the edge of the image (where there is no
    adjacent tile), the interpolated sum from the available tiles is rescaled
    to account for the weight of any missing tiles::

        pixel total interpolated value = pixel available interpolated value / (1 - missing interpolation weight)

    If ``do_zerotoone_normalization`` is True the data will be scaled so that
    all data in the mask_to_equalize falls between 0 and 1; otherwise the
    data in mask_to_equalize will all fall between 0 and number_of_bins.
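    A minimal usage sketch (same assumptions as for ``histogram_equalization``
    above, with a hypothetical 200 pixel tile radius)::

        out = local_histogram_equalization(data, good_mask, local_radius_px=200)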
    Returns: The equalized data
    """
    out = out if out is not None else np.zeros_like(data)
    # if we don't have a valid mask, use the mask of what we should be
    # equalizing
    if valid_data_mask is None:
        valid_data_mask = mask_to_equalize

    # calculate some useful numbers for our tile math
    total_rows = data.shape[0]
    total_cols = data.shape[1]
    tile_size = int((local_radius_px * 2.0) + 1.0)
    row_tiles = int(total_rows / tile_size) if (
        (total_rows % tile_size) == 0) else int(total_rows / tile_size) + 1
    col_tiles = int(total_cols / tile_size) if (
        (total_cols % tile_size) == 0) else int(total_cols / tile_size) + 1

    # an array of our distribution functions for equalization
    all_cumulative_dist_functions = [[]]
    # an array of our bin information for equalization
    all_bin_information = [[]]

    # loop through our tiles and create the histogram equalizations for each
    # one
    for num_row_tile in range(row_tiles):
        # make sure we have enough rows available to store info on this next
        # row of tiles
        if len(all_cumulative_dist_functions) <= num_row_tile:
            all_cumulative_dist_functions.append([])
        if len(all_bin_information) <= num_row_tile:
            all_bin_information.append([])

        # go through each tile in this row and calculate the equalization
        for num_col_tile in range(col_tiles):
            # calculate the range for this tile (min is inclusive, max is
            # exclusive)
            min_row = num_row_tile * tile_size
            max_row = min_row + tile_size
            min_col = num_col_tile * tile_size
            max_col = min_col + tile_size

            # for speed of calculation, pull out the mask of pixels that should
            # be used to calculate the histogram
            mask_valid_data_in_tile = valid_data_mask[min_row:max_row, min_col:max_col]

            # if we have any valid data in this tile, calculate a histogram equalization for this tile
            # (note: even if this tile does not fall in the mask_to_equalize, its histogram may be used by other tiles)
            cumulative_dist_function, temp_bins = None, None
            if mask_valid_data_in_tile.any():
                # use all valid data in the tile, so separate sections will
                # blend cleanly
                temp_valid_data = data[min_row:max_row, min_col:max_col][
                    mask_valid_data_in_tile]
                temp_valid_data = temp_valid_data[
                    temp_valid_data >= 0
                ]  # TEMP, testing to see if negative data is messing everything up
                # limit the contrast by only considering data within a certain
                # range of the average
                if std_mult_cutoff is not None:
                    avg = np.mean(temp_valid_data)
                    std = np.std(temp_valid_data)
                    # limit our range to avg +/- std_mult_cutoff*std; e.g.
the
                    # default std_mult_cutoff is 4.0 so about 99.8% of the data
                    conservative_mask = (
                        temp_valid_data < (avg + std * std_mult_cutoff)) & (
                            temp_valid_data > (avg - std * std_mult_cutoff))
                    temp_valid_data = temp_valid_data[conservative_mask]

                # if we are taking the log of our data, do so now
                if do_log_scale:
                    temp_valid_data = np.log(temp_valid_data + log_offset)

                # do the histogram equalization and get the resulting
                # distribution function and bin information
                if temp_valid_data.size > 0:
                    cumulative_dist_function, temp_bins = _histogram_equalization_helper(
                        temp_valid_data,
                        number_of_bins,
                        clip_limit=clip_limit,
                        slope_limit=slope_limit)

            # hang on to our equalization related information for use later
            all_cumulative_dist_functions[num_row_tile].append(
                cumulative_dist_function)
            all_bin_information[num_row_tile].append(temp_bins)

    # get the tile weight array so we can use it to interpolate our data
    tile_weights = _calculate_weights(tile_size)

    # now loop through our tiles and linearly interpolate the equalized
    # versions of the data
    for num_row_tile in range(row_tiles):
        for num_col_tile in range(col_tiles):
            # calculate the range for this tile (min is inclusive, max is
            # exclusive)
            min_row = num_row_tile * tile_size
            max_row = min_row + tile_size
            min_col = num_col_tile * tile_size
            max_col = min_col + tile_size

            # for convenience, pull some of these tile sized chunks out
            temp_all_data = data[min_row:max_row, min_col:max_col].copy()
            temp_mask_to_equalize = mask_to_equalize[min_row:max_row, min_col:max_col]
            temp_all_valid_data_mask = valid_data_mask[min_row:max_row, min_col:max_col]

            # if we have any data in this tile, calculate our weighted sum
            if temp_mask_to_equalize.any():
                if do_log_scale:
                    temp_all_data[temp_all_valid_data_mask] = np.log(
                        temp_all_data[temp_all_valid_data_mask] + log_offset)
                temp_data_to_equalize = temp_all_data[temp_mask_to_equalize]
                temp_all_valid_data = temp_all_data[temp_all_valid_data_mask]

                # a place to hold our weighted sum that represents the interpolated contributions
                # of the histogram equalizations from the surrounding tiles
                temp_sum = np.zeros_like(temp_data_to_equalize)

                # how much weight were we unable to use because those tiles
                # fell off the edge of the image?
unused_weight = np.zeros(temp_data_to_equalize.shape, dtype=tile_weights.dtype) # loop through all the surrounding tiles and process their # contributions to this tile for weight_row in range(3): for weight_col in range(3): # figure out which adjacent tile we're processing (in # overall tile coordinates instead of relative to our # current tile) calculated_row = num_row_tile - 1 + weight_row calculated_col = num_col_tile - 1 + weight_col tmp_tile_weights = tile_weights[ weight_row, weight_col][np.where( temp_mask_to_equalize)] # if we're inside the tile array and the tile we're # processing has a histogram equalization for us to # use, process it if ((calculated_row >= 0) and (calculated_row < row_tiles) and (calculated_col >= 0) and (calculated_col < col_tiles) and ( all_bin_information[calculated_row][ calculated_col] is not None) and (all_cumulative_dist_functions[calculated_row][ calculated_col] is not None)): # equalize our current tile using the histogram # equalization from the tile we're processing temp_equalized_data = np.interp( temp_all_valid_data, all_bin_information[ calculated_row][calculated_col][:-1], all_cumulative_dist_functions[calculated_row][ calculated_col]) temp_equalized_data = temp_equalized_data[np.where( temp_mask_to_equalize[ temp_all_valid_data_mask])] # add the contribution for the tile we're # processing to our weighted sum temp_sum += (temp_equalized_data * tmp_tile_weights) # if the tile we're processing doesn't exist, hang onto the weight we # would have used for it so we can correct that later else: unused_weight -= tmp_tile_weights # if we have unused weights, scale our values to correct for # that if unused_weight.any(): # TODO, if the mask masks everything out this will be a # zero! temp_sum /= unused_weight + 1 # now that we've calculated the weighted sum for this tile, set # it in our data array out[min_row:max_row, min_col:max_col][ temp_mask_to_equalize] = temp_sum # TEMP, test without using weights # data[min_row:max_row, min_col:max_col][temp_mask_to_equalize] = \ # np.interp(temp_data_to_equalize, all_bin_information[num_row_tile][num_col_tile][:-1], # all_cumulative_dist_functions[num_row_tile][num_col_tile]) # if we were asked to, normalize our data to be between zero and one, # rather than zero and number_of_bins if do_zerotoone_normalization: _linear_normalization_from_0to1(out, mask_to_equalize, number_of_bins) return out def _histogram_equalization_helper(valid_data, number_of_bins, clip_limit=None, slope_limit=None): """Calculate the simplest possible histogram equalization, using only valid data. 
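An illustrative sketch of how the returned values are meant to be applied
(this mirrors the np.interp call used in the tile loop above; the variable
names are only for the example)::

    cdf, bins = _histogram_equalization_helper(valid_data, number_of_bins)
    equalized = np.interp(valid_data, bins[:-1], cdf)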
Returns: cumulative distribution function and bin information """ # bucket all the selected data using np's histogram function temp_histogram, temp_bins = np.histogram(valid_data, number_of_bins) # if we have a clip limit and we should do our clipping before building # the cumulative distribution function, clip off our histogram if clip_limit is not None: # clip our histogram and remember how much we removed pixels_to_clip_at = int(clip_limit * (valid_data.size / float(number_of_bins))) mask_to_clip = temp_histogram > clip_limit # num_bins_clipped = sum(mask_to_clip) # num_pixels_clipped = sum(temp_histogram[mask_to_clip]) - (num_bins_clipped * pixels_to_clip_at) temp_histogram[mask_to_clip] = pixels_to_clip_at # calculate the cumulative distribution function cumulative_dist_function = temp_histogram.cumsum() # if we have a clip limit and we should do our clipping after building the # cumulative distribution function, clip off our cdf if slope_limit is not None: # clip our cdf and remember how much we removed pixel_height_limit = int(slope_limit * (valid_data.size / float(number_of_bins))) cumulative_excess_height = 0 num_clipped_pixels = 0 weight_metric = np.zeros(cumulative_dist_function.shape, dtype=float) for pixel_index in range(1, cumulative_dist_function.size): current_pixel_count = cumulative_dist_function[pixel_index] diff_from_acceptable = ( current_pixel_count - cumulative_dist_function[pixel_index - 1] - pixel_height_limit - cumulative_excess_height) if diff_from_acceptable < 0: weight_metric[pixel_index] = abs(diff_from_acceptable) cumulative_excess_height += max(diff_from_acceptable, 0) cumulative_dist_function[ pixel_index] = current_pixel_count - cumulative_excess_height num_clipped_pixels = num_clipped_pixels + cumulative_excess_height # now normalize the overall distribution function cumulative_dist_function = (number_of_bins - 1) * cumulative_dist_function / cumulative_dist_function[-1] # return what someone else will need in order to apply the equalization later return cumulative_dist_function, temp_bins def _calculate_weights(tile_size): """Calculate a weight array for bilinear interpolation of histogram tiles. The weight array will be used to quickly bilinearly-interpolate the histogram equalizations tile size should be the width and height of a tile in pixels. Returns: 4D weight array where the first 2 dimensions correspond to the grid of where the tiles are relative to the tile being interpolated. 
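For every pixel the nine weights sum to 1.0, so an illustrative sanity
check (the names are only for the example) would be::

    weights = _calculate_weights(101)  # tile_size for local_radius_px = 50
    assert weights.shape == (3, 3, 101, 101)
    assert np.allclose(weights.sum(axis=(0, 1)), 1.0)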
""" # we are essentially making a set of weight masks for an ideal center tile # that has all 8 surrounding tiles available # create our empty template tiles template_tile = np.zeros((3, 3, tile_size, tile_size), dtype=np.float32) """ # TEMP FOR TESTING, create a weight tile that does no interpolation template_tile[1,1] = template_tile[1,1] + 1.0 """ # for ease of calculation, figure out the index of the center pixel in a tile # and how far that pixel is from the edge of the tile (in pixel units) center_index = int(tile_size / 2) center_dist = tile_size / 2.0 # loop through each pixel in the tile and calculate the 9 weights for that pixel # were weights for a pixel are 0.0 they are not set (since the template_tile # starts out as all zeros) for row in range(tile_size): for col in range(tile_size): vertical_dist = abs( center_dist - row ) # the distance from our pixel to the center of our tile, vertically horizontal_dist = abs( center_dist - col ) # the distance from our pixel to the center of our tile, horizontally # pre-calculate which 3 adjacent tiles will affect our tile # (note: these calculations aren't quite right if center_index equals the row or col) horizontal_index = 0 if col < center_index else 2 vertical_index = 0 if row < center_index else 2 # if this is the center pixel, we only need to use it's own tile # for it if (row is center_index) and (col is center_index): # all of the weight for this pixel comes from it's own tile template_tile[1, 1][row, col] = 1.0 # if this pixel is in the center row, but is not the center pixel # we're going to need to linearly interpolate it's tile and the # tile that is horizontally nearest to it elif (row is center_index) and (col is not center_index): # linear interp horizontally beside_weight = horizontal_dist / tile_size # the weight from the adjacent tile local_weight = ( tile_size - horizontal_dist) / tile_size # the weight from this tile # set the weights for the two relevant tiles template_tile[1, 1][row, col] = local_weight template_tile[1, horizontal_index][row, col] = beside_weight # if this pixel is in the center column, but is not the center pixel # we're going to need to linearly interpolate it's tile and the # tile that is vertically nearest to it elif (row is not center_index) and (col is center_index): # linear interp vertical beside_weight = vertical_dist / tile_size # the weight from the adjacent tile local_weight = ( tile_size - vertical_dist) / tile_size # the weight from this tile # set the weights for the two relevant tiles template_tile[1, 1][row, col] = local_weight template_tile[vertical_index, 1][row, col] = beside_weight # if the pixel is in one of the four quadrants that are above or below the center # row and column, we need to bilinearly interpolate it between the # nearest four tiles else: # bilinear interpolation local_weight = ((tile_size - vertical_dist) / tile_size) * ( (tile_size - horizontal_dist) / tile_size) # the weight from this tile vertical_weight = ((vertical_dist) / tile_size) * ( (tile_size - horizontal_dist) / tile_size ) # the weight from the vertically adjacent tile horizontal_weight = ( (tile_size - vertical_dist) / tile_size) * ( (horizontal_dist) / tile_size ) # the weight from the horizontally adjacent tile diagonal_weight = ((vertical_dist) / tile_size) * ( (horizontal_dist) / tile_size ) # the weight from the diagonally adjacent tile # set the weights for the four relevant tiles template_tile[1, 1, row, col] = local_weight template_tile[vertical_index, 1, row, col] = vertical_weight 
template_tile[1, horizontal_index, row, col] = horizontal_weight template_tile[vertical_index, horizontal_index, row, col] = diagonal_weight # return the weights for an ideal center tile return template_tile def _linear_normalization_from_0to1( data, mask, theoretical_max, theoretical_min=0, message="normalizing equalized data to fit in 0 to 1 range"): """Do a linear normalization so all data is in the 0 to 1 range. This is a sloppy but fast calculation that relies on parameters giving it the correct theoretical current max and min so it can scale the data accordingly. """ LOG.debug(message) if theoretical_min != 0: data[mask] = data[mask] - theoretical_min theoretical_max = theoretical_max - theoretical_min data[mask] = data[mask] / theoretical_max class NCCZinke(CompositeBase): """Equalized DNB composite using the Zinke algorithm [#ncc1]_. References: .. [#ncc1] Stephan Zinke (2017), A simplified high and near-constant contrast approach for the display of VIIRS day/night band imagery :doi:`10.1080/01431161.2017.1338838` """ def __call__(self, datasets, **info): """Create HNCC DNB composite.""" if len(datasets) != 4: raise ValueError("Expected 4 datasets, got %d" % (len(datasets),)) dnb_data = datasets[0] sza_data = datasets[1] lza_data = datasets[2] # this algorithm assumes units of "W cm-2 sr-1" so if there are other # units we need to adjust for that if dnb_data.attrs.get("units", "W m-2 sr-1") == "W m-2 sr-1": unit_factor = 10000. else: unit_factor = 1. mda = dnb_data.attrs.copy() dnb_data = dnb_data.copy() / unit_factor # convert to decimal instead of % moon_illum_fraction = da.mean(datasets[3].data) * 0.01 phi = da.rad2deg(da.arccos(2. * moon_illum_fraction - 1)) vfl = 0.026 * phi + 4.0e-9 * (phi ** 4.) m_fullmoon = -12.74 m_sun = -26.74 m_moon = vfl + m_fullmoon gs_ = self.gain_factor(sza_data.data) r_sun_moon = 10.**((m_sun - m_moon) / -2.5) gl_ = r_sun_moon * self.gain_factor(lza_data.data) gtot = 1. / (1. / gs_ + 1. / gl_) dnb_data += 2.6e-10 dnb_data *= gtot mda['name'] = self.attrs['name'] mda['standard_name'] = 'ncc_radiance' dnb_data.attrs = mda return dnb_data def gain_factor(self, theta): """Compute gain factor in a dask-friendly manner.""" return theta.map_blocks(self._gain_factor, dtype=theta.dtype) @staticmethod def _gain_factor(theta): gain = np.empty_like(theta) mask = theta <= 87.541 gain[mask] = (58 + 4 / np.cos(np.deg2rad(theta[mask]))) / 5 mask = np.logical_and(theta <= 96, 87.541 < theta) gain[mask] = (123 * np.exp(1.06 * (theta[mask] - 89.589)) * ((theta[mask] - 93)**2 / 18 + 0.5)) mask = np.logical_and(96 < theta, theta <= 101) gain[mask] = 123 * np.exp(1.06 * (theta[mask] - 89.589)) mask = np.logical_and(101 < theta, theta <= 103.49) gain[mask] = (123 * np.exp(1.06 * (101 - 89.589)) * np.log(theta[mask] - (101 - np.e)) ** 2) gain[theta > 103.49] = 6.0e7 return gain class SnowAge(GenericCompositor): """Create RGB snow product. 
Product is based on method presented at the second CSPP/IMAPP users' meeting at Eumetsat in Darmstadt on 14-16 April 2015 # Bernard Bellec snow Look-Up Tables V 1.0 (c) Meteo-France # These Look-up Tables allow you to create the RGB snow product # for SUOMI-NPP VIIRS Imager according to the algorithm # presented at the second CSPP/IMAPP users' meeting at Eumetsat # in Darmstadt on 14-16 April 2015 # The algorithm and the product are described in this # presentation : # http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf # For further information you may contact # Bernard Bellec at Bernard.Bellec@meteo.fr # or # Pascale Roquet at Pascale.Roquet@meteo.fr """ def __call__(self, projectables, nonprojectables=None, **info): """Generate a SnowAge RGB composite. The algorithm and the product are described in this presentation : http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf For further information you may contact Bernard Bellec at Bernard.Bellec@meteo.fr or Pascale Roquet at Pascale.Roquet@meteo.fr """ if len(projectables) != 5: raise ValueError("Expected 5 datasets, got %d" % (len(projectables), )) # Collect information that is the same between the projectables info = combine_metadata(*projectables) # Update that information with configured information (including name) info.update(self.attrs) # Force certain pieces of metadata that we *know* to be true info["wavelength"] = None m07 = projectables[0] * 255. / 160. m08 = projectables[1] * 255. / 160. m09 = projectables[2] * 255. / 160. m10 = projectables[3] * 255. / 160. m11 = projectables[4] * 255. / 160. refcu = m11 - m10 refcu = refcu.clip(min=0) ch1 = m07 - refcu / 2. - m09 / 4. ch2 = m08 + refcu / 4. + m09 / 4. ch3 = m11 + m09 # GenericCompositor needs valid DataArrays with 'area' metadata ch1.attrs = info ch2.attrs = info ch3.attrs = info return super(SnowAge, self).__call__([ch1, ch2, ch3], **info) satpy-0.20.0/satpy/config.py000066400000000000000000000156251362525524100157260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Satpy Configuration directory and file handling.""" from __future__ import print_function import glob import logging import os from collections.abc import Mapping from collections import OrderedDict import yaml from six.moves import configparser try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader from yaml import BaseLoader LOG = logging.getLogger(__name__) BASE_PATH = os.path.dirname(os.path.realpath(__file__)) # FIXME: Use package_resources? 
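# An illustrative sketch of how the lookup below resolves (the path is
# hypothetical, not from this file):
#   os.environ['PPP_CONFIG_DIR'] = '/home/user/satpy-config'
#   config_search_paths('readers/abi_l1b.yaml')
# returns the existing matches with the builtin package 'etc' entry first,
# so user files read later can override the packaged defaults.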
PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, 'etc') def get_environ_config_dir(default=None): """Get the config dir.""" if default is None: default = PACKAGE_CONFIG_PATH return os.environ.get('PPP_CONFIG_DIR', default) def get_environ_ancpath(default='.'): """Get the ancpath.""" return os.environ.get('SATPY_ANCPATH', default) # FIXME: Old readers still use only this, but this may get updated by Scene CONFIG_PATH = get_environ_config_dir() def runtime_import(object_path): """Import at runtime.""" obj_module, obj_element = object_path.rsplit(".", 1) loader = __import__(obj_module, globals(), locals(), [str(obj_element)]) return getattr(loader, obj_element) def config_search_paths(filename, *search_dirs, **kwargs): """Get the environment variable value every time (could be set dynamically).""" # FIXME: Consider removing the 'magic' environment variable all together CONFIG_PATH = get_environ_config_dir() # noqa paths = [filename, os.path.basename(filename)] paths += [os.path.join(search_dir, filename) for search_dir in search_dirs] # FUTURE: Remove CONFIG_PATH because it should be included as a search_dir paths += [os.path.join(CONFIG_PATH, filename), os.path.join(PACKAGE_CONFIG_PATH, filename)] paths = [os.path.abspath(path) for path in paths] if kwargs.get("check_exists", True): paths = [x for x in paths if os.path.isfile(x)] paths = list(OrderedDict.fromkeys(paths)) # flip the order of the list so builtins are loaded first return paths[::-1] def get_config(filename, *search_dirs, **kwargs): """Blend the different configs, from package defaults to user-defined files.""" config = kwargs.get("config_reader_class", configparser.ConfigParser)() paths = config_search_paths(filename, *search_dirs) successes = config.read(reversed(paths)) if successes: LOG.debug("Read config from %s", str(successes)) return config, successes LOG.warning("Couldn't find any config file matching %s", filename) return None, [] def glob_config(pattern, *search_dirs): """Return glob results for all possible configuration locations. Note: This method does not check the configuration "base" directory if the pattern includes a subdirectory. This is done for performance since this is usually used to find *all* configs for a certain component. """ patterns = config_search_paths(pattern, *search_dirs, check_exists=False) for pattern in patterns: for path in glob.iglob(pattern): yield path def get_config_path(filename, *search_dirs): """Get the appropriate path for a filename, in this order: filename, ., PPP_CONFIG_DIR, package's etc dir.""" paths = config_search_paths(filename, *search_dirs) for path in paths[::-1]: if os.path.exists(path): return path def recursive_dict_update(d, u): """Recursive dictionary update. Copied from: http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth """ for k, v in u.items(): if isinstance(v, Mapping): r = recursive_dict_update(d.get(k, {}), v) d[k] = r else: d[k] = u[k] return d def check_yaml_configs(configs, key): """Get a diagnostic for the yaml *configs*. *key* is the section to look for to get a name for the config at hand.
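The result maps each config's name to a status string, e.g. (an
illustrative sketch) ``{'abi_l1b': 'ok'}``.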
""" diagnostic = {} for i in configs: for fname in i: with open(fname) as stream: try: res = yaml.load(stream, Loader=UnsafeLoader) msg = 'ok' except yaml.YAMLError as err: stream.seek(0) res = yaml.load(stream, Loader=BaseLoader) if err.context == 'while constructing a Python object': msg = err.problem else: msg = 'error' finally: try: diagnostic[res[key]['name']] = msg except (KeyError, TypeError): # this object doesn't have a 'name' pass return diagnostic def _check_import(module_names): """Import the specified modules and provide status.""" diagnostics = {} for module_name in module_names: try: __import__(module_name) res = 'ok' except ImportError as err: res = str(err) diagnostics[module_name] = res return diagnostics def check_satpy(readers=None, writers=None, extras=None): """Check the satpy readers and writers for correct installation. Args: readers (list or None): Limit readers checked to those specified writers (list or None): Limit writers checked to those specified extras (list or None): Limit extras checked to those specified Returns: bool True if all specified features were successfully loaded. """ from satpy.readers import configs_for_reader from satpy.writers import configs_for_writer print('Readers') print('=======') for reader, res in sorted(check_yaml_configs(configs_for_reader(reader=readers), 'reader').items()): print(reader + ': ', res) print() print('Writers') print('=======') for writer, res in sorted(check_yaml_configs(configs_for_writer(writer=writers), 'writer').items()): print(writer + ': ', res) print() print('Extras') print('======') module_names = extras if extras is not None else ('cartopy', 'geoviews') for module_name, res in sorted(_check_import(module_names).items()): print(module_name + ': ', res) print() satpy-0.20.0/satpy/dataset.py000066400000000000000000000272351362525524100161060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Dataset objects.""" import sys import logging import numbers from collections import namedtuple from datetime import datetime import numpy as np logger = logging.getLogger(__name__) class MetadataObject(object): """A general metadata object.""" def __init__(self, **attributes): """Initialize the class with *attributes*.""" self.attrs = attributes @property def id(self): """Return the DatasetID of the object.""" return DatasetID.from_dict(self.attrs) def average_datetimes(dt_list): """Average a series of datetime objects. .. note:: This function assumes all datetime objects are naive and in the same time zone (UTC). 
Args: dt_list (iterable): Datetime objects to average Returns: Average datetime as a datetime object """ if sys.version_info < (3, 3): # timestamp added in python 3.3 import time def timestamp_func(dt): return time.mktime(dt.timetuple()) else: timestamp_func = datetime.timestamp total = [timestamp_func(dt) for dt in dt_list] return datetime.fromtimestamp(sum(total) / len(total)) def combine_metadata(*metadata_objects, **kwargs): """Combine the metadata of two or more Datasets. If any keys are not equal or do not exist in all provided dictionaries then they are not included in the returned dictionary. By default any keys with the word 'time' in them and consisting of datetime objects will be averaged. This is to handle cases where data were observed at almost the same time but not exactly. Args: *metadata_objects: MetadataObject or dict objects to combine average_times (bool): Average any keys with 'time' in the name Returns: dict: the combined metadata """ average_times = kwargs.get('average_times', True) # python 2 compatibility (no kwarg after *args) shared_keys = None info_dicts = [] # grab all of the dictionary objects provided and make a set of the shared keys for metadata_object in metadata_objects: if isinstance(metadata_object, dict): metadata_dict = metadata_object elif hasattr(metadata_object, "attrs"): metadata_dict = metadata_object.attrs else: continue info_dicts.append(metadata_dict) if shared_keys is None: shared_keys = set(metadata_dict.keys()) else: shared_keys &= set(metadata_dict.keys()) # combine all of the dictionaries shared_info = {} for k in shared_keys: values = [nfo[k] for nfo in info_dicts] any_arrays = any([isinstance(val, np.ndarray) for val in values]) if any_arrays: if all(np.all(val == values[0]) for val in values[1:]): shared_info[k] = values[0] elif 'time' in k and isinstance(values[0], datetime) and average_times: shared_info[k] = average_datetimes(values) elif all(val == values[0] for val in values[1:]): shared_info[k] = values[0] return shared_info DATASET_KEYS = ("name", "wavelength", "resolution", "polarization", "calibration", "level", "modifiers") DatasetID = namedtuple("DatasetID", " ".join(DATASET_KEYS)) DatasetID.__new__.__defaults__ = (None, None, None, None, None, None, tuple()) class DatasetID(DatasetID): """Identifier for all `Dataset` objects. DatasetID is a namedtuple that holds identifying and classifying information about a Dataset. There are two identifying elements, ``name`` and ``wavelength``. These can be used to generically refer to a Dataset. The other elements of a DatasetID are meant to further distinguish a Dataset from the possible variations it may have. For example multiple Datasets may be called by one ``name`` but may exist in multiple resolutions or with different calibrations such as "radiance" and "reflectance". If an element is `None` then it is considered not applicable. A DatasetID can also be used in Satpy to query for a Dataset. This way a fully qualified DatasetID can be found even if some of the DatasetID elements are unknown. In this case a `None` signifies something that is unknown or not applicable to the requested Dataset. Args: name (str): String identifier for the Dataset wavelength (float, tuple): Single float wavelength when querying for a Dataset. Otherwise 3-element tuple of floats specifying the minimum, nominal, and maximum wavelength for a Dataset. `None` if not applicable. resolution (int, float): Per data pixel/area resolution. 
If resolution varies across the Dataset then nadir view resolution is preferred. Usually this is in meters, but for lon/lat gridded data angle degrees may be used. polarization (str): 'V' or 'H' polarizations of a microwave channel. `None` if not applicable. calibration (str): String identifying the calibration level of the Dataset (ex. 'radiance', 'reflectance', etc). `None` if not applicable. level (int, float): Pressure/altitude level of the dataset. This is typically in hPa, but may be in inverse meters for altitude datasets (1/meters). modifiers (tuple): Tuple of strings identifying what corrections or other modifications have been performed on this Dataset (ex. 'sunz_corrected', 'rayleigh_corrected', etc). `None` or empty tuple if not applicable. """ def __new__(cls, *args, **kwargs): """Create new DatasetID.""" ret = super(DatasetID, cls).__new__(cls, *args, **kwargs) if ret.modifiers is not None and not isinstance(ret.modifiers, tuple): raise TypeError("'DatasetID' modifiers must be a tuple or None, " "not {}".format(type(ret.modifiers))) return ret @staticmethod def name_match(a, b): """Return if two string names are equal. Args: a (str): DatasetID.name or other string b (str): DatasetID.name or other string """ return a == b @staticmethod def wavelength_match(a, b): """Return if two wavelengths are equal. Args: a (tuple or scalar): (min wl, nominal wl, max wl) or scalar wl b (tuple or scalar): (min wl, nominal wl, max wl) or scalar wl """ if type(a) == type(b) or (isinstance(a, numbers.Number) and isinstance(b, numbers.Number)): return a == b elif a is None or b is None: return False elif isinstance(a, (list, tuple)) and len(a) == 3: return a[0] <= b <= a[2] elif isinstance(b, (list, tuple)) and len(b) == 3: return b[0] <= a <= b[2] else: raise ValueError("Can only compare wavelengths of length 1 or 3") def _comparable(self): """Get a comparable version of the DatasetID. Without this DatasetIDs often raise an exception when compared in Python 3 due to None not being comparable with other types.
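For example, ``sorted([DatasetID(name='z'), DatasetID()])`` only works
because the ``None`` name is compared as the empty string.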
""" return self._replace( name='' if self.name is None else self.name, wavelength=tuple() if self.wavelength is None else self.wavelength, resolution=0 if self.resolution is None else self.resolution, polarization='' if self.polarization is None else self.polarization, calibration='' if self.calibration is None else self.calibration, ) def __lt__(self, other): """Less than.""" """Compare DatasetIDs with special handling of `None` values""" # modifiers should never be None when sorted, should be tuples if isinstance(other, DatasetID): other = other._comparable() return super(DatasetID, self._comparable()).__lt__(other) def __eq__(self, other): """Check for equality.""" if isinstance(other, str): return self.name_match(self.name, other) elif isinstance(other, numbers.Number) or \ isinstance(other, (tuple, list)) and len(other) == 3: return self.wavelength_match(self.wavelength, other) else: return super(DatasetID, self).__eq__(other) def __hash__(self): """Generate the hash of the ID.""" return tuple.__hash__(self) @classmethod def from_dict(cls, d, **kwargs): """Convert a dict to an ID.""" args = [] for k in DATASET_KEYS: val = kwargs.get(k, d.get(k)) # force modifiers to tuple if k == 'modifiers' and val is not None: val = tuple(val) args.append(val) return cls(*args) def to_dict(self, trim=True): """Convert the ID to a dict.""" if trim: return self._to_trimmed_dict() else: return dict(zip(DATASET_KEYS, self)) def _to_trimmed_dict(self): return {key: getattr(self, key) for key in DATASET_KEYS if getattr(self, key) is not None} def create_filtered_dsid(dataset_key, **dfilter): """Create a DatasetID matching *dataset_key* and *dfilter*. If a proprety is specified in both *dataset_key* and *dfilter*, the former has priority. """ try: ds_dict = dataset_key.to_dict() except AttributeError: if isinstance(dataset_key, str): ds_dict = {'name': dataset_key} elif isinstance(dataset_key, numbers.Number): ds_dict = {'wavelength': dataset_key} for key, value in dfilter.items(): if value is not None: ds_dict.setdefault(key, value) return DatasetID.from_dict(ds_dict) def dataset_walker(datasets): """Walk through *datasets* and their ancillary data. Yields datasets and their parent. """ for dataset in datasets: yield dataset, None for anc_ds in dataset.attrs.get('ancillary_variables', []): try: anc_ds.attrs yield anc_ds, dataset except AttributeError: continue def replace_anc(dataset, parent_dataset): """Replace *dataset* the *parent_dataset*'s `ancillary_variables` field.""" if parent_dataset is None: return current_dsid = DatasetID.from_dict(dataset.attrs) for idx, ds in enumerate(parent_dataset.attrs['ancillary_variables']): if current_dsid == DatasetID.from_dict(ds.attrs): parent_dataset.attrs['ancillary_variables'][idx] = dataset return class Dataset(object): """Placeholder for the deprecated class.""" pass satpy-0.20.0/satpy/demo/000077500000000000000000000000001362525524100150225ustar00rootroot00000000000000satpy-0.20.0/satpy/demo/__init__.py000066400000000000000000000142351362525524100171400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo data download helper functions. Each ``get_*`` function below downloads files to a local directory and returns a list of paths to those files. Some (not all) functions have multiple options for how the data is downloaded (via the ``method`` keyword argument) including: - gcsfs: Download data from a public google cloud storage bucket using the ``gcsfs`` package. - unidata_thredds: Access data using OpenDAP or similar method from Unidata's public THREDDS server (https://thredds.unidata.ucar.edu/thredds/catalog.html). - uwaos_thredds: Access data using OpenDAP or similar method from the University of Wisconsin - Madison's AOS department's THREDDS server. - http: A last resort download method that downloads a tarball or zip file from one or more servers available to the Satpy project, when nothing else is available. - uw_arcdata: A network mount available on many servers at the Space Science and Engineering Center (SSEC) at the University of Wisconsin - Madison. This method is mainly meant for when tutorials are taught at the SSEC using a Jupyter Hub server. To use these functions, do: >>> from satpy import Scene, demo >>> filenames = demo.get_us_midlatitude_cyclone_abi() >>> scn = Scene(reader='abi_l1b', filenames=filenames) """ import os import logging LOG = logging.getLogger(__name__) def _makedirs(directory, exist_ok=False): """Python 2.7 friendly os.makedirs. After python 2.7 is dropped, just use `os.makedirs` with `exist_ok=True`. """ try: os.makedirs(directory) except OSError: if not exist_ok: raise def get_us_midlatitude_cyclone_abi(base_dir='.', method=None, force=False): """Get GOES-16 ABI (CONUS sector) data from 2019-03-14 00:00Z. Args: base_dir (str): Base directory for downloaded files. method (str): Force download method for the data if not already cached. Allowed options are: 'gcsfs'. Default of ``None`` will choose the best method based on environment settings. force (bool): Force re-download of data regardless of its existence on the local system. Warning: May delete non-demo files stored in download directory. Total size: ~110MB """ if method is None: method = 'gcsfs' if method not in ['gcsfs']: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) from ._google_cloud_platform import get_bucket_files patterns = ['gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*0002*.nc'] subdir = os.path.join(base_dir, 'abi_l1b', '20190314_us_midlatitude_cyclone') _makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force) assert len(filenames) == 16, "Not all files could be downloaded" return filenames def get_hurricane_florence_abi(base_dir='.', method=None, force=False, channels=range(1, 17), num_frames=10): """Get GOES-16 ABI (Meso sector) data from 2018-09-11 13:00Z to 17:00Z. Args: base_dir (str): Base directory for downloaded files. method (str): Force download method for the data if not already cached. Allowed options are: 'gcsfs'. Default of ``None`` will choose the best method based on environment settings. force (bool): Force re-download of data regardless of its existence on the local system. Warning: May delete non-demo files stored in download directory.
channels (list): Channels to include in download. Defaults to all 16 channels. num_frames (int or slice): Number of frames to download. Maximum 240 frames. Default 10 frames. Size per frame (all channels): ~15MB Total size (default 10 frames, all channels): ~124MB Total size (240 frames, all channels): ~3.5GB """ if method is None: method = 'gcsfs' if method not in ['gcsfs']: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) if isinstance(num_frames, (int, float)): frame_slice = slice(0, num_frames) else: frame_slice = num_frames from ._google_cloud_platform import get_bucket_files patterns = [] for channel in channels: # patterns += ['gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/1[3456]/' # '*C{:02d}*s20182541[3456]*.nc'.format(channel)] patterns += [( 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc'.format(channel), 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc'.format(channel), 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc'.format(channel), 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc'.format(channel), )] subdir = os.path.join(base_dir, 'abi_l1b', '20180911_hurricane_florence_abi_l1b') _makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force, pattern_slice=frame_slice) actual_slice = frame_slice.indices(240) # 240 max frames num_frames = int((actual_slice[1] - actual_slice[0]) / actual_slice[2]) assert len(filenames) == len(channels) * num_frames, "Not all files could be downloaded" return filenames satpy-0.20.0/satpy/demo/_google_cloud_platform.py000066400000000000000000000071341362525524100221060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . import os import logging try: from urllib.request import urlopen from urllib.error import URLError except ImportError: # python 2 from urllib2 import urlopen, URLError try: import gcsfs except ImportError: gcsfs = None LOG = logging.getLogger(__name__) def is_google_cloud_instance(): try: return urlopen('http://metadata.google.internal').headers.get('Metadata-Flavor') == 'Google' except URLError: return False def get_bucket_files(glob_pattern, base_dir, force=False, pattern_slice=slice(None)): """Helper function to download files from Google Cloud Storage. Args: glob_pattern (str or list): Glob pattern string or series of patterns used to search for on Google Cloud Storage. The pattern should include the "gs://" protocol prefix. If a list of lists, then the results of each sublist pattern are concatenated and the result is treated as one pattern result. This is important for things like ``pattern_slice`` and complicated glob patterns not supported by GCP. base_dir (str): Root directory to place downloaded files on the local system. 
force (bool): Force re-download of data regardless of its existence on the local system. Warning: May delete non-demo files stored in download directory. pattern_slice (slice): Slice object to limit the number of files returned by each glob pattern. """ if gcsfs is None: raise RuntimeError("Missing 'gcsfs' dependency for GCS download.") if not os.path.isdir(base_dir): # it is the caller's responsibility to make this raise OSError("Directory does not exist: {}".format(base_dir)) if isinstance(glob_pattern, str): glob_pattern = [glob_pattern] fs = gcsfs.GCSFileSystem(token='anon') filenames = [] for gp in glob_pattern: # handle multiple glob patterns being treated as one pattern # for complicated patterns that GCP can't handle if isinstance(gp, str): glob_results = list(fs.glob(gp)) else: # flat list of results glob_results = [fn for pat in gp for fn in fs.glob(pat)] for fn in glob_results[pattern_slice]: ondisk_fn = os.path.basename(fn) ondisk_pathname = os.path.join(base_dir, ondisk_fn) filenames.append(ondisk_pathname) if force and os.path.isfile(ondisk_pathname): os.remove(ondisk_pathname) elif os.path.isfile(ondisk_pathname): LOG.info("Found existing: {}".format(ondisk_pathname)) continue LOG.info("Downloading: {}".format(ondisk_pathname)) fs.get('gs://' + fn, ondisk_pathname) if not filenames: raise OSError("No files could be found or downloaded.") return filenames satpy-0.20.0/satpy/enhancements/000077500000000000000000000000001362525524100165465ustar00rootroot00000000000000satpy-0.20.0/satpy/enhancements/__init__.py000066400000000000000000000364311362525524100206660ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancements.""" import numpy as np import xarray as xr import dask import dask.array as da import logging LOG = logging.getLogger(__name__) def stretch(img, **kwargs): """Perform stretch.""" return img.stretch(**kwargs) def gamma(img, **kwargs): """Perform gamma correction.""" return img.gamma(**kwargs) def invert(img, *args): """Perform inversion.""" return img.invert(*args) def apply_enhancement(data, func, exclude=None, separate=False, pass_dask=False): """Apply `func` to the provided data. Args: data (xarray.DataArray): Data to be modified inplace. func (callable): Function to be applied to an xarray exclude (iterable): Bands in the 'bands' dimension to not include in the calculations. separate (bool): Apply `func` one band at a time. Default is False. pass_dask (bool): Pass the underlying dask array instead of the xarray.DataArray. 
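A minimal illustrative callable (mirroring how ``cira_stretch`` below
uses this function; the name is only for the example)::

    def scale_percent(band_data):
        return band_data * 0.01

    apply_enhancement(img.data, scale_percent)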
""" attrs = data.attrs bands = data.coords['bands'].values if exclude is None: exclude = ['A'] if 'A' in bands else [] if separate: data_arrs = [] for idx, band_name in enumerate(bands): band_data = data.sel(bands=[band_name]) if band_name in exclude: # don't modify alpha data_arrs.append(band_data) continue if pass_dask: dims = band_data.dims coords = band_data.coords d_arr = func(band_data.data, index=idx) band_data = xr.DataArray(d_arr, dims=dims, coords=coords) else: band_data = func(band_data, index=idx) data_arrs.append(band_data) # we assume that the func can add attrs attrs.update(band_data.attrs) data.data = xr.concat(data_arrs, dim='bands').data data.attrs = attrs return data else: band_data = data.sel(bands=[b for b in bands if b not in exclude]) if pass_dask: dims = band_data.dims coords = band_data.coords d_arr = func(band_data.data) band_data = xr.DataArray(d_arr, dims=dims, coords=coords) else: band_data = func(band_data) attrs.update(band_data.attrs) # combine the new data with the excluded data new_data = xr.concat([band_data, data.sel(bands=exclude)], dim='bands') data.data = new_data.sel(bands=bands).data data.attrs = attrs return data def crefl_scaling(img, **kwargs): """Apply non-linear stretch used by CREFL-based RGBs.""" LOG.debug("Applying the crefl_scaling") def func(band_data, index=None): idx = np.array(kwargs['idx']) / 255 sc = np.array(kwargs['sc']) / 255 band_data *= .01 # Interpolate band on [0,1] using "lazy" arrays (put calculations off until the end). band_data = xr.DataArray(da.clip(band_data.data.map_blocks(np.interp, xp=idx, fp=sc), 0, 1), coords=band_data.coords, dims=band_data.dims, name=band_data.name, attrs=band_data.attrs) return band_data return apply_enhancement(img.data, func, separate=True) def cira_stretch(img, **kwargs): """Logarithmic stretch adapted to human vision. Applicable only for visible channels. """ LOG.debug("Applying the cira-stretch") def func(band_data): log_root = np.log10(0.0223) denom = (1.0 - log_root) * 0.75 band_data *= 0.01 band_data = band_data.clip(np.finfo(float).eps) band_data = np.log10(band_data) band_data -= log_root band_data /= denom return band_data return apply_enhancement(img.data, func) def _lookup_delayed(luts, band_data): # can't use luts.__getitem__ for some reason return luts[band_data] def lookup(img, **kwargs): """Assign values to channels based on a table.""" luts = np.array(kwargs['luts'], dtype=np.float32) / 255.0 def func(band_data, luts=luts, index=-1): # NaN/null values will become 0 lut = luts[:, index] if len(luts.shape) == 2 else luts band_data = band_data.clip(0, lut.size - 1).astype(np.uint8) new_delay = dask.delayed(_lookup_delayed)(lut, band_data) new_data = da.from_delayed(new_delay, shape=band_data.shape, dtype=luts.dtype) return new_data return apply_enhancement(img.data, func, separate=True, pass_dask=True) def colorize(img, **kwargs): """Colorize the given image. 
Args: img: image to be colorized Kwargs: palettes: colormap(s) to use The `palettes` kwarg can be one of the following: - a trollimage.colormap.Colormap object - list of dictionaries with each of one of the following forms: - {'filename': '/path/to/colors.npy', 'min_value': <float>, 'max_value': <float>, 'reverse': <bool>} - {'colors': <trollimage colormap name>, 'min_value': <float>, 'max_value': <float>, 'reverse': <bool>} - {'colors': <list of RGB(A) tuples>, 'min_value': <float>, 'max_value': <float>, 'reverse': <bool>} - {'colors': <list of RGB(A) tuples>, 'values': <list of data values>, 'min_value': <float>, 'max_value': <float>, 'reverse': <bool>} """ def _bt_threshold(band_data): # expects a dask array to be passed return da.where(band_data >= threshold, high_offset - high_factor * band_data, low_offset - low_factor * band_data) return apply_enhancement(img.data, _bt_threshold, pass_dask=True) satpy-0.20.0/satpy/enhancements/abi.py000066400000000000000000000033241362525524100176550ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancement functions specific to the ABI sensor.""" from satpy.enhancements import apply_enhancement def cimss_true_color_contrast(img, **kwargs): """Scale data based on CIMSS True Color recipe for AWIPS.""" def func(img_data): """Perform per-chunk enhancement. Code ported from Kaba Bah's AWIPS python plugin for creating the CIMSS Natural (True) Color image in AWIPS. AWIPS provides that python code the image data on a 0-255 scale. Satpy gives this function the data on a 0-1.0 scale (assuming linear stretching and sqrt enhancements have already been applied). """ max_value = 1.0 acont = (255.0 / 10.0) / 255.0 amax = (255.0 + 4.0) / 255.0 amid = 1.0 / 2.0 afact = (amax * (acont + max_value) / (max_value * (amax - acont))) aband = (afact * (img_data - amid) + amid) aband[aband <= 10 / 255.0] = 0 aband[aband >= 1.0] = 1.0 return aband apply_enhancement(img.data, func, pass_dask=True) satpy-0.20.0/satpy/enhancements/mimic.py000066400000000000000000000021451362525524100202200ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . from trollimage.colormap import Colormap def total_precipitable_water(img, **kwargs): """Palettize MIMIC total precipitable water (TPW) images. This modifies the image's data so the correct colors can be applied to it, and then palettizes the image.
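The ``palettes`` keyword is expected to carry a ``colors`` entry of
(value, (R, G, B)) pairs as accepted by trollimage's ``Colormap``, e.g.
(an illustrative sketch)::

    {'colors': [(0, (0.0, 0.0, 0.0)), (75, (0.2, 0.4, 0.8))]}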
""" palette = kwargs['palettes'] palette['colors'] = tuple(map(tuple, palette['colors'])) cm = Colormap(*palette['colors']) img.palettize(cm) satpy-0.20.0/satpy/enhancements/viirs.py000066400000000000000000000026311362525524100202560ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . from trollimage.colormap import Colormap import numpy as np from satpy.enhancements import apply_enhancement def water_detection(img, **kwargs): """Palettizes images from VIIRS flood data. This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. """ palette = kwargs['palettes'] palette['colors'] = tuple(map(tuple, palette['colors'])) def func(img_data): data = np.asarray(img_data) data[data == 150] = 31 data[data == 199] = 18 data[data >= 200] = data[data >= 200] - 100 return data apply_enhancement(img.data, func, pass_dask=True) cm = Colormap(*palette['colors']) img.palettize(cm) satpy-0.20.0/satpy/etc/000077500000000000000000000000001362525524100146515ustar00rootroot00000000000000satpy-0.20.0/satpy/etc/areas.yaml000066400000000000000000000727071362525524100166450ustar00rootroot00000000000000omerc_bb: description: Oblique Mercator Bounding Box for Polar Overpasses projection: # The omerc projection does not work well with non-spherical ellipsoids. 
ellps: sphere proj: omerc optimize_projection: True laea_bb: description: Lambert Azimuthal Equal-Area Bounding Box for Polar Overpasses projection: ellps: WGS84 proj: laea optimize_projection: True australia: description: australia projection: proj: merc lat_0: -27.5 lon_0: 132.5 ellps: WGS84 shape: height: 895 width: 1001 area_extent: lower_left_xy: [-2504688.5428486555, -5591295.9185533915] upper_right_xy: [2504688.5428486555, -1111475.102852225] mali: description: mali projection: proj: merc lat_0: 19.0 lon_0: -1.0 ellps: WGS84 shape: height: 705 width: 816 area_extent: lower_left_xy: [-1224514.3987260093, 1111475.1028522244] upper_right_xy: [1224514.3987260093, 3228918.5790461157] mali_eqc: description: mali projection: proj: eqc lat_0: 19.0 lon_0: -1.0 ellps: WGS84 shape: height: 667 width: 816 area_extent: lower_left_xy: [-1224514.3987260093, -1001875.4171394627] upper_right_xy: [1224514.3987260093, 1001875.4171394617] sve: description: Sweden in SWEREF99 TM (epsg:3006) projection: init: epsg:3006 shape: height: 2000 width: 2000 area_extent: lower_left_xy: [-342379.698, 6032580.06] upper_right_xy: [1423701.52, 8029648.75] iber: description: North half of the Iberian Peninsula and the Gulf of Biscay image 0 degrees projection: proj: utm shape: height: 1000 width: 2000 area_extent: lower_left_xy: [-342379.698, 4432580.06] upper_right_xy: [723701.52, 5029648.75] units: m SouthAmerica: description: South America projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1200 width: 3000 area_extent: lower_left_xy: [-5570248.477339261, -4263473.561036119] upper_right_xy: [-384719.90821206354, 1339786.2707295895] brazil2: description: brazil, platecarree projection: proj: eqc ellps: WGS84 shape: height: 768 width: 768 area_extent: lower_left_xy: [-7792364.355529149, -4452779.631730943] upper_right_xy: [-2226389.8158654715, 1669792.3618991035] units: m sudeste: description: sudeste, platecarree projection: proj: eqc ellps: WGS84 shape: height: 959 width: 959 area_extent: lower_left_xy: [-6122571.993630046, -3005626.251418386] upper_right_xy: [-4230140.650144396, -1447153.3803125564] units: m SouthAmerica_flat: description: South America flat projection: proj: eqc a: 6378137.0 b: 6378137.0 shape: height: 1213 width: 1442 area_extent: lower_left_xy: [-8326322.82790897, -4609377.085697311] upper_right_xy: [-556597.4539663679, 1535833.8895192828] units: m worldeqc30km: description: World in 30km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 410 width: 820 area_extent: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m south_america: description: south_america, platecarree projection: proj: eqc ellps: WGS84 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-8126322.82790897, -5009377.085697311] upper_right_xy: [-556597.4539663679, 1335833.8895192828] units: m brazil: description: brazil, platecarree projection: proj: eqc ellps: WGS84 shape: height: 768 width: 768 area_extent: lower_left_xy: [-8348961.809495518, -3896182.1777645745] upper_right_xy: [-3784862.6869713017, 1001875.4171394621] units: m worldeqc3km70: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 4096 width: 8192 area_extent: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m worldeqc30km70: description: World in 30km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 410
width: 819 area_extent: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m worldeqc3km73: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-20037508.3428, -8181982.573309999] upper_right_xy: [20037508.3428, 8181982.573309999] units: m worldeqc3km: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m worldeqc30km: description: World in 30km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 410 width: 820 area_extent: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m libya: description: libya area projection: proj: merc lat_ts: 31.625 lon_0: 17.875 ellps: WGS84 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-1921632.0902750609, 1725320.2028891125] upper_right_xy: [1918367.9097249391, 4797320.202889113] units: m phil: description: Philippines area projection: proj: merc lat_0: 10.0 lat_ts: 10.0 lon_0: 125.0 ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-2200000.0, 0.0] upper_right_xy: [2200000.0, 2200000.0] units: m phil_small: description: Philippines area, small projection: proj: merc lat_0: 10.0 lat_ts: 10.0 lon_0: 125.0 ellps: WGS84 shape: height: 512 width: 512 area_extent: lower_left_xy: [-600000.0, 0.0] upper_right_xy: [1600000.0, 2200000.0] units: m kuwait: description: kuwait area projection: proj: merc lat_ts: 30.0 lon_0: 44.75 ellps: WGS84 shape: height: 512 width: 512 area_extent: lower_left_xy: [-1280000.0, 1820000.0] upper_right_xy: [1280000.0, 4380000.0] units: m afghanistan: description: Afghanistan projection: proj: merc lat_ts: 35.0 a: 6370997.0 b: 6370997.0 lon_0: 67.5 lat_0: 35.0 shape: height: 1600 width: 1600 area_extent: lower_left_xy: [-1600000.0, 1600000.0] upper_right_xy: [1600000.0, 4800000.0] EuropeCanary: description: Northern globe MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1200 width: 3000 area_extent: lower_left_xy: [-4823148.089050828, 1969764.6783588605] upper_right_xy: [4178061.408400173, 5570248.477339261] EastEurope: description: Eastern part of Northern globe MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 800 width: 1300 area_extent: lower_left_xy: [654112.8864287604, 2989901.7547366405] upper_right_xy: [4553111.804127298, 5390224.287390241] AfHorn: description: Eastern globe MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1600 width: 1100 area_extent: lower_left_xy: [2263804.1886089267, -1327678.4008740226] upper_right_xy: [5564247.671007627, 3472966.6644331776] maspalomas: description: Western Africa and Canary Islands projection: proj: merc ellps: bessel lon_0: -10.0 shape: height: 1100 width: 2100 area_extent: lower_left_xy: [-1200000.0, 2900000.0] upper_right_xy: [900000.0, 4000000.0] afhorn: description: Africa horn 3km resolution projection: proj: merc ellps: bessel lon_0: 50.0 shape: height: 1622 width: 1622 area_extent: lower_left_xy: [-2432000.0, -1130348.139543] upper_right_xy: [2432000.0, 3733651.860457] met09globe: description: Cropped globe MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3620
width: 3620 area_extent: lower_left_xy: [-5432229.931711678, -5429229.528545862] upper_right_xy: [5429229.528545862, 5432229.931711678] met09globeFull: description: Full globe MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] seviri_0deg: description: Full globe MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] seviri_iodc: description: Full globe MSG image 41.5 degrees projection: proj: geos lon_0: 41.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_resample_area: description: Full globe MSG image 20.75 degrees projection: proj: geos lon_0: 20.75 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] met07globe: description: Full globe IODC image 57 degrees projection: proj: geos lon_0: 57.0 a: 6378140.0 b: 6356755.0 h: 35785831.0 shape: height: 2500 width: 2500 area_extent: lower_left_xy: [-5621225.237846375, -5621225.237846375] upper_right_xy: [5621225.237846375, 5621225.237846375] spain: description: Spain projection: proj: stere ellps: bessel lat_0: 40.0 lon_0: -3.0 lat_ts: 40.0 a: 6378144.0 b: 6356759.0 shape: height: 2048 width: 2048 area_extent: lower_left_xy: [-500000.0, -500000.0] upper_right_xy: [500000.0, 500000.0] germ: description: Germany projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 5.0 lat_ts: 50.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-155100.436345, -4441495.37946] upper_right_xy: [868899.563655, -3417495.37946] germ2: description: Germany projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 5.0 lat_ts: 50.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-165100.436345, -4441495.37946] upper_right_xy: [878899.563655, -3417495.37946] euro4: description: Euro 4km area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] euro1: description: Euro 1km area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] scan: description: Scandinavia projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 512 width: 512 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] scan2: description: Scandinavia - 2km area projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] scan1: description: Scandinavia - 1km area projection: proj: stere ellps: bessel lat_0:
90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 2088 width: 2048 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] scan500m: description: Scandinavia - 500m area projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 4176 width: 4096 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] mesanX: description: Mesan-X rotated lon/lat 1.8km projection: proj: ob_tran o_proj: eqc o_lat_p: 30.0 o_lon_p: 10.0 lon_0: -10.0 a: 6371000.0 b: 6371000.0 wktext: True shape: height: 1608 width: 1476 area_extent: lower_left_xy: [1067435.7598983962, -1278764.890341909] upper_right_xy: [3791765.9965939857, 1690140.6680267097] mesanE: description: Europe Mesan rotated lon/lat 1.8km projection: proj: ob_tran o_proj: eqc o_lat_p: 30.0 o_lon_p: 10.0 lon_0: -10.0 a: 6371000.0 b: 6371000.0 wktext: True shape: height: 6294 width: 5093 area_extent: lower_left_xy: [289083.0005619671, -2957836.6467769896] upper_right_xy: [5381881.121371055, 3335826.68502126] baws: description: BAWS projection: proj: aea ellps: bessel lon_0: 14.0 lat_1: 60.0 lat_2: 60.0 shape: height: 1400 width: 1400 area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] eurotv: description: Europe TV - 6.2x5.0km projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-3503748.8201907813, -6589593.134058789] upper_right_xy: [2842567.6359087573, -1499856.5846593212] eurotv4n: description: Europe TV4 - 4.1x4.1km projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 a: 6378144.0 b: 6356759.0 shape: height: 1152 width: 2048 area_extent: lower_left_xy: [-5103428.678666952, -6772478.60053407] upper_right_xy: [3293371.321333048, -2049278.6005340703] eurol: description: Euro 3.0km area - Europe projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 2048 width: 2560 area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] eurol1: description: Euro 3.0km area - Europe projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 8000 width: 10000 area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] scanl: description: Scandinavia - Large projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 2900 width: 2900 area_extent: lower_left_xy: [-900000.0, -4500000.0] upper_right_xy: [2000000.0, -1600000.0] euron1: description: Northern Europe - 1km projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 3072 width: 3072 area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] euron0250: description: Northern Europe - 1km projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 12288 width: 12288 area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] nsea: description: North Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] ssea: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 1024 width: 1024 
area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] nsea250: description: North Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] ssea250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] bsea250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 5632 width: 4752 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] test250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 5632 width: 4752 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] bsea1000: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 1408 width: 1188 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] euro: description: Euro area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 512 width: 512 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] baltrad_lambert: description: Baltrad Lambert projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 20.0 lat_0: 60.0 shape: height: 1195 width: 815 area_extent: lower_left_xy: [-994211.85388, -1291605.15396] upper_right_xy: [635788.14612, 1098394.84604] eport: description: eport projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 1792 width: 1792 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport1: description: eport projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 10567 width: 10567 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport10: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 1057 width: 1057 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport4: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 2642 width: 2642 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport2: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 5285 width: 5285 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m npp_sample_m: description: North America - NPP sample data - M-bands projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 60.0 lon_0: -120.0 shape: height: 1500 width: 1500 area_extent: lower_left_xy: [-1700000.0, -1400000.0] upper_right_xy: [1100000.0, 1400000.0] arctic_europe_1km: description: Arctic and Europe projection: proj: laea a: 6371228.0 b: 6371228.0 lon_0: 0.0 lat_0: 90.0 shape: height: 9100 width: 
9100 area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] arctic_europe_9km: description: Arctic and Europe projection: proj: laea a: 6371228.0 b: 6371228.0 lon_0: 0.0 lat_0: 90.0 shape: height: 910 width: 910 area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] sswe: description: Southern Sweden projection: proj: stere ellps: bessel a: 6378144.0 b: 6356759.0 lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-400884.23045, -3946631.71387] upper_right_xy: [623115.76955, -2922631.71387] nswe: description: Northern Sweden projection: proj: stere ellps: bessel a: 6378144.0 b: 6356759.0 lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-392288.010506, -3105279.35252] upper_right_xy: [631711.989494, -2081279.35252] sval: description: Svalbard projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 20.0 lat_ts: 75.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-287554.9489620461, -1630805.15418955] upper_right_xy: [736445.0510379539, -606805.1541895501] ease_sh: description: Antarctic EASE grid projection: proj: laea lat_0: -90.0 lon_0: 0.0 a: 6371228.0 b: 6371228.0 shape: height: 425 width: 425 area_extent: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m ease_nh: description: Arctic EASE grid projection: proj: laea lat_0: 90.0 lon_0: 0.0 a: 6371228.0 b: 6371228.0 shape: height: 425 width: 425 area_extent: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m barents_sea: description: Barents and Greenland seas projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 40.0 lat_ts: 75.0 shape: height: 1700 width: 3000 area_extent: lower_left_xy: [-1600000.0, -2000000.0] upper_right_xy: [1400000.0, -300000.0] antarctica: description: Antarctica - 1km projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 0.0 lat_0: -90.0 shape: height: 5718 width: 5718 area_extent: lower_left_xy: [-2858899.2042342643, -2858899.204234264] upper_right_xy: [2858899.204234264, 2858899.2042342643] arctica: description: arctica - 1km projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 0.0 lat_0: 90.0 shape: height: 5718 width: 5718 area_extent: lower_left_xy: [-1458899.2042342643, -1458899.2042342639] upper_right_xy: [1458899.2042342639, 1458899.2042342643] euroasia: description: Euroasia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 55.0 lon_0: 20.0 shape: height: 13000 width: 13000 area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] euroasia_10km: description: Euroasia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 55.0 lon_0: 20.0 shape: height: 1300 width: 1300 area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] euroasia_asia: description: Euroasia - optimised for Asia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 45.0 lon_0: 100.0 shape: height: 12000 width: 13000 area_extent: lower_left_xy: [-8000000.0, -5499000.0] upper_right_xy: [4999000.0, 6500000.0] euroasia_asia_10km: description: Euroasia - optimised for Asia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 45.0 lon_0: 100.0 shape: height: 1200 width: 1300 area_extent: lower_left_xy: [-8000000.0, -5499000.0] 
upper_right_xy: [4999000.0, 6500000.0] australia_pacific: description: Austalia/Pacific - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: 135.0 shape: height: 8000 width: 9300 area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] australia_pacific_10km: description: Austalia/Pacific - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: 135.0 shape: height: 800 width: 930 area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] africa: description: Africa - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 5.0 lon_0: 20.0 shape: height: 9276 width: 8350 area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] africa_10km: description: Africa - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 5.0 lon_0: 20.0 shape: height: 928 width: 835 area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] southamerica: description: South America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: -60.0 shape: height: 8000 width: 6000 area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] southamerica_10km: description: South America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: -60.0 shape: height: 800 width: 600 area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] northamerica: description: North America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 50.0 lon_0: -100.0 shape: height: 8996 width: 9223 area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] northamerica_10km: description: North America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 50.0 lon_0: -100.0 shape: height: 900 width: 922 area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] romania: description: Romania - 3km projection: proj: stere ellps: WGS84 lat_0: 50.0 lon_0: 15.0 lat_ts: 60.0 shape: height: 855 width: 1509 area_extent: lower_left_xy: [-2226837.662574135, -1684219.2829063328] upper_right_xy: [2299196.337425865, 881436.7170936672] stere_asia_test: description: stere projection: proj: stere lon_0: 121.5 lat_0: 25.0 shape: height: 7162 width: 7200 area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] bocheng_test: description: stere projection: proj: stere lon_0: 121.5 lat_0: 25.0 shape: height: 1989 width: 2000 area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] nsper_swe: description: nsper_swe projection: proj: nsper lon_0: 16.0 lat_0: 58.0 h: 360000000.0 wktext: True shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-5000000.0, -5000000.0] upper_right_xy: [5000000.0, 5000000.0] new_bsea250: description: new_bsea250 projection: proj: stere lat_0: 59.5 lon_0: 19.5 ellps: WGS84 shape: height: 5750 width: 5104 area_extent: lower_left_xy: [-638072.2772287376, -680339.8397175331] upper_right_xy: [638072.277228737, 757253.9342263378] scanice: description: Scandinavia and Iceland projection: proj: laea a: 6370997.0 b: 
6370997.0 lon_0: 0.0 lat_0: 64.0 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-1920000.0, -1536000.0] upper_right_xy: [1920000.0, 1536000.0] baws250: description: BAWS, 250m resolution projection: proj: aea ellps: WGS84 lon_0: 14.0 lat_1: 60.0 lat_2: 60.0 shape: height: 5600 width: 5600 area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] moll: description: moll projection: proj: moll lat_0: 0.0 lon_0: 0.0 ellps: WGS84 shape: height: 1800 width: 3600 area_extent: lower_left_xy: [-18040095.696147293, -9020047.848073646] upper_right_xy: [18040095.696147293, 9020047.848073646] robinson: description: robinson projection: proj: robin lat_0: 70.0 lon_0: -45.0 ellps: WGS84 shape: height: 3296 width: 4096 area_extent: lower_left_xy: [-2049911.5256036147, 5326895.725982913] upper_right_xy: [2049911.5256036168, 8625155.12857459] satpy-0.20.0/satpy/etc/composites/000077500000000000000000000000001362525524100170365ustar00rootroot00000000000000satpy-0.20.0/satpy/etc/composites/abi.yaml000066400000000000000000000172241362525524100204630ustar00rootroot00000000000000sensor_name: visir/abi modifiers: rayleigh_corrected_crefl: compositor: !!python/name:satpy.composites.viirs.ReflectanceCorrector dem_filename: CMGDEM.hdf optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_500m: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: midlatitude summer aerosol_type: marine_tropical_aerosol prerequisites: - name: C02 modifiers: [effective_solar_pathlength_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: green_crefl: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: C03 modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: toa_bidirectional_reflectance green_raw: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? 
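The area names above are what Satpy (through pyresample) resolves when a resample target is given as a string. A minimal sketch of looking one up and remapping a scene to it, assuming satpy 0.20 with its SEVIRI dependencies installed; the HRIT file list is a placeholder:

from glob import glob
from satpy import Scene
from satpy.resample import get_area_def

area = get_area_def('eurol')        # parsed from satpy/etc/areas.yaml
print(area.proj_dict, area.shape)   # stereographic projection, (2048, 2560)

scn = Scene(reader='seviri_l1b_hrit', filenames=glob('/path/to/hrit/*'))  # hypothetical path
scn.load(['overview'])
local_scn = scn.resample(area)      # remap to the named area definition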
satpy-0.20.0/satpy/etc/composites/abi.yaml

sensor_name: visir/abi
modifiers:
  rayleigh_corrected_crefl:
    compositor: !!python/name:satpy.composites.viirs.ReflectanceCorrector
    dem_filename: CMGDEM.hdf
    optional_prerequisites: [{name: satellite_azimuth_angle}, {name: satellite_zenith_angle}, {name: solar_azimuth_angle}, {name: solar_zenith_angle}]
  rayleigh_corrected_500m:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: midlatitude summer
    aerosol_type: marine_tropical_aerosol
    prerequisites:
      - {name: C02, modifiers: [effective_solar_pathlength_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]

composites:
  green_crefl:
    compositor: !!python/name:satpy.composites.abi.SimulatedGreen
    # FUTURE: Set a wavelength...see what happens. Dependency finding
    # probably wouldn't work.
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      - {name: C01, modifiers: [sunz_corrected, rayleigh_corrected_crefl]}
      - {name: C02, modifiers: [sunz_corrected, rayleigh_corrected_crefl]}
      - {name: C03, modifiers: [sunz_corrected, rayleigh_corrected_crefl]}
    standard_name: toa_bidirectional_reflectance
  green_raw:
    compositor: !!python/name:satpy.composites.abi.SimulatedGreen
    # FUTURE: Set a wavelength...see what happens. Dependency finding
    # probably wouldn't work.
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      - {name: C01, modifiers: [sunz_corrected]}
      - {name: C02, modifiers: [sunz_corrected]}
      - {name: C03, modifiers: [sunz_corrected]}
    standard_name: toa_bidirectional_reflectance
  green:
    compositor: !!python/name:satpy.composites.abi.SimulatedGreen
    # FUTURE: Set a wavelength...see what happens. Dependency finding
    # probably wouldn't work.
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      - {name: C01, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: C02, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: C03, modifiers: [sunz_corrected]}
    standard_name: toa_bidirectional_reflectance
  true_color_crefl:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {name: C02, modifiers: [sunz_corrected, rayleigh_corrected_crefl]}
      - {name: green_crefl}
      - {name: C01, modifiers: [sunz_corrected, rayleigh_corrected_crefl]}
    standard_name: true_color
  true_color_raw:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {name: C02, modifiers: [sunz_corrected]}
      - {name: green_raw}
      - {name: C01, modifiers: [sunz_corrected]}
    standard_name: true_color
  true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {name: C02, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: green}
      - {name: C01, modifiers: [sunz_corrected, rayleigh_corrected]}
    standard_name: true_color
  natural_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {name: C05, modifiers: [sunz_corrected]}
      - {name: C03, modifiers: [sunz_corrected]}
      - {name: C02, modifiers: [sunz_corrected]}
    high_resolution_band: blue
    standard_name: natural_color
  natural_color_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [{name: C05}, {name: C03}, {name: C02}]
    standard_name: natural_color
  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: C02, modifiers: [sunz_corrected]}
      - {name: C03, modifiers: [sunz_corrected]}
      - C14
    standard_name: overview
  overview_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [C02, C03, C14]
    standard_name: overview
  airmass:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: C08}, {name: C10}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: C12}, {name: C13}]
      - {name: C08}
    standard_name: airmass

  # CIMSS True Color Composites
  cimss_green_sunz_rayleigh:
    compositor: !!python/name:satpy.composites.abi.SimulatedGreen
    fractions: [0.45, 0.45, 0.1]
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      - {name: C01, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: C02, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: C03, modifiers: [sunz_corrected]}
    standard_name: toa_bidirectional_reflectance
  cimss_green_sunz:
    compositor: !!python/name:satpy.composites.abi.SimulatedGreen
    fractions: [0.45, 0.45, 0.1]
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      - {name: C01, modifiers: [sunz_corrected]}
      - {name: C02, modifiers: [sunz_corrected]}
      - {name: C03, modifiers: [sunz_corrected]}
    standard_name: toa_bidirectional_reflectance
  cimss_green:
    compositor: !!python/name:satpy.composites.abi.SimulatedGreen
    fractions: [0.45, 0.45, 0.1]
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      - {name: C01}
      - {name: C02}
      - {name: C03}
    standard_name: toa_bidirectional_reflectance
  cimss_true_color_sunz_rayleigh:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    description: >
      CIMSS Natural (True) Color RGB. This version has been adjusted for the
      cosine of the solar zenith angle and has had rayleigh correction applied.
      This RGB is ratio sharpened by comparing a high resolution C02 (red) band
      with a lower/averaged version of itself and applying that ratio to the
      green and blue channels.
    references:
      Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379
    prerequisites:
      - {name: C02, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: cimss_green_sunz_rayleigh}
      - {name: C01, modifiers: [sunz_corrected, rayleigh_corrected]}
    standard_name: cimss_true_color
  cimss_true_color_sunz:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    description: >
      CIMSS Natural (True) Color RGB. This version has been adjusted for the
      cosine of the solar zenith angle. This RGB is ratio sharpened by comparing
      a high resolution C02 (red) band with a lower/averaged version of itself
      and applying that ratio to the green and blue channels.
    references:
      Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379
    prerequisites:
      - {name: C02, modifiers: [sunz_corrected]}
      - {name: cimss_green_sunz}
      - {name: C01, modifiers: [sunz_corrected]}
    standard_name: cimss_true_color
  cimss_true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    description: >
      CIMSS Natural (True) Color RGB. No solar zenith angle or atmospheric
      corrections are applied to this composite. This RGB is ratio sharpened by
      comparing a high resolution C02 (red) band with a lower/averaged version
      of itself and applying that ratio to the green and blue channels.
    references:
      Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379
    prerequisites:
      - {name: C02}
      - {name: cimss_green}
      - {name: C01}
    standard_name: cimss_true_color

satpy-0.20.0/satpy/etc/composites/agri.yaml

sensor_name: visir/agri
composites:
  green:
    compositor: !!python/name:satpy.composites.abi.SimulatedGreen
    # FUTURE: Set a wavelength...see what happens. Dependency finding
    # probably wouldn't work.
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      - {name: C01, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: C02, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: C03, modifiers: [sunz_corrected]}
    standard_name: toa_bidirectional_reflectance
  true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {name: C02, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: green}
      - {name: C01, modifiers: [sunz_corrected, rayleigh_corrected]}
    standard_name: true_color
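The ABI recipes above are requested by composite name; everything else (simulated green, sun-zenith and Rayleigh corrections, ratio sharpening) follows from the YAML. A hedged example, with a placeholder L1b file set:

from glob import glob
from satpy import Scene

scn = Scene(reader='abi_l1b', filenames=glob('/data/abi/OR_ABI-L1b-Rad*.nc'))  # hypothetical path
scn.load(['true_color'])   # builds green, applies sunz + rayleigh, sharpens
scn.save_dataset('true_color', filename='abi_true_color.png')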
satpy-0.20.0/satpy/etc/composites/ahi.yaml

sensor_name: visir/ahi
modifiers:
  rayleigh_corrected:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: marine_clean_aerosol
    prerequisites:
      - {wavelength: 0.65, modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]

composites:
  green:
    compositor: !!python/name:satpy.composites.ahi.GreenCorrector
    # FUTURE: Set a wavelength...see what happens. Dependency finding
    # probably wouldn't work.
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      # what happens if something requests more modifiers on top of this?
      - {wavelength: 0.51, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {wavelength: 0.85, modifiers: [sunz_corrected]}
    standard_name: toa_bidirectional_reflectance
  airmass:
    # PDF slides: https://www.eumetsat.int/website/home/News/ConferencesandEvents/DAT_2833302.html
    # Under session 2 by Akihiro Shimizu (JMA)
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B08}, {name: B10}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B12}, {name: B14}]
      - {name: B08}
    standard_name: airmass
  ash:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B15}, {name: B13}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B14}, {name: B11}]
      - {name: B13}
    standard_name: ash
  dust:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B15}, {name: B13}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B14}, {name: B11}]
      - {name: B13}
    standard_name: dust
  fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B15}, {name: B13}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B14}, {name: B11}]
      - {name: B13}
    standard_name: fog
  night_microphysics:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B15}, {name: B13}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: B14}, {name: B07}]
      - {name: B13}
    standard_name: night_microphysics
  fire_temperature:
    # CIRA: Original VIIRS
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 3.85, calibration: radiance}
      - {wavelength: 2.26, calibration: radiance}
      - {wavelength: 1.61, calibration: radiance}
    standard_name: fire_temperature
    name: fire_temperature
  fire_temperature_awips:
    # CIRA: AWIPS
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [{wavelength: 3.85}, {wavelength: 2.26}, {wavelength: 1.61}]
    standard_name: fire_temperature
    name: fire_temperature_awips
  fire_temperature_eumetsat:
    # CIRA: EUMETSAT
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [{wavelength: 3.85}, {wavelength: 2.26}, {wavelength: 1.61}]
    standard_name: fire_temperature
    name: fire_temperature_eumetsat
  fire_temperature_39refl:
    # CIRA: All bands in Reflectance units (%)
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 3.85, modifiers: [nir_reflectance]}
      - {wavelength: 2.26, modifiers: [sunz_corrected]}
      - {wavelength: 1.61, modifiers: [sunz_corrected]}
    standard_name: fire_temperature
    name: fire_temperature_39refl
  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [0.65, 0.85, 10.4]
    standard_name: overview
  natural_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {wavelength: 1.63, modifiers: [sunz_corrected]}  #, rayleigh_corrected]
      - {wavelength: 0.85, modifiers: [sunz_corrected]}  #, rayleigh_corrected]
      - {wavelength: 0.635, modifiers: [sunz_corrected]}  #, rayleigh_corrected]
    high_resolution_band: blue
    standard_name: natural_color
  true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {wavelength: 0.65, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: green}
      - {wavelength: 0.46, modifiers: [sunz_corrected, rayleigh_corrected]}
    high_resolution_band: red
    standard_name: true_color
#  true_color_reducedsize_land:
#    compositor: !!python/name:satpy.composites.GenericCompositor
#    prerequisites:
#      - wavelength: 0.65
#        modifiers: [reducer4, effective_solar_pathlength_corrected, rayleigh_corrected_reducedsize_land]
#      - wavelength: 0.51
#        modifiers: [reducer2, vegetation_corrected_reduced, effective_solar_pathlength_corrected, rayleigh_corrected_reducedsize_land]
#      - wavelength: 0.46
#        modifiers: [reducer2, effective_solar_pathlength_corrected, rayleigh_corrected_reducedsize_land]
#    standard_name: true_color
#
#  true_color_reducedsize_marine_tropical:
#    compositor: !!python/name:satpy.composites.GenericCompositor
#    prerequisites:
#      - wavelength: 0.65
#        modifiers: [reducer4, effective_solar_pathlength_corrected, rayleigh_corrected_reducedsize_marine_tropical]
#      - wavelength: 0.51
#        modifiers: [reducer2, vegetation_corrected_reduced, effective_solar_pathlength_corrected, rayleigh_corrected_reducedsize_marine_tropical]
#      - wavelength: 0.46
#        modifiers: [reducer2, effective_solar_pathlength_corrected, rayleigh_corrected_reducedsize_marine_tropical]
#    standard_name: true_color
  day_microphysics_eum:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 0.86}
      - {wavelength: 3.9, modifiers: [nir_reflectance]}
      - {wavelength: 10.4}
    standard_name: day_microphysics
  day_microphysics_ahi:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 0.86}
      - {wavelength: 2.3}
      - {wavelength: 10.4}
    standard_name: day_microphysics
  cloud_phase_distinction:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 10.4}
      - {wavelength: 0.64}
      - {wavelength: 1.6}
    standard_name: cloud_phase_distinction
  water_vapors1:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 10.4}
      - {wavelength: 6.2}
      - {wavelength: 7.3}
    standard_name: water_vapors1
  mid_vapor:
    compositor: !!python/name:satpy.composites.DifferenceCompositor
    prerequisites:
      - {wavelength: 7.3}
      - {wavelength: 6.2}
    standard_name: mid_vapor
  water_vapors2:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: mid_vapor}
      - {wavelength: 7.3}
      - {wavelength: 6.2}
    standard_name: water_vapors2
  convection:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [6.7, 7.3]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [3.75, 10.4]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [1.63, 0.635]
    standard_name: convection
  ir_cloud_day:
    standard_name: ir_cloud_day
    compositor: !!python/name:satpy.composites.CloudCompositor
    prerequisites:
      - {name: B14}
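The ash, dust, fog and night_microphysics recipes above all stack two DifferenceCompositor outputs on a plain window channel. A toy numpy sketch of the channel arithmetic only (not Satpy's implementation), with made-up brightness temperatures:

import numpy as np

# Stand-ins for AHI brightness temperatures in kelvin; real inputs would be
# 2-D arrays loaded through a Satpy Scene.
b11, b13, b14, b15 = (np.array([v]) for v in (285.0, 290.0, 288.0, 287.0))

r = b15 - b13  # split-window difference (red)
g = b14 - b11  # mid-IR difference (green)
b = b13        # plain window channel (blue)
print(r, g, b)  # the enhancement stage then stretches each band for display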
satpy-0.20.0/satpy/etc/composites/ami.yaml

sensor_name: visir/ami
composites:
  green_raw:
    compositor: !!python/name:satpy.composites.ahi.GreenCorrector
    prerequisites:
      - {name: VI005, modifiers: [sunz_corrected]}
      - {name: VI008, modifiers: [sunz_corrected]}
    standard_name: toa_bidirectional_reflectance
    fractions: [0.85, 0.15]
  green:
    compositor: !!python/name:satpy.composites.ahi.GreenCorrector
    prerequisites:
      - {name: VI005, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: VI008, modifiers: [sunz_corrected]}
    standard_name: toa_bidirectional_reflectance
    fractions: [0.85, 0.15]
  true_color_raw:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {name: VI006, modifiers: [sunz_corrected]}
      - {name: green_raw}
      - {name: VI004, modifiers: [sunz_corrected]}
    standard_name: true_color
  true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {name: VI006, modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: green}
      - {name: VI004, modifiers: [sunz_corrected, rayleigh_corrected]}
    standard_name: true_color
  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [0.65, 0.85, 10.4]
    standard_name: overview
  natural_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - {name: NR016, modifiers: [sunz_corrected]}  #, rayleigh_corrected]
      - {name: VI008, modifiers: [sunz_corrected]}  #, rayleigh_corrected]
      - {name: VI006, modifiers: [sunz_corrected]}  #, rayleigh_corrected]
    high_resolution_band: blue
    standard_name: natural_color
  day_microphysics_eum:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 0.86}
      - {wavelength: 3.9, modifiers: [nir_reflectance]}
      - {wavelength: 10.4}
    standard_name: day_microphysics
  cloud_phase_distinction:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 10.4}
      - {wavelength: 0.64}
      - {wavelength: 1.6}
    standard_name: cloud_phase_distinction
  water_vapors1:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {wavelength: 10.4}
      - {wavelength: 6.2}
      - {wavelength: 7.3}
    standard_name: water_vapors1
  mid_vapor:
    compositor: !!python/name:satpy.composites.DifferenceCompositor
    prerequisites:
      - {wavelength: 7.3}
      - {wavelength: 6.2}
    standard_name: mid_vapor
  water_vapors2:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: mid_vapor}
      - {wavelength: 7.3}
      - {wavelength: 6.2}
    standard_name: water_vapors2
  convection:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [WV069, WV073]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [SW038, IR105]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [NR016, VI006]
    standard_name: convection
  ir_cloud_day:
    standard_name: ir_cloud_day
    compositor: !!python/name:satpy.composites.CloudCompositor
    prerequisites:
      - {name: IR112}
  airmass:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: WV063}, {name: WV073}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: IR096}, {name: IR105}]
      - {name: WV063}
    standard_name: airmass
  ash:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [IR123, IR112]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [IR112, IR087]
      - IR112
    standard_name: ash

satpy-0.20.0/satpy/etc/composites/amsr2.yaml

sensor_name: amsr2
composites:
  rgb_color:
    compositor: !!python/name:satpy.composites.RGBCompositor
    prerequisites:
      - {name: 'btemp_10.7h'}
      - {name: 'btemp_36.5h'}
      - {name: 'btemp_89.0ah'}
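GreenCorrector blends a native green band with a NIR band using the fractions option; for the AMI green above that is roughly 0.85 * VI005 + 0.15 * VI008, which compensates the green band's weak vegetation response. A small sketch of that weighted blend, with made-up reflectances:

import numpy as np

def hybrid_green(green_051um, nir_086um, fractions=(0.85, 0.15)):
    """Weighted blend mirroring the `fractions` option above (sketch only)."""
    return fractions[0] * green_051um + fractions[1] * nir_086um

print(hybrid_green(np.array([0.30]), np.array([0.50])))  # -> [0.33]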
satpy-0.20.0/satpy/etc/composites/avhrr-3.yaml

sensor_name: visir/avhrr-3
composites:
  nwc_pps_ct_masked_ir:
    compositor: !!python/name:satpy.composites.MaskingCompositor
    prerequisites: [10.8, ct]
    standard_name: nwc_pps_ct_masked_ir
    # Default is opaque (transparency = 0)
    transparency:
      Cloud-free_land: 100
      Cloud-free_sea: 100
      Snow_over_land: 100
      Sea_ice: 100
      Fractional_clouds: 45

satpy-0.20.0/satpy/etc/composites/fci.yaml

sensor_name: visir/fci

satpy-0.20.0/satpy/etc/composites/glm.yaml

sensor_name: visir/glm
composites:
  C14_flash_extent_density:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: c14_flash_extent_density
    prerequisites: [flash_extent_density, C14]

satpy-0.20.0/satpy/etc/composites/goes_imager.yaml

# XXX arb
sensor_name: visir/goes_imager
composites:
  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: ['00_7', '00_7', '10_7']
    standard_name: overview

satpy-0.20.0/satpy/etc/composites/hsaf.yaml

sensor_name: hsaf
composites:
  instantaneous_rainrate_3:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: h03}
    standard_name: instantaneous_rainrate_3
  instantaneous_rainrate_3b:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: h03B}
    standard_name: instantaneous_rainrate_3b
  accum_rainrate_5:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: h05}
    standard_name: accum_rainrate_5
  accum_rainrate_5b:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: h05B}
    standard_name: accum_rainrate_5b
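In MaskingCompositor recipes such as nwc_pps_ct_masked_ir above, the transparency table is per-category opacity: 100 hides a cloud-type class completely, 0 keeps it opaque. A hedged sketch of that mapping as alpha values, not the compositor's actual code:

transparency = {"Cloud-free_land": 100, "Cloud-free_sea": 100,
                "Snow_over_land": 100, "Sea_ice": 100, "Fractional_clouds": 45}

def alpha_for(category):
    # Default is opaque (transparency = 0), matching the YAML comment above.
    return 1.0 - transparency.get(category, 0) / 100.0

print(alpha_for("Cloud-free_sea"))     # 0.0  -> fully transparent
print(alpha_for("Fractional_clouds"))  # 0.55 -> semi-transparent
print(alpha_for("Opaque_clouds"))      # 1.0  -> kept opaque (hypothetical category)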
satpy-0.20.0/satpy/etc/composites/mersi-2.yaml

sensor_name: visir/mersi-2
modifiers:
  rayleigh_corrected:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: rayleigh_only
    prerequisites:
      - {name: '3', modifiers: [sunz_corrected]}
    optional_prerequisites: [{name: satellite_azimuth_angle}, {name: satellite_zenith_angle}, {name: solar_azimuth_angle}, {name: solar_zenith_angle}]
  sunz_corrected:
    compositor: !!python/name:satpy.composites.SunZenithCorrector
    prerequisites: [solar_zenith_angle]
  nir_reflectance:
    compositor: !!python/name:satpy.composites.NIRReflectance
    prerequisites:
      - {name: '24'}
    optional_prerequisites: [solar_zenith_angle]

composites:
  ash:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: '25'}, {name: '24'}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: '24'}, {name: '23'}]
      - {name: '24'}
    standard_name: ash
  true_color_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: ['3', '2', '1']
    standard_name: true_color
  true_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '3'  # 0.65
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: '2'
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: '1'  # 0.47
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color
  natural_color:
    compositor: !!python/name:satpy.composites.RatioSharpenedRGB
    prerequisites:
      - {name: '6', modifiers: [sunz_corrected]}
      - {name: '15', modifiers: [sunz_corrected]}
      - {name: '3', modifiers: [sunz_corrected]}
    optional_prerequisites:
      - {name: '4', modifiers: [sunz_corrected]}
    standard_name: natural_color
    high_resolution_band: green
  natural_color_lowres:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '6', modifiers: [sunz_corrected]}
      - {name: '15', modifiers: [sunz_corrected]}
      - {name: '12', modifiers: [sunz_corrected]}
    standard_name: natural_color
  overview_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [{name: '12'}, {name: '15'}, {name: '24'}]
    standard_name: overview
  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '12', modifiers: [sunz_corrected]}
      - {name: '15', modifiers: [sunz_corrected]}
      - {name: '24'}
    standard_name: overview
  cloudtop:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [{name: '20'}, {name: '24'}, {name: '25'}]
    standard_name: cloudtop
  day_microphysics:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '15', modifiers: [sunz_corrected]}
      - {name: '20', modifiers: [nir_reflectance]}
      - {name: '24'}
    standard_name: day_microphysics
  night_fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: '25'}, {name: '24'}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: '24'}, {name: '20'}]
      - {name: '24'}
    standard_name: night_fog
  fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: '25'}, {name: '24'}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{name: '24'}, {name: '23'}]
      - {name: '24'}
    standard_name: fog
  green_snow:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [{name: '6'}, {name: '3'}, {name: '24'}]
    standard_name: green_snow

satpy-0.20.0/satpy/etc/composites/modis.yaml

sensor_name: visir/modis
modifiers:
  rayleigh_corrected_crefl:
    compositor: !!python/name:satpy.composites.viirs.ReflectanceCorrector
    dem_filename: CMGDEM.hdf
    optional_prerequisites: [{name: satellite_azimuth_angle}, {name: satellite_zenith_angle}, {name: solar_azimuth_angle}, {name: solar_zenith_angle}]

composites:
  true_color_uncorrected:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '1', modifiers: [sunz_corrected]}
      - {name: '4', modifiers: [sunz_corrected]}
      - {name: '3', modifiers: [sunz_corrected]}
    standard_name: true_color
  true_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '1', modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: '4', modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: '3', modifiers: [sunz_corrected, rayleigh_corrected]}
    standard_name: true_color
  true_color_thin:
    compositor: !!python/name:satpy.composites.FillingCompositor
    prerequisites:
      - {name: '1', modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: '1', modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: '12', modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: '10', modifiers: [sunz_corrected, rayleigh_corrected]}
    standard_name: true_color
  true_color_crefl:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '1', modifiers: [sunz_corrected, rayleigh_corrected_crefl]}
      - {name: '4', modifiers: [sunz_corrected, rayleigh_corrected_crefl]}
      - {name: '3', modifiers: [sunz_corrected, rayleigh_corrected_crefl]}
    standard_name: true_color
  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '1', modifiers: [sunz_corrected]}
      - compositor: !!python/name:satpy.composites.Filler
        prerequisites:
          - {name: '2', modifiers: [sunz_corrected]}
          - {name: '1', modifiers: [sunz_corrected]}
      - {name: '31'}
    standard_name: overview
  snow:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.Filler
        prerequisites:
          - {name: '2', modifiers: [sunz_corrected]}
          - {name: '1', modifiers: [sunz_corrected]}
      - {name: '6', modifiers: [sunz_corrected]}
      - {name: '20', modifiers: [nir_reflectance]}
    standard_name: snow
  natural_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '6', modifiers: [sunz_corrected]}
      - compositor: !!python/name:satpy.composites.Filler
        prerequisites:
          - {name: '2', modifiers: [sunz_corrected]}
          - {name: '1', modifiers: [sunz_corrected]}
      - {name: '1', modifiers: [sunz_corrected]}
    standard_name: natural_color
  day_microphysics:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.Filler
        prerequisites:
          - {name: '2', modifiers: [sunz_corrected]}
          - {name: '1', modifiers: [sunz_corrected]}
      - {name: '20', modifiers: [nir_reflectance]}
      - {name: '31'}
    standard_name: day_microphysics
  airmass:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{wavelength: 6.7}, {wavelength: 7.3}]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [{wavelength: 9.7}, {wavelength: 10.8}]
      - {wavelength: 6.7}
    standard_name: airmass
  ocean_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: '1', modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: '4', modifiers: [sunz_corrected, rayleigh_corrected]}
      - {name: '3', modifiers: [sunz_corrected, rayleigh_corrected]}
    standard_name: ocean_color
  night_fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [12.0, 10.8]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [10.8, 3.75]
      - 10.8
    standard_name: night_fog
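Several of the recipes above exist in both crefl and pyspectral-Rayleigh variants under different names, and listing what a Scene can actually build is the easiest way to pick one. A hedged sketch with a hypothetical MODIS L1b file list:

from glob import glob
from satpy import Scene

scn = Scene(reader='modis_l1b', filenames=glob('/data/modis/*.hdf'))  # hypothetical path
print(scn.available_composite_names())  # e.g. ['true_color', 'true_color_crefl', ...]
scn.load(['true_color_crefl'])          # uses the crefl modifier defined above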
satpy-0.20.0/satpy/etc/composites/msi.yaml

sensor_name: visir/msi
modifiers:
  rayleigh_corrected:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: rayleigh_only
    prerequisites:
      - {name: 'B04', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]
  rayleigh_corrected_marine_clean:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: marine_clean_aerosol
    prerequisites:
      - {name: 'B04', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]
  rayleigh_corrected_marine_tropical:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: tropical
    aerosol_type: marine_tropical_aerosol
    prerequisites:
      - {name: 'B04', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]
  rayleigh_corrected_desert:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: tropical
    aerosol_type: desert_aerosol
    prerequisites:
      - {name: 'B04', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]
  rayleigh_corrected_land:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: continental_average_aerosol
    prerequisites:
      - {name: 'B04', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]

composites:
  natural_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'B11'}
      - {name: 'B08', modifiers: [effective_solar_pathlength_corrected]}
      - {name: 'B04', modifiers: [effective_solar_pathlength_corrected]}
    standard_name: natural_color
  true_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'B04', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
      - {name: 'B03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
      - {name: 'B02', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
    standard_name: true_color
  true_color_land:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'B04', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land]}
      - {name: 'B03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land]}
      - {name: 'B02', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land]}
    standard_name: true_color
  true_color_desert:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'B04', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert]}
      - {name: 'B03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert]}
      - {name: 'B02', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert]}
    standard_name: true_color
  true_color_marine_clean:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'B04', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]}
      - {name: 'B03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]}
      - {name: 'B02', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]}
    standard_name: true_color
  true_color_marine_tropical:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'B04', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]}
      - {name: 'B03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]}
      - {name: 'B02', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]}
    standard_name: true_color
  true_color_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: 'B04'
        #modifiers: [effective_solar_pathlength_corrected]
      - name: 'B03'
        #modifiers: [effective_solar_pathlength_corrected]
      - name: 'B02'
        #modifiers: [effective_solar_pathlength_corrected]
    standard_name: true_color

satpy-0.20.0/satpy/etc/composites/msu-gs.yaml

sensor_name: visir/msu-gs
composites:
  overview_raw:
    compositor: !!python/name:satpy.composites.RGBCompositor
    prerequisites:
      - 00_9
      - 00_9
      - 10.8
    standard_name: overview
  overview:
    compositor: !!python/name:satpy.composites.RGBCompositor
    prerequisites:
      - name: 00_9
        modifiers: [sunz_corrected]
      - name: 00_9
        modifiers: [sunz_corrected]
      - 10.8
    standard_name: overview

satpy-0.20.0/satpy/etc/composites/olci.yaml

sensor_name: visir/olci
modifiers:
  rayleigh_corrected:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: rayleigh_only
    prerequisites:
      - {name: 'Oa08', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]
  rayleigh_corrected_marine_clean:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: marine_clean_aerosol
    prerequisites:
      - {name: 'Oa08', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]
  rayleigh_corrected_marine_tropical:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: tropical
    aerosol_type: marine_tropical_aerosol
    prerequisites:
      - {name: 'Oa08', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]
  rayleigh_corrected_desert:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: tropical
    aerosol_type: desert_aerosol
    prerequisites:
      - {name: 'Oa08', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]
  rayleigh_corrected_land:
    compositor: !!python/name:satpy.composites.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: continental_average_aerosol
    prerequisites:
      - {name: 'Oa08', modifiers: [sunz_corrected]}
    optional_prerequisites: [satellite_azimuth_angle, satellite_zenith_angle, solar_azimuth_angle, solar_zenith_angle]

composites:
  true_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'Oa08', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
      - {name: 'Oa06', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
      - {name: 'Oa03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
    standard_name: true_color
  true_color_land:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'Oa08', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land]}
      - {name: 'Oa06', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land]}
      - {name: 'Oa03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land]}
    standard_name: true_color
  true_color_desert:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'Oa08', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert]}
      - {name: 'Oa06', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert]}
      - {name: 'Oa03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert]}
    standard_name: true_color
  true_color_marine_clean:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'Oa08', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]}
      - {name: 'Oa06', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]}
      - {name: 'Oa03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]}
    standard_name: true_color
  true_color_marine_tropical:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'Oa08', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]}
      - {name: 'Oa06', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]}
      - {name: 'Oa03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]}
    standard_name: true_color
  true_color_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'Oa08', modifiers: [effective_solar_pathlength_corrected]}
      - {name: 'Oa06', modifiers: [effective_solar_pathlength_corrected]}
      - {name: 'Oa03', modifiers: [effective_solar_pathlength_corrected]}
    standard_name: true_color
  ocean_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: 'Oa08', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
      - {name: 'Oa06', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
      - {name: 'Oa03', modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]}
    standard_name: ocean_color

satpy-0.20.0/satpy/etc/composites/sar-c.yaml

sensor_name: sar/sar-c

satpy-0.20.0/satpy/etc/composites/sar.yaml

sensor_name: sar
composites:
  sar-ice:
    compositor: !!python/name:satpy.composites.sar.SARIce
    prerequisites:
      - {name: measurement, polarization: hh, calibration: gamma}
      - {name: measurement, polarization: hv, calibration: gamma}
    standard_name: sar-ice
  sar-ice-iw:
    compositor: !!python/name:satpy.composites.sar.SARIce
    prerequisites:
      - {name: measurement, polarization: vv, calibration: gamma}
      - {name: measurement, polarization: vh, calibration: gamma}
    standard_name: sar-ice
  sar-rgb:
    compositor: !!python/name:satpy.composites.sar.SARIce
    prerequisites:
      - {name: measurement, polarization: hh}
      - {name: measurement, polarization: hv}
    standard_name: sar-rgb
  sar-quick:
    compositor: !!python/name:satpy.composites.sar.SARQuickLook
    prerequisites:
      - {name: measurement, polarization: hh}
      - {name: measurement, polarization: hv}
    standard_name: sar-quick
  sar-ice-legacy:
    compositor: !!python/name:satpy.composites.sar.SARIceLegacy
    prerequisites:
      - {name: measurement, polarization: hh}
      - {name: measurement, polarization: hv}
    standard_name: sar-ice-legacy
  sar-land:
    compositor: !!python/name:satpy.composites.sar.SARIce
    prerequisites:
      - {name: measurement, polarization: hh}
      - {name: measurement, polarization: hv}
    standard_name: sar-land
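The sar.yaml prerequisites above select datasets by polarization and calibration rather than by name or wavelength alone; loading a composite by name resolves that query against the reader. A hedged example with a placeholder Sentinel-1 SAFE path:

from glob import glob
from satpy import Scene

# Hypothetical file list for a Sentinel-1 SAFE product.
scn = Scene(reader='sar-c_safe', filenames=glob('/data/S1A_IW_GRDH_*.SAFE/*/*'))
scn.load(['sar-ice'])   # pulls hh/hv measurement at gamma calibration
scn.save_dataset('sar-ice', filename='sar_ice.png')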
satpy-0.20.0/satpy/etc/composites/seviri.yaml

sensor_name: visir/seviri
modifiers:
  sunz_corrected:
    compositor: !!python/name:satpy.composites.SunZenithCorrector
  co2_corrected:
    compositor: !!python/name:satpy.composites.CO2Corrector
    sensor: [seviri]
    prerequisites: [IR_108, IR_134]

composites:
  ct_masked_ir:
    compositor: !!python/name:satpy.composites.MaskingCompositor
    prerequisites: [IR_108, ct]
    standard_name: ct_masked_ir
    transparency:
      0: 100
      1: 100
      2: 100
      3: 100
      4: 100
      10: 35
  nwc_geo_ct_masked_ir:
    compositor: !!python/name:satpy.composites.MaskingCompositor
    prerequisites: [IR_108, ct]
    standard_name: nwc_geo_ct_masked_ir
    # Default is opaque (transparency = 0)
    transparency:
      Cloud-free_land: 100
      Cloud-free_sea: 100
      Snow_over_land: 100
      Sea_ice: 100
      Fractional_clouds: 45
      High_semitransparent_thin_clouds: 50
      High_semitransparent_above_snow_ice: 60
  cloudtop:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: IR_039, modifiers: [co2_corrected]}
      - IR_108
      - IR_120
    standard_name: cloudtop
  cloudtop_daytime:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: IR_039, modifiers: [nir_emissive]}
      - IR_108
      - IR_120
    standard_name: cloudtop
  convection:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [WV_062, WV_073]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - {name: IR_039, modifiers: [co2_corrected]}
          - IR_108
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [IR_016, VIS006]
    standard_name: convection
  night_fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [IR_120, IR_108]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - IR_108
          - {name: IR_039, modifiers: [co2_corrected]}
      - IR_108
    standard_name: night_fog
  snow:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: VIS008, modifiers: [sunz_corrected]}
      - {name: IR_016, modifiers: [sunz_corrected]}
      - {name: IR_039, modifiers: [nir_reflectance]}
    standard_name: snow
  day_microphysics:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: VIS008, modifiers: [sunz_corrected]}
      - {name: IR_039, modifiers: [nir_reflectance]}
      - IR_108
    standard_name: day_microphysics
  day_microphysics_winter:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: VIS008, modifiers: [sunz_corrected]}
      - {name: IR_039, modifiers: [nir_reflectance]}
      - IR_108
    standard_name: day_microphysics_winter
  natural_color_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites: [IR_016, VIS008, VIS006]
    standard_name: natural_color
  natural_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: IR_016, modifiers: [sunz_corrected]}
      - {name: VIS008, modifiers: [sunz_corrected]}
      - {name: VIS006, modifiers: [sunz_corrected]}
    standard_name: natural_color
  fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [IR_120, IR_108]
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites: [IR_108, IR_087]
      - IR_108
    standard_name: fog
  cloudmask:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [cma, cma_pal]
    standard_name: cloudmask
  cloudtype:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ct, ct_pal]
    standard_name: cloudtype
  cloud_top_height:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ctth_alti, ctth_alti_pal]
    standard_name: cloud_top_height
  cloud_top_pressure:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ctth_pres, ctth_pres_pal]
    standard_name: cloud_top_pressure
  cloud_top_temperature:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ctth_tempe, ctth_tempe_pal]
    standard_name: cloud_top_temperature
  cloud_top_phase:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [cmic_phase, cmic_phase_pal]
    standard_name: cloud_top_phase
  cloud_drop_effective_radius:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [cmic_reff, cmic_reff_pal]
    standard_name: cloud_drop_effective_radius
  cloud_optical_thickness:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [cmic_cot, cmic_cot_pal]
    standard_name: cloud_optical_thickness
  cloud_liquid_water_path:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [cmic_lwp, cmic_lwp_pal]
    standard_name: cloud_liquid_water_path
  cloud_ice_water_path:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [cmic_iwp, cmic_iwp_pal]
    standard_name: cloud_ice_water_path
  precipitation_probability:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [pc, pc_pal]
    standard_name: precipitation_probability
  convective_rain_rate:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [crr, crr_pal]
    standard_name: convective_rain_rate
  convective_precipitation_hourly_accumulation:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [crr_accum, crr_pal]
    standard_name: convective_precipitation_hourly_accumulation
  total_precipitable_water:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ishai_tpw, ishai_tpw_pal]
    standard_name: total_precipitable_water
  showalter_index:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ishai_shw, ishai_shw_pal]
    standard_name: showalter_index
  lifted_index:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ishai_li, ishai_li_pal]
    standard_name: lifted_index
  convection_initiation_prob30:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ci_prob30, ci_pal]
    standard_name: convection_initiation_prob30
  convection_initiation_prob60:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ci_prob60, ci_pal]
    standard_name: convection_initiation_prob60
  convection_initiation_prob90:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [ci_prob90, ci_pal]
    standard_name: convection_initiation_prob90
  asii_prob:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [asii_turb_trop_prob, asii_turb_prob_pal]
    standard_name: asii_prob
  rdt_cell_type:
    compositor: !!python/name:satpy.composites.PaletteCompositor
    prerequisites: [MapCellCatType, MapCellCatType_pal]
    standard_name: rdt_cell_type
  realistic_colors:
    compositor: !!python/name:satpy.composites.RealisticColors
    standard_name: realistic_colors
    prerequisites:
      - {name: VIS006, modifiers: [sunz_corrected]}
      - {name: VIS008, modifiers: [sunz_corrected]}
      - {name: HRV, modifiers: [sunz_corrected]}
  ir_overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - {name: IR_039, modifiers: [co2_corrected]}
      - IR_108
      - IR_120
    standard_name: ir_overview
  overview_raw:
    compositor: !!python/name:satpy.composites.RGBCompositor
    prerequisites: [VIS006, VIS008, IR_108]
    standard_name: overview
  overview:
    compositor: !!python/name:satpy.composites.RGBCompositor
    prerequisites:
      - {name: VIS006, modifiers: [sunz_corrected]}
      - {name: VIS008, modifiers: [sunz_corrected]}
      - IR_108
    standard_name: overview
  colorized_ir_clouds:
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    prerequisites:
      - {name: 'IR_108'}
    standard_name: colorized_ir_clouds
  vis_sharpened_ir:
    compositor: !!python/name:satpy.composites.LuminanceSharpeningCompositor
    standard_name: vis_sharpened_ir
    prerequisites:
      - {name: 'HRV', modifiers: [sunz_corrected]}
      - {name: colorized_ir_clouds}
  ir_sandwich:
    compositor: !!python/name:satpy.composites.SandwichCompositor
    standard_name: ir_sandwich
    prerequisites:
      - {name: 'HRV', modifiers: [sunz_corrected]}
      - {name: colorized_ir_clouds}
  hrv_clouds:
    compositor: !!python/name:satpy.composites.GenericCompositor
    standard_name: hrv_clouds
    prerequisites:
      - {name: HRV, modifiers: [sunz_corrected]}
      - {name: HRV, modifiers: [sunz_corrected]}
      - IR_108
  hrv_fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    standard_name: hrv_fog
    prerequisites:
      - {name: IR_016, modifiers: [sunz_corrected]}
      - {name: HRV, modifiers: [sunz_corrected]}
      - {name: HRV, modifiers: [sunz_corrected]}
  natural_with_night_fog:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: natural_with_night_fog
    prerequisites: [natural_color, night_fog]
  natural_color_with_night_ir:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: natural_color_with_night_ir
    prerequisites: [natural_color, night_ir_with_background]
  natural_color_with_night_ir_hires:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: natural_color_with_night_ir_hires
    prerequisites: [natural_color, night_ir_with_background_hires]
  night_ir_alpha:
    compositor: !!python/name:satpy.composites.GenericCompositor
    standard_name: night_ir_alpha
    prerequisites: [{name: IR_039}, {name: IR_108}, {name: IR_120}, {name: IR_108}]
  night_ir_with_background:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites: [night_ir_alpha, _night_background]
  night_ir_with_background_hires:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background_hires
    prerequisites: [night_ir_alpha, _night_background_hires]
[sunz_corrected] - name: colorized_ir_clouds ir_sandwich: compositor: !!python/name:satpy.composites.SandwichCompositor standard_name: ir_sandwich prerequisites: - name: 'HRV' modifiers: [sunz_corrected] - name: colorized_ir_clouds hrv_clouds: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv_clouds prerequisites: - name: HRV modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] - IR_108 hrv_fog: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv_fog prerequisites: - name: IR_016 modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_with_night_fog prerequisites: - natural_color - night_fog natural_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir prerequisites: - natural_color - night_ir_with_background natural_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir_hires prerequisites: - natural_color - night_ir_with_background_hires night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - name: IR_039 - name: IR_108 - name: IR_120 - name: IR_108 night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background_hires prerequisites: - night_ir_alpha - _night_background_hires satpy-0.20.0/satpy/etc/composites/slstr.yaml000066400000000000000000000032021362525524100210660ustar00rootroot00000000000000sensor_name: visir/slstr composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S2_an modifiers: [sunz_corrected] - name: S3_an modifiers: [sunz_corrected] - S8_in standard_name: overview overview_oblique: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S2_ao modifiers: [sunz_corrected] - name: S3_ao modifiers: [sunz_corrected] - S8_io standard_name: overview_oblique natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S5_an modifiers: [sunz_corrected] - name: S3_an modifiers: [sunz_corrected] - name: S2_an modifiers: [sunz_corrected] standard_name: natural_color natural_color_oblique: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S5_ao modifiers: [sunz_corrected] - name: S3_ao modifiers: [sunz_corrected] - name: S2_ao modifiers: [sunz_corrected] standard_name: natural_color_oblique day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S3_an modifiers: [sunz_corrected] - name: S7_in modifiers: [nir_reflectance] - S8_in standard_name: day_microphysics day_microphysics_oblique: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S3_ao modifiers: [sunz_corrected] - name: S7_io modifiers: [nir_reflectance] - S8_io standard_name: day_microphysics_oblique satpy-0.20.0/satpy/etc/composites/viirs.yaml000066400000000000000000000335771362525524100210750ustar00rootroot00000000000000sensor_name: visir/viirs modifiers: rayleigh_corrected_crefl: compositor: 
!!python/name:satpy.composites.viirs.ReflectanceCorrector dem_filename: CMGDEM.hdf optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_crefl_iband: compositor: !!python/name:satpy.composites.viirs.ReflectanceCorrector dem_filename: CMGDEM.hdf optional_prerequisites: - name: satellite_azimuth_angle resolution: 371 - name: satellite_zenith_angle resolution: 371 - name: solar_azimuth_angle resolution: 371 - name: solar_zenith_angle resolution: 371 rayleigh_corrected_iband: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: I01 resolution: 371 modifiers: [sunz_corrected_iband] optional_prerequisites: - name: satellite_azimuth_angle resolution: 371 - name: satellite_zenith_angle resolution: 371 - name: solar_azimuth_angle resolution: 371 - name: solar_zenith_angle resolution: 371 rayleigh_corrected: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_marine_tropical: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_land: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 sunz_corrected: compositor: !!python/name:satpy.composites.SunZenithCorrector prerequisites: - name: solar_zenith_angle resolution: 742 sunz_corrected_iband: compositor: !!python/name:satpy.composites.SunZenithCorrector prerequisites: - name: solar_zenith_angle resolution: 371 nir_emissive_lowres: compositor: !!python/name:satpy.composites.NIREmissivePartFromReflectance prerequisites: - M15 optional_prerequisites: - name: solar_zenith_angle resolution: 742 nir_emissive_hires: compositor: !!python/name:satpy.composites.NIREmissivePartFromReflectance prerequisites: - I05 optional_prerequisites: - name: solar_zenith_angle resolution: 371 nir_reflectance_lowres: compositor: !!python/name:satpy.composites.NIRReflectance prerequisites: - M15 optional_prerequisites: - name: solar_zenith_angle resolution: 742 nir_reflectance_hires: compositor: !!python/name:satpy.composites.NIRReflectance prerequisites: - I05 optional_prerequisites: - name: solar_zenith_angle resolution: 371 composites: true_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 
modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: true_color high_resolution_band: red true_color_lowres: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_lowres_crefl: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: true_color true_color_lowres_land: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_land] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_land] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_land] standard_name: true_color true_color_lowres_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color false_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M11 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: false_color high_resolution_band: blue fire_temperature: # CIRA: Original VIIRS compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 calibration: radiance - name: M11 calibration: radiance - name: M10 calibration: radiance standard_name: fire_temperature name: fire_temperature fire_temperature_awips: # CIRA: AWIPS compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 - name: M11 - name: M10 standard_name: fire_temperature name: fire_temperature_awips fire_temperature_eumetsat: # CIRA: EUMETSAT compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 - name: M11 - name: M10 standard_name: fire_temperature name: fire_temperature_eumetsat fire_temperature_39refl: # CIRA: All bands in Reflectance units (%) compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 modifiers: [nir_reflectance_lowres] - name: M11 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] standard_name: fire_temperature name: fire_temperature_39refl natural_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M10 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: natural_color high_resolution_band: blue natural_color_sun: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: I03 modifiers: [sunz_corrected_iband] - name: I02 modifiers: [sunz_corrected_iband] - name: I01 modifiers: [sunz_corrected_iband] standard_name:
natural_color natural_color_sun_lowres: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: M10 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected] standard_name: natural_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected] - name: M04 modifiers: [sunz_corrected] - name: M03 modifiers: [sunz_corrected] standard_name: true_color overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - M05 - M07 - M15 standard_name: overview hr_overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - I01 - I02 - I05 standard_name: overview night_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - hncc_dnb - M12 - M15 standard_name: night_microphysics fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: fog dust: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: dust ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: ash night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M12 - M15 standard_name: night_fog ssec_fog: compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - I05 - I04 standard_name: temperature_difference cloudtop_daytime: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 modifiers: [nir_emissive_lowres] - name: M15 - name: M16 standard_name: cloudtop hr_cloudtop_daytime: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I04 modifiers: [nir_emissive_hires] - name: I05 - name: I05 standard_name: cloudtop snow_lowres: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M07 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] - name: M12 modifiers: [nir_reflectance_lowres] standard_name: snow snow_hires: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I02 modifiers: [sunz_corrected_iband] - name: I03 modifiers: [sunz_corrected_iband] - name: I04 modifiers: [nir_reflectance_hires] standard_name: snow histogram_dnb: compositor: !!python/name:satpy.composites.viirs.HistogramDNB prerequisites: - DNB - dnb_solar_zenith_angle standard_name: equalized_radiance units: "1" adaptive_dnb: compositor: !!python/name:satpy.composites.viirs.AdaptiveDNB prerequisites: - DNB - dnb_solar_zenith_angle adaptive_day: multiple
adaptive_mixed: always adaptive_night: never standard_name: equalized_radiance units: "1" dynamic_dnb: compositor: !!python/name:satpy.composites.viirs.ERFDNB prerequisites: - DNB - dnb_solar_zenith_angle - dnb_lunar_zenith_angle - dnb_moon_illumination_fraction standard_name: equalized_radiance units: "1" hncc_dnb: compositor: !!python/name:satpy.composites.viirs.NCCZinke prerequisites: - DNB - dnb_solar_zenith_angle - dnb_lunar_zenith_angle - dnb_moon_illumination_fraction standard_name: ncc_radiance units: "1" night_overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - hncc_dnb - hncc_dnb - M15 standard_name: night_overview snow_age: compositor: !!python/name:satpy.composites.viirs.SnowAge prerequisites: - name: M07 modifiers: [sunz_corrected] - name: M08 modifiers: [sunz_corrected] - name: M09 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] - name: M11 modifiers: [sunz_corrected] standard_name: snow_age ocean_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: ocean_color high_resolution_band: red satpy-0.20.0/satpy/etc/composites/virr.yaml000066400000000000000000000022441362525524100207060ustar00rootroot00000000000000sensor_name: visir/virr modifiers: sunz_corrected: compositor: !!python/name:satpy.composites.SunZenithCorrector prerequisites: - name: solar_zenith_angle rayleigh_corrected: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: '1' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle composites: true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected] - name: '9' modifiers: [sunz_corrected] - name: '7' modifiers: [sunz_corrected] standard_name: true_color true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '9' modifiers: [sunz_corrected, rayleigh_corrected] - name: '7' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color satpy-0.20.0/satpy/etc/composites/visir.yaml000066400000000000000000000244661362525524100210720ustar00rootroot00000000000000sensor_name: visir modifiers: sunz_corrected: compositor: !!python/name:satpy.composites.SunZenithCorrector optional_prerequisites: - solar_zenith_angle effective_solar_pathlength_corrected: compositor: !!python/name:satpy.composites.EffectiveSolarPathLengthCorrector optional_prerequisites: - solar_zenith_angle co2_corrected: compositor: !!python/name:satpy.composites.CO2Corrector prerequisites: - 10.8 - 13.4 nir_reflectance: compositor: !!python/name:satpy.composites.NIRReflectance prerequisites: - 11 optional_prerequisites: - solar_zenith_angle - 13.4 nir_emissive: compositor: !!python/name:satpy.composites.NIREmissivePartFromReflectance prerequisites: - 11 optional_prerequisites: - solar_zenith_angle - 13.4 atm_correction: compositor: !!python/name:satpy.composites.PSPAtmosphericalCorrection optional_prerequisites: - satellite_zenith_angle rayleigh_corrected: 
compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_tropical: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_desert: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_land: compositor: !!python/name:satpy.composites.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 6.2 - wavelength: 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 9.7 - wavelength: 10.8 - wavelength: 6.2 standard_name: airmass ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: ash cloudtop: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 3.9 - 10.8 - 12.0 standard_name: cloudtop convection: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 6.2 - 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 3.9 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 1.6 - 0.6 standard_name: convection snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.8 modifiers: [sunz_corrected] - wavelength: 1.63 modifiers: [sunz_corrected] - wavelength: 3.9 modifiers: [nir_reflectance] standard_name: snow day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.85 modifiers: [sunz_corrected] - wavelength: 3.9 modifiers: [nir_reflectance] - 10.8 standard_name: day_microphysics dust: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: dust fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: fog 
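# The ash, dust and fog recipes above (and night_fog below) share one
# structure: a GenericCompositor RGB whose red and green components are
# brightness temperature differences built from nested DifferenceCompositor
# entries (12.0 minus 10.8 and 10.8 minus 8.7 micron here) and whose blue
# component is the plain 10.8 micron channel. A minimal sketch of that
# pattern as a new entry is shown (commented out) below; "my_split_window"
# is an illustrative placeholder name, not a composite shipped with Satpy.
#  my_split_window:
#    compositor: !!python/name:satpy.composites.GenericCompositor
#    prerequisites:
#    - compositor: !!python/name:satpy.composites.DifferenceCompositor
#      prerequisites:
#      - 12.0
#      - 10.8
#    - compositor: !!python/name:satpy.composites.DifferenceCompositor
#      prerequisites:
#      - 10.8
#      - 8.7
#    - 10.8
#    standard_name: my_split_window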
green_snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 1.63 - 0.635 - 10.8 standard_name: green_snow natural_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 1.63 - 0.85 - 0.635 standard_name: natural_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 1.63 modifiers: [sunz_corrected] - wavelength: 0.85 modifiers: [sunz_corrected] - wavelength: 0.635 modifiers: [sunz_corrected] standard_name: natural_color night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 3.9 - 10.8 standard_name: night_fog overview_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.6 - 0.8 - 10.8 standard_name: overview overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.6 modifiers: [sunz_corrected] - wavelength: 0.8 modifiers: [sunz_corrected] - 10.8 standard_name: overview true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.65 - 0.5 - 0.45 standard_name: true_color natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_with_night_fog prerequisites: - natural_color - night_fog - solar_zenith_angle precipitation_probability: compositor: !!python/name:satpy.composites.cloud_products.PrecipCloudsRGB prerequisites: - pc_precip_light - pc_precip_moderate - pc_precip_intense - pc_status_flag standard_name: precipitation_probability cloudmask: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cma - cma_pal standard_name: cloudmask cloudmask_extended: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cma_extended - cma_extended_pal standard_name: cloudmask cloudtype: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ct - ct_pal standard_name: cloudtype cloud_top_height: compositor: !!python/name:satpy.composites.cloud_products.CloudTopHeightCompositor prerequisites: - ctth_alti - ctth_alti_pal - ctth_status_flag standard_name: cloud_top_height cloud_top_pressure: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ctth_pres - ctth_pres_pal standard_name: cloud_top_pressure cloud_top_temperature: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - ctth_tempe - ctth_tempe_pal standard_name: cloud_top_temperature cloud_top_phase: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cpp_phase - cpp_phase_pal standard_name: cloud_top_phase cloud_drop_effective_radius: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cpp_reff - cpp_reff_pal standard_name: cloud_drop_effective_radius cloud_optical_thickness: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cpp_cot - cpp_cot_pal standard_name: cloud_optical_thickness cloud_water_path: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cpp_cwp - cpp_cwp_pal standard_name: cloud_water_path ice_water_path: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - cpp_iwp - cpp_iwp_pal standard_name: ice_water_path liquid_water_path: compositor: !!python/name:satpy.composites.PaletteCompositor 
prerequisites: - cpp_lwp - cpp_lwp_pal standard_name: liquid_water_path night_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 3.9 - 10.8 standard_name: night_microphysics ir108_3d: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: ir108_3d prerequisites: - wavelength: 10.8 ir_cloud_day: standard_name: ir_cloud_day compositor: !!python/name:satpy.composites.CloudCompositor prerequisites: - 10.8 transition_min: 258.15 transition_max: 298.15 transition_gamma: 3.0 _night_background: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: night_background filename: BlackMarble_2016_01deg_geo.tif _night_background_hires: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: night_background_hires filename: BlackMarble_2016_3km_geo.tif satpy-0.20.0/satpy/etc/enhancements/000077500000000000000000000000001362525524100173215ustar00rootroot00000000000000satpy-0.20.0/satpy/etc/enhancements/abi.yaml000066400000000000000000000021421362525524100207370ustar00rootroot00000000000000enhancements: cimss_true_color: standard_name: cimss_true_color sensor: abi operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 120.} - name: sqrt method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 2.0} - name: contrast method: !!python/name:satpy.enhancements.abi.cimss_true_color_contrast cmi_reflectance_default: standard_name: toa_lambertian_equivalent_albedo_multiplied_by_cosine_solar_zenith_angle operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} airmass: standard_name: airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-26.2, -43.2, 243.9] max_stretch: [0.6, 6.7, 208.5] satpy-0.20.0/satpy/etc/enhancements/ahi.yaml000066400000000000000000000004441362525524100207500ustar00rootroot00000000000000enhancements: airmass: # matches ABI standard_name: airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-26.2, -43.2, 243.9] max_stretch: [0.6, 6.7, 208.5] satpy-0.20.0/satpy/etc/enhancements/generic.yaml000066400000000000000000000605451362525524100216330ustar00rootroot000000000000003d_filter: &3d_filter !!python/name:satpy.enhancements.three_d_effect '' enhancements: default: operations: - name: stretch method: &stretchfun !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} reflectance_default: standard_name: toa_bidirectional_reflectance operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} true_color_default: standard_name: true_color operations: - name: cira_stretch method: !!python/name:satpy.enhancements.cira_stretch true_color_crefl: name: true_color_crefl standard_name: true_color operations: - name: crefl_scaling method: !!python/name:satpy.enhancements.crefl_scaling kwargs: # Polar2Grid's scaling: "Preferred". 
idx: [0., 25., 55., 100., 255.] sc: [0., 90., 140., 175., 255.] # Ralph's new scaling: Looks darker. # idx: [0., 30., 60., 120., 190., 255.] # sc: [0., 100., 128., 188., 223., 255.] # Ralph's old scaling: Looks lighter. # idx: [0., 30., 60., 120., 190., 255.] # sc: [0, 110, 160, 210, 240, 255] overview_default: standard_name: overview operations: - name: inverse method: &inversefun !!python/name:satpy.enhancements.invert '' args: - [false, false, true] - name: stretch method: *stretchfun kwargs: {stretch: linear} - name: gamma method: &gammafun !!python/name:satpy.enhancements.gamma '' kwargs: {gamma: 1.6} ocean_color_default: standard_name: ocean_color operations: - name: cira_stretch method: !!python/name:satpy.enhancements.cira_stretch - name: gamma method: *gammafun kwargs: {gamma: 2.6} night_overview_default: standard_name: night_overview operations: - name: inverse method: *inversefun args: - [false, false, true] - name: stretch method: *stretchfun kwargs: {stretch: linear} - name: gamma method: *gammafun kwargs: {gamma: 1.6} natural_color_default: standard_name: natural_color operations: - name: stretch method: *stretchfun kwargs: {stretch: crude, min_stretch: 0, max_stretch: 120} - name: gamma method: *gammafun kwargs: {gamma: 1.8} fire_temperature: standard_name: fire_temperature name: fire_temperature operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [3.5, 35., 85.] - name: gamma method: *gammafun kwargs: {gamma: [1.0, 1.0, 1.0]} fire_temperature_awips: standard_name: fire_temperature name: fire_temperature_awips operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [273.0, 0, 0] max_stretch: [333.0, 100., 75.] - name: gamma method: *gammafun kwargs: {gamma: [0.4, 1.0, 1.0]} fire_temperature_eumetsat: standard_name: fire_temperature name: fire_temperature_eumetsat operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [273.0, 0, 0] max_stretch: [350.0, 60., 60.] - name: gamma method: *gammafun kwargs: {gamma: [1.0, 1.0, 1.0]} fire_temperature_39refl: standard_name: fire_temperature name: fire_temperature_39refl operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [50., 100., 75.] 
- name: gamma method: *gammafun kwargs: {gamma: [1.0, 1.0, 1.0]} airmass_default: standard_name: airmass operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [-25, -40, 243] max_stretch: [0, 5, 208] green_snow_default: standard_name: green_snow operations: - name: inverse method: *inversefun args: - [false, false, true] - name: stretch method: *stretchfun kwargs: {stretch: crude} - name: gamma method: *gammafun kwargs: {gamma: 1.6} convection_default: standard_name: convection operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [-30, 0, -70] max_stretch: [0, 55, 20] dust_default: standard_name: dust operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [-4, 0, 261] max_stretch: [2, 15, 289] - name: gamma method: *gammafun kwargs: gamma: [1, 2.5, 1] ash_default: standard_name: ash operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [-4, -4, 243] max_stretch: [2, 5, 303] fog_default: standard_name: fog operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 6, 283] night_fog_default: standard_name: night_fog operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 6, 293] - name: gamma method: *gammafun kwargs: gamma: [1, 2, 1] snow_default: standard_name: snow operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [100, 70, 30] - name: gamma method: *gammafun kwargs: gamma: [1.7, 1.7, 1.7] day_microphysics_default: standard_name: day_microphysics operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 60, 323] - name: gamma method: *gammafun kwargs: gamma: [1, 2.5, 1] day_microphysics_winter: standard_name: day_microphysics_winter operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 25, 323] - name: gamma method: *gammafun kwargs: gamma: [1, 1.5, 1] cloudtop_default: standard_name: cloudtop operations: - name: inverse method: *inversefun args: [true] - name: stretch method: *stretchfun kwargs: stretch: linear cutoffs: [0.005, 0.005] sar-ice: standard_name: sar-ice operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [0.10, 1.37, 0.32 ] - name: gamma method: *gammafun kwargs: gamma: [2, 3, 2] sar-ice-legacy: standard_name: sar-ice-legacy operations: - name: stretch method: *stretchfun kwargs: stretch: linear cutoffs: [0.2, 0.02] - name: gamma method: *gammafun kwargs: gamma: [1, 1.2, 1] sar-land: standard_name: sar-land operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0.01, 1. 
, 0.15 ] max_stretch: [0.765, 50., 1.4] - name: gamma method: *gammafun kwargs: gamma: [1.5, 2.25, 1.5] sar-rgb: standard_name: sar-rgb operations: - name: stretch method: *stretchfun kwargs: stretch: linear - name: inverse method: *inversefun args: - [true, true, true] sar-quick: standard_name: sar-quick operations: - name: stretch method: *stretchfun kwargs: stretch: linear cutoffs: [0.2, 0.02] natural_with_ir_night: standard_name: natural_with_night_fog operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloudtype: standard_name: cloudtype operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloudmask: standard_name: cloudmask operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_top_height: standard_name: cloud_top_height operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_top_pressure: standard_name: cloud_top_pressure operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_top_temperature: standard_name: cloud_top_temperature operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_top_phase: standard_name: cloud_top_phase operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_drop_effective_radius: standard_name: cloud_drop_effective_radius operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_optical_thickness: standard_name: cloud_optical_thickness operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_liquid_water_path: standard_name: cloud_liquid_water_path operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloud_ice_water_path: standard_name: cloud_ice_water_path operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] precipitation_probability: standard_name: precipitation_probability operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convective_rain_rate: standard_name: convective_rain_rate operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convective_precipitation_hourly_accumulation: standard_name: convective_precipitation_hourly_accumulation operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] total_precipitable_water: standard_name: total_precipitable_water operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] showalter_index: standard_name: showalter_index operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] lifted_index: standard_name: lifted_index operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convection_initiation_prob30: standard_name: convection_initiation_prob30 operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] 
max_stretch: [1, 1, 1] convection_initiation_prob60: standard_name: convection_initiation_prob60 operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] convection_initiation_prob90: standard_name: convection_initiation_prob90 operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] rdt_cell_type: standard_name: rdt_cell_type operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] asii_prob: standard_name: asii_prob operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] day_microphysics_ahi: standard_name: day_microphysics_ahi operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 70, 323] cloud_phase_distinction_default: standard_name: cloud_phase_distinction operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [280.67, 0, 1] max_stretch: [219.62, 78, 59] water_vapors1_default: standard_name: water_vapors1 operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [278.96, 242.67, 261.03] max_stretch: [202.29, 214.66, 245.12] - name: gamma method: *gammafun kwargs: gamma: [10, 5.5, 5.5] water_vapors2_default: standard_name: water_vapors2 operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [30, 278.15, 243.9] max_stretch: [-3, 213.15, 208.5] - name: gamma method: *gammafun kwargs: gamma: [3.5, 2.5, 2.5] ncc_default: standard_name: ncc_radiance operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0] max_stretch: [0.075] realistic_colors: standard_name: realistic_colors operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [110, 110, 110] - name: gamma method: *gammafun kwargs: gamma: [1.4, 1.4, 1.2] snow_age_default: standard_name: snow_age operations: - name: snow_age method: !!python/name:satpy.enhancements.lookup kwargs: luts: [[0, 0, 0], [1, 2, 2], [3, 8, 5], [4, 12, 8], [6, 15, 10], [8, 18, 13], [9, 21, 16], [11, 24, 19], [13, 26, 21], [14, 28, 24], [ 16, 30, 27], [18, 32, 30], [19, 34, 32], [21, 36, 35], [22, 38, 38], [24, 40, 40], [ 26, 42, 43], [27, 43, 46], [29, 45, 49], [31, 47, 51], [32, 49, 54], [34, 50, 57], [ 36, 52, 60], [37, 54, 62], [39, 55, 65], [40, 57, 68], [42, 59, 70], [44, 60, 73], [ 45, 62, 76], [47, 64, 79], [49, 66, 81], [50, 67, 84], [52, 69, 87], [53, 71, 90], [ 55, 73, 92], [56, 75, 95], [58, 77, 98], [59, 79, 100], [61, 81, 103], [62, 83, 106], [ 64, 85, 109], [65, 86, 111], [67, 88, 114], [68, 90, 117], [70, 92, 119], [71, 94, 121], [ 73, 96, 124], [74, 98, 126], [76, 100, 129], [77, 102, 131], [79, 104, 134], [80, 106, 136], [ 82, 107, 139], [83, 109, 141], [85, 111, 144], [86, 113, 146], [88, 115, 149], [89, 117, 151], [ 91, 118, 154], [92, 120, 156], [94, 122, 159], [95, 124, 161], [97, 126, 162], [98, 128, 164], [ 100, 129, 166], [101, 131, 168], [103, 133, 170], [104, 135, 172], [106, 137, 173], [ 107, 138, 175], [109, 140, 177], [110, 142, 179], [112, 144, 181], [113, 145, 183], [ 114, 147, 184], [116, 149, 186], [117, 151, 188], [118, 152, 190], [120, 154, 192], [ 121, 156, 193], [123, 158, 194], [124, 159, 196], [125, 161, 197], [127, 163, 199], [ 128, 165, 200], [130, 166, 202], [131, 168, 203], [132, 170, 205], [134, 172, 206], [ 135, 173, 206], [136, 175, 207], [138, 177, 208], 
[139, 178, 209], [141, 180, 210], [ 142, 182, 211], [143, 184, 212], [145, 185, 213], [146, 187, 214], [148, 189, 215], [ 149, 191, 216], [150, 192, 217], [152, 194, 218], [153, 196, 219], [154, 198, 220], [ 156, 200, 220], [157, 201, 221], [159, 203, 221], [160, 205, 222], [161, 207, 223], [ 162, 209, 223], [163, 210, 224], [164, 212, 225], [166, 213, 225], [167, 214, 226], [ 168, 216, 227], [169, 217, 227], [171, 218, 228], [173, 220, 228], [174, 221, 228], [ 175, 222, 229], [176, 224, 229], [177, 225, 229], [178, 226, 230], [179, 227, 230], [ 181, 228, 230], [182, 229, 231], [183, 230, 231], [184, 231, 232], [185, 232, 232], [ 186, 233, 232], [187, 234, 233], [188, 235, 233], [190, 236, 233], [191, 237, 234], [ 192, 237, 234], [193, 238, 234], [194, 239, 235], [195, 240, 235], [196, 240, 236], [ 196, 241, 236], [197, 242, 236], [198, 243, 237], [199, 243, 237], [200, 244, 237], [ 201, 245, 238], [202, 245, 238], [203, 245, 238], [204, 246, 239], [205, 246, 239], [ 206, 246, 239], [207, 247, 239], [208, 247, 239], [209, 247, 239], [209, 248, 240], [ 210, 248, 240], [210, 248, 240], [211, 248, 240], [212, 248, 240], [212, 248, 241], [ 213, 248, 241], [214, 248, 241], [215, 248, 241], [216, 248, 241], [217, 248, 242], [ 217, 248, 242], [218, 248, 242], [219, 248, 242], [219, 248, 242], [220, 248, 243], [ 221, 248, 243], [221, 249, 243], [222, 249, 243], [223, 249, 243], [223, 249, 244], [ 223, 249, 244], [224, 249, 244], [224, 249, 244], [225, 249, 245], [225, 249, 245], [ 226, 249, 245], [226, 249, 245], [227, 249, 245], [227, 249, 246], [228, 249, 246], [ 228, 250, 246], [229, 250, 246], [229, 250, 246], [230, 250, 247], [230, 250, 247], [ 231, 250, 247], [231, 250, 247], [232, 250, 247], [233, 250, 248], [233, 250, 248], [ 233, 250, 248], [234, 250, 248], [234, 250, 248], [234, 250, 249], [235, 251, 249], [ 235, 251, 249], [235, 251, 249], [236, 251, 249], [236, 251, 250], [237, 251, 250], [ 237, 251, 250], [237, 251, 250], [238, 251, 250], [238, 251, 250], [238, 251, 250], [ 239, 251, 250], [239, 251, 250], [240, 251, 250], [240, 251, 250], [240, 252, 250], [ 241, 252, 250], [241, 252, 251], [241, 252, 251], [242, 252, 251], [242, 252, 251], [ 242, 252, 251], [243, 252, 251], [243, 252, 251], [244, 252, 251], [244, 252, 251], [ 244, 252, 251], [245, 252, 252], [245, 252, 252], [245, 253, 252], [246, 253, 252], [ 246, 253, 252], [247, 253, 252], [248, 253, 252], [248, 253, 252], [248, 253, 252], [ 249, 253, 252], [249, 253, 253], [249, 253, 253], [250, 253, 253], [250, 253, 253], [ 250, 253, 253], [250, 253, 253], [251, 254, 253], [251, 254, 253], [251, 254, 253], [ 252, 254, 253], [252, 254, 254], [252, 254, 254], [253, 254, 254], [253, 254, 254], [ 253, 254, 254], [253, 254, 254], [254, 254, 254], [254, 254, 254], [254, 254, 254], [254, 254, 254], [255, 255, 255]] night_microphysics_default: standard_name: night_microphysics operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 10, 293] ir_overview_default: standard_name: ir_overview operations: - name: inverse method: *inversefun args: [true] - name: stretch method: *stretchfun kwargs: stretch: histogram ir108_3d: standard_name: ir108_3d operations: - name: inverse method: *inversefun args: [true] - name: 3d_filter method: *3d_filter - name: stretch method: *stretchfun kwargs: {stretch: linear} ir_cloud_day: standard_name: ir_cloud_day operations: - name: inverse method: *inversefun args: - [True, false] - name: stretch method: *stretchfun kwargs: stretch: linear - name: 3d 
method: *3d_filter kwargs: weight: 1.0 colorized_ir_clouds: standard_name: colorized_ir_clouds operations: - name: colorize method: &colorizefun !!python/name:satpy.enhancements.colorize '' kwargs: palettes: - {colors: spectral, min_value: 193.15, max_value: 253.149999} - {colors: greys, min_value: 253.15, max_value: 303.15} vis_sharpened_ir: standard_name: vis_sharpened_ir operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] ir_sandwich: standard_name: ir_sandwich operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] hrv_clouds: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 323] max_stretch: [100, 100, 203] standard_name: hrv_clouds hrv_fog: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [70, 100, 100] standard_name: hrv_fog true_color_with_night_ir: standard_name: true_color_with_night_ir operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] night_background: standard_name: night_background operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [255, 255, 255] night_ir_alpha: standard_name: night_ir_alpha operations: - name: stretch method: *stretchfun kwargs: {stretch: linear, cutoffs: [0.02, 0.02]} - name: inverse method: *inversefun args: - [true, true, true, true] night_ir_with_background: standard_name: night_ir_with_background operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] satpy-0.20.0/satpy/etc/enhancements/glm.yaml000066400000000000000000000010621362525524100207630ustar00rootroot00000000000000enhancements: flash_extent_density: name: flash_extent_density operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: ylorrd, min_value: 0, max_value: 20} # Requires C14 from ABI c14_flash_extent_density: standard_name: c14_flash_extent_density operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] satpy-0.20.0/satpy/etc/enhancements/mersi-2.yaml000066400000000000000000000007771362525524100214760ustar00rootroot00000000000000enhancements: default: operations: - name: stretch method: &stretchfun !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} cloudtop_default: standard_name: cloudtop operations: - name: stretch method: *stretchfun kwargs: stretch: crude min_stretch: [320, 310, 310] max_stretch: [220, 220, 220] - name: gamma method: &gammafun !!python/name:satpy.enhancements.gamma '' kwargs: gamma: [1.4, 1.4, 1.2] satpy-0.20.0/satpy/etc/enhancements/mimic.yaml000066400000000000000000001027441362525524100213130ustar00rootroot00000000000000enhancements: tpwGrid: name: tpwGrid operations: - name: tpw_nrl_brightened method: !!python/name:satpy.enhancements.mimic.total_precipitable_water kwargs: palettes: {colors: [[0.0, [188, 132, 98]], [0.27472527472527475, [188, 130, 99]], [0.5494505494505495, [187, 128, 100]], [0.8241758241758242, [186, 125, 101]], [1.098901098901099, [185, 124, 102]], [1.3736263736263736, [184, 122, 103]], [1.6483516483516485, [183, 120, 103]], [1.9230769230769234, [182, 119, 104]], [2.197802197802198, [182, 118, 106]], [2.4725274725274726, [181, 116, 
107]], [2.7472527472527473, [180, 114, 108]], [3.0219780219780223, [179, 114, 108]], [3.296703296703297, [178, 113, 109]], [3.5714285714285716, [177, 111, 110]], [3.8461538461538467, [177, 110, 111]], [4.120879120879121, [176, 108, 111]], [4.395604395604396, [176, 106, 110]], [4.670329670329671, [175, 104, 110]], [4.945054945054945, [174, 103, 111]], [5.21978021978022, [174, 101, 111]], [5.4945054945054945, [173, 99, 111]], [5.76923076923077, [172, 97, 111]], [6.043956043956045, [171, 95, 112]], [6.318681318681319, [171, 93, 112]], [6.593406593406594, [171, 91, 113]], [6.868131868131869, [170, 90, 113]], [7.142857142857143, [169, 88, 114]], [7.417582417582418, [169, 86, 114]], [7.692307692307693, [168, 85, 115]], [7.967032967032968, [167, 83, 115]], [8.241758241758243, [166, 81, 116]], [8.516483516483516, [166, 80, 118]], [8.791208791208792, [165, 78, 119]], [9.065934065934067, [165, 76, 120]], [9.340659340659341, [164, 75, 120]], [9.615384615384617, [164, 74, 121]], [9.89010989010989, [163, 72, 123]], [10.164835164835166, [162, 70, 124]], [10.43956043956044, [161, 69, 125]], [10.714285714285715, [160, 67, 126]], [10.989010989010989, [160, 66, 128]], [11.263736263736265, [159, 64, 130]], [11.53846153846154, [159, 63, 131]], [11.813186813186814, [158, 61, 132]], [12.08791208791209, [158, 60, 134]], [12.362637362637363, [157, 58, 136]], [12.637362637362639, [156, 57, 137]], [12.912087912087912, [155, 56, 139]], [13.186813186813188, [155, 54, 141]], [13.461538461538463, [154, 52, 142]], [13.736263736263737, [154, 52, 144]], [14.010989010989013, [153, 50, 146]], [14.285714285714286, [153, 49, 148]], [14.560439560439562, [152, 47, 150]], [14.835164835164836, [150, 46, 151]], [15.109890109890111, [147, 45, 150]], [15.384615384615387, [144, 44, 150]], [15.65934065934066, [142, 44, 152]], [15.934065934065936, [138, 48, 156]], [16.20879120879121, [135, 50, 159]], [16.483516483516485, [132, 52, 161]], [16.75824175824176, [131, 56, 164]], [17.032967032967033, [126, 60, 168]], [17.30769230769231, [123, 62, 171]], [17.582417582417584, [121, 65, 173]], [17.857142857142858, [117, 69, 177]], [18.131868131868135, [114, 71, 180]], [18.40659340659341, [111, 74, 182]], [18.681318681318682, [109, 77, 185]], [18.956043956043956, [104, 82, 190]], [19.230769230769234, [101, 84, 193]], [19.505494505494507, [98, 86, 195]], [19.78021978021978, [96, 89, 198]], [20.05494505494506, [93, 92, 200]], [20.329670329670332, [90, 95, 204]], [20.604395604395606, [87, 98, 207]], [20.87912087912088, [83, 103, 211]], [21.153846153846157, [80, 105, 214]], [21.42857142857143, [77, 108, 216]], [21.703296703296704, [74, 110, 220]], [21.978021978021978, [71, 114, 222]], [22.252747252747255, [68, 116, 225]], [22.52747252747253, [65, 120, 228]], [22.802197802197803, [61, 125, 233]], [23.07692307692308, [57, 127, 235]], [23.351648351648354, [55, 130, 239]], [23.626373626373628, [52, 133, 242]], [23.9010989010989, [49, 137, 245]], [24.17582417582418, [47, 139, 247]], [24.450549450549453, [44, 142, 250]], [24.725274725274726, [40, 146, 255]], [25.000000000000004, [40, 148, 255]], [25.274725274725277, [42, 150, 255]], [25.54945054945055, [46, 154, 255]], [25.824175824175825, [50, 158, 255]], [26.098901098901102, [52, 159, 255]], [26.373626373626376, [55, 163, 255]], [26.64835164835165, [59, 167, 255]], [26.923076923076927, [61, 169, 255]], [27.1978021978022, [65, 173, 255]], [27.472527472527474, [70, 178, 255]], [27.747252747252748, [73, 182, 255]], [28.021978021978025, [76, 185, 255]], [28.2967032967033, [79, 188, 255]], 
[28.571428571428573, [82, 192, 255]], [28.84615384615385, [86, 195, 255]], [29.120879120879124, [88, 199, 255]], [29.395604395604398, [91, 201, 255]], [29.67032967032967, [95, 205, 255]], [29.94505494505495, [97, 207, 255]], [30.219780219780223, [101, 210, 255]], [30.494505494505496, [104, 213, 255]], [30.769230769230774, [107, 216, 255]], [31.043956043956047, [110, 218, 255]], [31.31868131868132, [114, 222, 255]], [31.593406593406595, [115, 223, 255]], [31.868131868131872, [119, 227, 255]], [32.142857142857146, [123, 231, 255]], [32.41758241758242, [125, 233, 255]], [32.69230769230769, [127, 236, 255]], [32.96703296703297, [133, 241, 255]], [33.24175824175825, [136, 244, 255]], [33.51648351648352, [139, 247, 255]], [33.791208791208796, [143, 252, 255]], [34.065934065934066, [145, 254, 255]], [34.34065934065934, [148, 255, 254]], [34.61538461538462, [148, 255, 247]], [34.89010989010989, [148, 255, 241]], [35.16483516483517, [148, 255, 235]], [35.439560439560445, [148, 255, 229]], [35.714285714285715, [148, 255, 223]], [35.98901098901099, [148, 255, 217]], [36.26373626373627, [148, 255, 210]], [36.53846153846154, [148, 255, 205]], [36.81318681318682, [148, 255, 199]], [37.08791208791209, [148, 255, 193]], [37.362637362637365, [148, 255, 187]], [37.63736263736264, [148, 255, 181]], [37.91208791208791, [148, 255, 174]], [38.18681318681319, [148, 255, 168]], [38.46153846153847, [148, 255, 162]], [38.73626373626374, [148, 255, 156]], [39.010989010989015, [148, 255, 150]], [39.28571428571429, [151, 255, 148]], [39.56043956043956, [157, 255, 148]], [39.83516483516484, [163, 255, 148]], [40.10989010989012, [169, 255, 148]], [40.38461538461539, [175, 255, 148]], [40.659340659340664, [181, 255, 148]], [40.934065934065934, [188, 255, 148]], [41.20879120879121, [197, 255, 148]], [41.48351648351649, [203, 255, 148]], [41.75824175824176, [209, 255, 148]], [42.032967032967036, [215, 255, 148]], [42.307692307692314, [221, 255, 148]], [42.582417582417584, [227, 255, 148]], [42.85714285714286, [233, 255, 148]], [43.13186813186814, [239, 255, 148]], [43.40659340659341, [244, 255, 148]], [43.681318681318686, [250, 255, 148]], [43.956043956043956, [254, 254, 146]], [44.23076923076923, [255, 251, 143]], [44.50549450549451, [255, 249, 141]], [44.78021978021978, [255, 247, 139]], [45.05494505494506, [255, 242, 134]], [45.329670329670336, [255, 239, 131]], [45.604395604395606, [255, 236, 128]], [45.87912087912088, [255, 233, 125]], [46.15384615384616, [255, 231, 122]], [46.42857142857143, [255, 227, 120]], [46.70329670329671, [255, 225, 117]], [46.978021978021985, [255, 221, 113]], [47.252747252747255, [255, 218, 110]], [47.52747252747253, [255, 216, 108]], [47.8021978021978, [255, 211, 103]], [48.07692307692308, [255, 209, 101]], [48.35164835164836, [255, 206, 98]], [48.62637362637363, [255, 204, 96]], [48.901098901098905, [255, 199, 91]], [49.17582417582418, [255, 196, 87]], [49.45054945054945, [255, 193, 85]], [49.72527472527473, [255, 191, 82]], [50.00000000000001, [255, 188, 80]], [50.27472527472528, [255, 185, 77]], [50.549450549450555, [255, 182, 74]], [50.82417582417583, [255, 179, 70]], [51.0989010989011, [255, 176, 68]], [51.37362637362638, [255, 173, 64]], [51.64835164835165, [255, 171, 61]], [51.92307692307693, [255, 167, 58]], [52.197802197802204, [255, 164, 55]], [52.472527472527474, [255, 161, 52]], [52.74725274725275, [255, 158, 49]], [53.02197802197803, [255, 154, 46]], [53.2967032967033, [255, 151, 42]], [53.57142857142858, [255, 148, 40]], [53.846153846153854, [252, 144, 39]], 
[54.120879120879124, [249, 140, 39]], [54.3956043956044, [246, 136, 39]], [54.67032967032967, [243, 132, 39]], [54.94505494505495, [240, 128, 39]], [55.219780219780226, [237, 125, 39]], [55.494505494505496, [234, 121, 39]], [55.769230769230774, [231, 118, 39]], [56.04395604395605, [227, 114, 39]], [56.31868131868132, [225, 111, 39]], [56.5934065934066, [222, 108, 39]], [56.868131868131876, [219, 104, 39]], [57.142857142857146, [216, 101, 39]], [57.41758241758242, [213, 97, 39]], [57.6923076923077, [210, 95, 39]], [57.96703296703297, [206, 91, 39]], [58.24175824175825, [204, 89, 39]], [58.51648351648352, [200, 86, 39]], [58.791208791208796, [198, 83, 39]], [59.06593406593407, [194, 80, 39]], [59.34065934065934, [192, 78, 39]], [59.61538461538462, [188, 75, 39]], [59.8901098901099, [185, 73, 39]], [60.16483516483517, [182, 70, 39]], [60.439560439560445, [179, 68, 39]], [60.71428571428572, [176, 66, 39]], [60.98901098901099, [173, 63, 39]], [61.26373626373627, [171, 62, 39]], [61.53846153846155, [169, 59, 39]], [61.81318681318682, [167, 57, 40]], [62.087912087912095, [165, 56, 40]], [62.362637362637365, [165, 54, 40]], [62.63736263736264, [163, 52, 40]], [62.91208791208792, [161, 50, 41]], [63.18681318681319, [159, 48, 42]], [63.46153846153847, [159, 47, 42]], [63.736263736263744, [157, 46, 43]], [64.01098901098902, [155, 44, 43]], [64.28571428571429, [154, 44, 45]], [64.56043956043956, [156, 45, 48]], [64.83516483516485, [157, 46, 52]], [65.10989010989012, [159, 48, 55]], [65.38461538461539, [160, 50, 58]], [65.65934065934067, [162, 52, 62]], [65.93406593406594, [164, 53, 65]], [66.20879120879121, [165, 55, 69]], [66.4835164835165, [167, 57, 72]], [66.75824175824177, [169, 59, 76]], [67.03296703296704, [171, 61, 80]], [67.3076923076923, [172, 63, 83]], [67.58241758241759, [174, 65, 87]], [67.85714285714286, [176, 67, 91]], [68.13186813186813, [177, 69, 95]], [68.40659340659342, [179, 71, 98]], [68.68131868131869, [181, 73, 102]], [68.95604395604396, [182, 75, 106]], [69.23076923076924, [184, 78, 109]], [69.50549450549451, [186, 80, 114]], [69.78021978021978, [188, 82, 117]], [70.05494505494507, [189, 85, 121]], [70.32967032967034, [191, 87, 125]], [70.6043956043956, [193, 90, 129]], [70.87912087912089, [194, 92, 132]], [71.15384615384616, [196, 95, 137]], [71.42857142857143, [198, 97, 140]], [71.70329670329672, [199, 100, 144]], [71.97802197802199, [201, 103, 148]], [72.25274725274726, [203, 105, 152]], [72.52747252747254, [205, 108, 155]], [72.80219780219781, [206, 110, 159]], [73.07692307692308, [208, 114, 163]], [73.35164835164836, [210, 116, 167]], [73.62637362637363, [211, 120, 171]], [73.9010989010989, [213, 122, 174]], [74.17582417582418, [215, 125, 178]], [74.45054945054946, [216, 128, 182]], [74.72527472527473, [218, 131, 185]], [75.0, [220, 135, 189]], ], min_value: 0, max_value: 76} tpwGridPrior: name: tpwGridPrior operations: - name: tpw_nrl_brightened method: !!python/name:satpy.enhancements.mimic.total_precipitable_water kwargs: palettes: {colors: [[0.0, [188, 132, 98]], [0.27472527472527475, [188, 130, 99]], [0.5494505494505495, [187, 128, 100]], [0.8241758241758242, [186, 125, 101]], [1.098901098901099, [185, 124, 102]], [1.3736263736263736, [184, 122, 103]], [1.6483516483516485, [183, 120, 103]], [1.9230769230769234, [182, 119, 104]], [2.197802197802198, [182, 118, 106]], [2.4725274725274726, [181, 116, 107]], [2.7472527472527473, [180, 114, 108]], [3.0219780219780223, [179, 114, 108]], [3.296703296703297, [178, 113, 109]], [3.5714285714285716, [177, 111, 110]], 
[3.8461538461538467, [177, 110, 111]], [4.120879120879121, [176, 108, 111]], [4.395604395604396, [176, 106, 110]], [4.670329670329671, [175, 104, 110]], [4.945054945054945, [174, 103, 111]], [5.21978021978022, [174, 101, 111]], [5.4945054945054945, [173, 99, 111]], [5.76923076923077, [172, 97, 111]], [6.043956043956045, [171, 95, 112]], [6.318681318681319, [171, 93, 112]], [6.593406593406594, [171, 91, 113]], [6.868131868131869, [170, 90, 113]], [7.142857142857143, [169, 88, 114]], [7.417582417582418, [169, 86, 114]], [7.692307692307693, [168, 85, 115]], [7.967032967032968, [167, 83, 115]], [8.241758241758243, [166, 81, 116]], [8.516483516483516, [166, 80, 118]], [8.791208791208792, [165, 78, 119]], [9.065934065934067, [165, 76, 120]], [9.340659340659341, [164, 75, 120]], [9.615384615384617, [164, 74, 121]], [9.89010989010989, [163, 72, 123]], [10.164835164835166, [162, 70, 124]], [10.43956043956044, [161, 69, 125]], [10.714285714285715, [160, 67, 126]], [10.989010989010989, [160, 66, 128]], [11.263736263736265, [159, 64, 130]], [11.53846153846154, [159, 63, 131]], [11.813186813186814, [158, 61, 132]], [12.08791208791209, [158, 60, 134]], [12.362637362637363, [157, 58, 136]], [12.637362637362639, [156, 57, 137]], [12.912087912087912, [155, 56, 139]], [13.186813186813188, [155, 54, 141]], [13.461538461538463, [154, 52, 142]], [13.736263736263737, [154, 52, 144]], [14.010989010989013, [153, 50, 146]], [14.285714285714286, [153, 49, 148]], [14.560439560439562, [152, 47, 150]], [14.835164835164836, [150, 46, 151]], [15.109890109890111, [147, 45, 150]], [15.384615384615387, [144, 44, 150]], [15.65934065934066, [142, 44, 152]], [15.934065934065936, [138, 48, 156]], [16.20879120879121, [135, 50, 159]], [16.483516483516485, [132, 52, 161]], [16.75824175824176, [131, 56, 164]], [17.032967032967033, [126, 60, 168]], [17.30769230769231, [123, 62, 171]], [17.582417582417584, [121, 65, 173]], [17.857142857142858, [117, 69, 177]], [18.131868131868135, [114, 71, 180]], [18.40659340659341, [111, 74, 182]], [18.681318681318682, [109, 77, 185]], [18.956043956043956, [104, 82, 190]], [19.230769230769234, [101, 84, 193]], [19.505494505494507, [98, 86, 195]], [19.78021978021978, [96, 89, 198]], [20.05494505494506, [93, 92, 200]], [20.329670329670332, [90, 95, 204]], [20.604395604395606, [87, 98, 207]], [20.87912087912088, [83, 103, 211]], [21.153846153846157, [80, 105, 214]], [21.42857142857143, [77, 108, 216]], [21.703296703296704, [74, 110, 220]], [21.978021978021978, [71, 114, 222]], [22.252747252747255, [68, 116, 225]], [22.52747252747253, [65, 120, 228]], [22.802197802197803, [61, 125, 233]], [23.07692307692308, [57, 127, 235]], [23.351648351648354, [55, 130, 239]], [23.626373626373628, [52, 133, 242]], [23.9010989010989, [49, 137, 245]], [24.17582417582418, [47, 139, 247]], [24.450549450549453, [44, 142, 250]], [24.725274725274726, [40, 146, 255]], [25.000000000000004, [40, 148, 255]], [25.274725274725277, [42, 150, 255]], [25.54945054945055, [46, 154, 255]], [25.824175824175825, [50, 158, 255]], [26.098901098901102, [52, 159, 255]], [26.373626373626376, [55, 163, 255]], [26.64835164835165, [59, 167, 255]], [26.923076923076927, [61, 169, 255]], [27.1978021978022, [65, 173, 255]], [27.472527472527474, [70, 178, 255]], [27.747252747252748, [73, 182, 255]], [28.021978021978025, [76, 185, 255]], [28.2967032967033, [79, 188, 255]], [28.571428571428573, [82, 192, 255]], [28.84615384615385, [86, 195, 255]], [29.120879120879124, [88, 199, 255]], [29.395604395604398, [91, 201, 255]], [29.67032967032967, [95, 205, 
255]], [29.94505494505495, [97, 207, 255]], [30.219780219780223, [101, 210, 255]], [30.494505494505496, [104, 213, 255]], [30.769230769230774, [107, 216, 255]], [31.043956043956047, [110, 218, 255]], [31.31868131868132, [114, 222, 255]], [31.593406593406595, [115, 223, 255]], [31.868131868131872, [119, 227, 255]], [32.142857142857146, [123, 231, 255]], [32.41758241758242, [125, 233, 255]], [32.69230769230769, [127, 236, 255]], [32.96703296703297, [133, 241, 255]], [33.24175824175825, [136, 244, 255]], [33.51648351648352, [139, 247, 255]], [33.791208791208796, [143, 252, 255]], [34.065934065934066, [145, 254, 255]], [34.34065934065934, [148, 255, 254]], [34.61538461538462, [148, 255, 247]], [34.89010989010989, [148, 255, 241]], [35.16483516483517, [148, 255, 235]], [35.439560439560445, [148, 255, 229]], [35.714285714285715, [148, 255, 223]], [35.98901098901099, [148, 255, 217]], [36.26373626373627, [148, 255, 210]], [36.53846153846154, [148, 255, 205]], [36.81318681318682, [148, 255, 199]], [37.08791208791209, [148, 255, 193]], [37.362637362637365, [148, 255, 187]], [37.63736263736264, [148, 255, 181]], [37.91208791208791, [148, 255, 174]], [38.18681318681319, [148, 255, 168]], [38.46153846153847, [148, 255, 162]], [38.73626373626374, [148, 255, 156]], [39.010989010989015, [148, 255, 150]], [39.28571428571429, [151, 255, 148]], [39.56043956043956, [157, 255, 148]], [39.83516483516484, [163, 255, 148]], [40.10989010989012, [169, 255, 148]], [40.38461538461539, [175, 255, 148]], [40.659340659340664, [181, 255, 148]], [40.934065934065934, [188, 255, 148]], [41.20879120879121, [197, 255, 148]], [41.48351648351649, [203, 255, 148]], [41.75824175824176, [209, 255, 148]], [42.032967032967036, [215, 255, 148]], [42.307692307692314, [221, 255, 148]], [42.582417582417584, [227, 255, 148]], [42.85714285714286, [233, 255, 148]], [43.13186813186814, [239, 255, 148]], [43.40659340659341, [244, 255, 148]], [43.681318681318686, [250, 255, 148]], [43.956043956043956, [254, 254, 146]], [44.23076923076923, [255, 251, 143]], [44.50549450549451, [255, 249, 141]], [44.78021978021978, [255, 247, 139]], [45.05494505494506, [255, 242, 134]], [45.329670329670336, [255, 239, 131]], [45.604395604395606, [255, 236, 128]], [45.87912087912088, [255, 233, 125]], [46.15384615384616, [255, 231, 122]], [46.42857142857143, [255, 227, 120]], [46.70329670329671, [255, 225, 117]], [46.978021978021985, [255, 221, 113]], [47.252747252747255, [255, 218, 110]], [47.52747252747253, [255, 216, 108]], [47.8021978021978, [255, 211, 103]], [48.07692307692308, [255, 209, 101]], [48.35164835164836, [255, 206, 98]], [48.62637362637363, [255, 204, 96]], [48.901098901098905, [255, 199, 91]], [49.17582417582418, [255, 196, 87]], [49.45054945054945, [255, 193, 85]], [49.72527472527473, [255, 191, 82]], [50.00000000000001, [255, 188, 80]], [50.27472527472528, [255, 185, 77]], [50.549450549450555, [255, 182, 74]], [50.82417582417583, [255, 179, 70]], [51.0989010989011, [255, 176, 68]], [51.37362637362638, [255, 173, 64]], [51.64835164835165, [255, 171, 61]], [51.92307692307693, [255, 167, 58]], [52.197802197802204, [255, 164, 55]], [52.472527472527474, [255, 161, 52]], [52.74725274725275, [255, 158, 49]], [53.02197802197803, [255, 154, 46]], [53.2967032967033, [255, 151, 42]], [53.57142857142858, [255, 148, 40]], [53.846153846153854, [252, 144, 39]], [54.120879120879124, [249, 140, 39]], [54.3956043956044, [246, 136, 39]], [54.67032967032967, [243, 132, 39]], [54.94505494505495, [240, 128, 39]], [55.219780219780226, [237, 125, 39]], 
[55.494505494505496, [234, 121, 39]], [55.769230769230774, [231, 118, 39]], [56.04395604395605, [227, 114, 39]], [56.31868131868132, [225, 111, 39]], [56.5934065934066, [222, 108, 39]], [56.868131868131876, [219, 104, 39]], [57.142857142857146, [216, 101, 39]], [57.41758241758242, [213, 97, 39]], [57.6923076923077, [210, 95, 39]], [57.96703296703297, [206, 91, 39]], [58.24175824175825, [204, 89, 39]], [58.51648351648352, [200, 86, 39]], [58.791208791208796, [198, 83, 39]], [59.06593406593407, [194, 80, 39]], [59.34065934065934, [192, 78, 39]], [59.61538461538462, [188, 75, 39]], [59.8901098901099, [185, 73, 39]], [60.16483516483517, [182, 70, 39]], [60.439560439560445, [179, 68, 39]], [60.71428571428572, [176, 66, 39]], [60.98901098901099, [173, 63, 39]], [61.26373626373627, [171, 62, 39]], [61.53846153846155, [169, 59, 39]], [61.81318681318682, [167, 57, 40]], [62.087912087912095, [165, 56, 40]], [62.362637362637365, [165, 54, 40]], [62.63736263736264, [163, 52, 40]], [62.91208791208792, [161, 50, 41]], [63.18681318681319, [159, 48, 42]], [63.46153846153847, [159, 47, 42]], [63.736263736263744, [157, 46, 43]], [64.01098901098902, [155, 44, 43]], [64.28571428571429, [154, 44, 45]], [64.56043956043956, [156, 45, 48]], [64.83516483516485, [157, 46, 52]], [65.10989010989012, [159, 48, 55]], [65.38461538461539, [160, 50, 58]], [65.65934065934067, [162, 52, 62]], [65.93406593406594, [164, 53, 65]], [66.20879120879121, [165, 55, 69]], [66.4835164835165, [167, 57, 72]], [66.75824175824177, [169, 59, 76]], [67.03296703296704, [171, 61, 80]], [67.3076923076923, [172, 63, 83]], [67.58241758241759, [174, 65, 87]], [67.85714285714286, [176, 67, 91]], [68.13186813186813, [177, 69, 95]], [68.40659340659342, [179, 71, 98]], [68.68131868131869, [181, 73, 102]], [68.95604395604396, [182, 75, 106]], [69.23076923076924, [184, 78, 109]], [69.50549450549451, [186, 80, 114]], [69.78021978021978, [188, 82, 117]], [70.05494505494507, [189, 85, 121]], [70.32967032967034, [191, 87, 125]], [70.6043956043956, [193, 90, 129]], [70.87912087912089, [194, 92, 132]], [71.15384615384616, [196, 95, 137]], [71.42857142857143, [198, 97, 140]], [71.70329670329672, [199, 100, 144]], [71.97802197802199, [201, 103, 148]], [72.25274725274726, [203, 105, 152]], [72.52747252747254, [205, 108, 155]], [72.80219780219781, [206, 110, 159]], [73.07692307692308, [208, 114, 163]], [73.35164835164836, [210, 116, 167]], [73.62637362637363, [211, 120, 171]], [73.9010989010989, [213, 122, 174]], [74.17582417582418, [215, 125, 178]], [74.45054945054946, [216, 128, 182]], [74.72527472527473, [218, 131, 185]], [75.0, [220, 135, 189]], ], min_value: 0, max_value: 76} satpy-0.20.0/satpy/etc/enhancements/viirs.yaml000066400000000000000000000076101362525524100213450ustar00rootroot00000000000000enhancements: water_detection: name: WaterDetection operations: - name: WaterDetection method: !!python/name:satpy.enhancements.viirs.water_detection kwargs: palettes: {colors: [[14, [0.0, 0.0, 0.0]], [15, [0.0, 0.0, 0.39215686274509803]], [16, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118]], [17, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118]], [18, [0.0, 0.0, 1.0]], [20, [1.0, 1.0, 1.0]], [27, [0.0, 1.0, 1.0]], [30, [0.7843137254901961, 0.7843137254901961, 0.7843137254901961]], [31, [0.39215686274509803, 0.39215686274509803, 0.39215686274509803]], [88, [0.7058823529411765, 0.0, 0.9019607843137255]], [100, [0.19607843137254902, 1.0, 0.39215686274509803]], [120, [0.19607843137254902, 1.0, 0.39215686274509803]], [121, [0.0, 1.0, 
0.0]], [130, [0.0, 1.0, 0.0]], [131, [0.7843137254901961, 1.0, 0.0]], [140, [0.7843137254901961, 1.0, 0.0]], [141, [1.0, 1.0, 0.5882352941176471]], [150, [1.0, 1.0, 0.5882352941176471]], [151, [1.0, 1.0, 0.0]], [160, [1.0, 1.0, 0.0]], [161, [1.0, 0.7843137254901961, 0.0]], [170, [1.0, 0.7843137254901961, 0.0]], [171, [1.0, 0.5882352941176471, 0.19607843137254902]], [180, [1.0, 0.5882352941176471, 0.19607843137254902]], [181, [1.0, 0.39215686274509803, 0.0]], [190, [1.0, 0.39215686274509803, 0.0]], [191, [1.0, 0.0, 0.0]], [200, [1.0, 0.0, 0.0]], [201, [0.0, 0.0, 0.0]]], min_value: 0, max_value: 201} # palettes: {colors: # [[14, [0.0, 0.0, 0.0, 0.0]], # [15, [0.0, 0.0, 0.39215686274509803, 1.0]], # [16, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118, 1.0]], # [17, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118, 1.0]], # [18, [0.0, 0.0, 1.0, 1.0]], # [20, [1.0, 1.0, 1.0, 1.0]], # [27, [0.0, 1.0, 1.0, 1.0]], # [30, [0.7843137254901961, 0.7843137254901961, 0.7843137254901961, 1.0]], # [31, [0.39215686274509803, 0.39215686274509803, 0.39215686274509803, 1.0]], # [88, [0.7058823529411765, 0.0, 0.9019607843137255, 1.0]], # [100, [0.19607843137254902, 1.0, 0.39215686274509803, 1.0]], # [120, [0.19607843137254902, 1.0, 0.39215686274509803, 1.0]], # [121, [0.0, 1.0, 0.0, 1.0]], # [130, [0.0, 1.0, 0.0, 1.0]], # [131, [0.7843137254901961, 1.0, 0.0, 1.0]], # [140, [0.7843137254901961, 1.0, 0.0, 1.0]], # [141, [1.0, 1.0, 0.5882352941176471, 1.0]], # [150, [1.0, 1.0, 0.5882352941176471, 1.0]], # [151, [1.0, 1.0, 0.0, 1.0]], # [160, [1.0, 1.0, 0.0, 1.0]], # [161, [1.0, 0.7843137254901961, 0.0, 1.0]], # [170, [1.0, 0.7843137254901961, 0.0, 1.0]], # [171, [1.0, 0.5882352941176471, 0.19607843137254902, 1.0]], # [180, [1.0, 0.5882352941176471, 0.19607843137254902, 1.0]], # [181, [1.0, 0.39215686274509803, 0.0, 1.0]], # [190, [1.0, 0.39215686274509803, 0.0, 1.0]], # [191, [1.0, 0.0, 0.0, 1.0]], # [200, [1.0, 0.0, 0.0, 1.0]], # [201, [0.0, 0.0, 0.0, 0.0]]], # min_value: 0, # max_value: 201} satpy-0.20.0/satpy/etc/eps_avhrrl1b_6.5.xml000066400000000000000000002352341362525524100203640ustar00rootroot00000000000000 40 april04 50 nov05 100 launch current 65 EPS AVHRR/3 Level 1B Format This AVHRR/3 1B description was generated using the AVHRR/3 PFS Excel document Issue 6 Revision 5 (eps_avhrrl1_6.5_names_masks.xls) and pfs2xml version 3.3 AVHR_*1B_*Z* Geolocation AVHRR Geolocation Coverage (Latitude, Longitude) mdr-1b[].EARTH_LOCATIONS[][0] mdr-1b[].EARTH_LOCATIONS[][1] Channel 1 AVHRR Scene Radiance for Channel 1 mdr-1b[].SCENE_RADIANCES[0][] Geolocation Channel 2 AVHRR Scene Radiance for Channel 2 mdr-1b[].SCENE_RADIANCES[1][] Geolocation Channel 3a/b AVHRR Scene Radiance for Channel 3a/b mdr-1b[].SCENE_RADIANCES[2][] Geolocation Channel 4 AVHRR Scene Radiance for Channel 4 mdr-1b[].SCENE_RADIANCES[3][] Geolocation Channel 5 AVHRR Scene Radiance for Channel 5 mdr-1b[].SCENE_RADIANCES[4][] Geolocation Uniformity Test A AVHRR Cloud Information Uniformity Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#Uniformity_test_a Geolocation Uniformity Test B AVHRR Cloud Information Uniformity Test A (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#Uniformity_test_b Geolocation T3-T5 Test A AVHRR Cloud Information T3-T5 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T3_T5_test_a Geolocation T3-T5 Test B AVHRR Cloud Information T3-T5 Test B (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#T3_T5_test_b Geolocation T4-T3 
Test A AVHRR Cloud Information T4-T3 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T4_T3_test_a Geolocation T4-T3 Test B AVHRR Cloud Information T4-T3 Test B (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#T4_T3_test_b Geolocation T4-T5 Test A AVHRR Cloud Information T4-T5 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T4_T5_test_a Geolocation T4-T5 Test B AVHRR Cloud Information T4-T5 Test B (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#T4_T5_test_b Geolocation Albedo Test A AVHRR Cloud Information Albedo Test A (0=test failed or clear, 1=cloudy or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#Albedo_test_a Geolocation Albedo Test B AVHRR Cloud Information Albedo Test B (0 =test failed or cloudy, 1=clear or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#Albedo_test_b Geolocation T4 Test A AVHRR Cloud Information T4 Test A (0=test failed or clear, 1=cloudy or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#T4_test_a Geolocation T4 Test B AVHRR Cloud Information T4 Test B (0 =test failed or cloudy, 1=clear or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#T4_test_b Geolocation Test Situations AVHRR Cloud Information - number of the test situations (11 different test situations) mdr-1b[].CLOUD_INFORMATION[].#Number_of_the_test_situation Geolocation eps-product satpy-0.20.0/satpy/etc/ninjo-cmd.yaml000066400000000000000000000004071362525524100174140ustar00rootroot00000000000000# sample config file holding NinJo Tiff metadata # to be passed to the convert_to_ninjotif.py script # as an alternative to the command line input chan_id : 662626 sat_id : 1622 data_cat : GPRN data_src : EUMETCAST area : nrEURO1km_NPOL_COALeqc ph_unit : CELSIUS satpy-0.20.0/satpy/etc/readers/000077500000000000000000000000001362525524100162765ustar00rootroot00000000000000satpy-0.20.0/satpy/etc/readers/abi_l1b.yaml000066400000000000000000000277051362525524100204660ustar00rootroot00000000000000# References: # - GOES-R Series Data Book, Chapter 3 # # Note: Channels < 3 microns have different units than channels > 3 microns reader: name: abi_l1b short_name: ABI L1b long_name: GOES-R ABI Level 1b description: > GOES-R ABI Level 1b data reader in the NetCDF4 format. The file format is described in the GOES-R Product Definition and Users' Guide (PUG). Volume 4 of this document can be found `here `_. 
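# Example usage of this reader through the Satpy Scene API (a minimal
# sketch; the filename below is hypothetical but follows the file_patterns
# defined further down):
#
#   from satpy import Scene
#
#   scn = Scene(reader='abi_l1b',
#               filenames=['OR_ABI-L1b-RadF-M6C01_G16_s20192601200344_e20192601209710_c20192601210021.nc'])
#   scn.load(['C01'])  # visible channels calibrate to reflectance by default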
sensors: [abi] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] file_types: # NOTE: observation_type == product acronym in PUG document c01: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}'] c02: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}'] c03: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}'] c04: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c05: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}'] c06: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c07: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c08: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c09: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c10: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c11: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c12: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c13: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c14: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c15: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] c16: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}'] datasets: C01: name: C01 wavelength: [0.450, 0.470, 0.490] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c01 C02: name: C02 wavelength: [0.590, 0.640, 0.690] resolution: 500 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c02 C03: name: C03 wavelength: [0.8455, 0.865, 0.8845] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c03 C04: name: C04 wavelength: [1.3705, 1.378, 1.3855] resolution: 2000 calibration: radiance: standard_name: 
toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c04 C05: name: C05 wavelength: [1.580, 1.610, 1.640] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c05 C06: name: C06 wavelength: [2.225, 2.250, 2.275] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c06 C07: name: C07 wavelength: [3.80, 3.90, 4.00] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c07 C08: name: C08 wavelength: [5.770, 6.185, 6.600] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c08 C09: name: C09 wavelength: [6.75, 6.95, 7.15] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c09 C10: name: C10 wavelength: [7.24, 7.34, 7.44] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c10 C11: name: C11 wavelength: [8.30, 8.50, 8.70] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c11 C12: name: C12 wavelength: [9.42, 9.61, 9.80] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c12 C13: name: C13 wavelength: [10.10, 10.35, 10.60] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c13 C14: name: C14 wavelength: [10.80, 11.20, 11.60] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c14 C15: name: C15 wavelength: [11.80, 12.30, 12.80] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c15 C16: name: C16 wavelength: [13.00, 13.30, 13.60] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c16 satpy-0.20.0/satpy/etc/readers/abi_l1b_scmi.yaml000066400000000000000000000304301362525524100214660ustar00rootroot00000000000000reader: description: SCMI NetCDF4 Reader for ABI data name: abi_l1b_scmi sensors: [] default_channels: reader: 
!!python/name:satpy.readers.yaml_reader.FileYAMLReader # Typical filenames from Unidata THREDDS server: # Server: http://thredds-test.unidata.ucar.edu/thredds/catalog/satellite/goes16/GOES16/CONUS/Channel02/current/catalog.xml # satellite/goes16/GOES16/CONUS/Channel02/current/GOES16_CONUS_20180912_201712_0.64_500m_30.1N_87.1W.nc4 # Typical names for operational/official SCMI files (CSPP Geo, etc): # CG_EFD-005-B12-M3C02-T131_G16_s2018257024530_c2018257132258.nc # Operational/official SCMI files for Meso sectors: # CG_EMESO-020-B14-S1-N34W077-M3C07-T001_G16_s2018257000420_c2018257142255.nc file_types: c01: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.47_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C01-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C01-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c02: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.64_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C02-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C02-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c03: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.87_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C03-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C03-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c04: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_1.38_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C04-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C04-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c05: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_1.61_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C05-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C05-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c06: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_2.25_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C06-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C06-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c07: 
file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_3.90_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C07-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C07-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c08: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_6.19_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C08-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C08-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c09: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_6.95_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C09-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C09-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c10: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_7.34_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C10-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C10-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c11: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_8.50_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C11-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C11-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c12: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_9.61_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C12-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C12-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c13: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_10.35_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C13-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C13-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c14: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - 
'{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_11.20_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C14-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C14-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c15: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_12.30_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C15-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C15-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c16: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_13.30_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C16-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C16-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' datasets: C01: name: C01 sensor: abi wavelength: [0.450, 0.470, 0.490] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c01 C02: name: C02 sensor: abi wavelength: [0.590, 0.640, 0.690] resolution: 500 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c02 C03: name: C03 sensor: abi wavelength: [0.8455, 0.865, 0.8845] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c03 C04: name: C04 sensor: abi wavelength: [1.3705, 1.378, 1.3855] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c04 C05: name: C05 sensor: abi wavelength: [1.580, 1.610, 1.640] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c05 C06: name: C06 sensor: abi wavelength: [2.225, 2.250, 2.275] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c06 C07: name: C07 sensor: abi wavelength: [3.80, 3.90, 4.00] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c07 C08: name: C08 sensor: abi wavelength: [5.770, 6.185, 6.600] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c08 C09: name: C09 sensor: abi wavelength: [6.75, 6.95, 7.15] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c09 C10: name: C10 sensor: abi wavelength: [7.24, 7.34, 7.44] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c10 C11: name: C11 sensor: abi wavelength: [8.30, 8.50, 8.70] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c11 C12: name: C12 sensor: abi wavelength: [9.42, 9.61, 9.80] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c12 C13: name: C13 sensor: abi 
wavelength: [10.10, 10.35, 10.60] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c13 C14: name: C14 sensor: abi wavelength: [10.80, 11.20, 11.60] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c14 C15: name: C15 sensor: abi wavelength: [11.80, 12.30, 12.80] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c15 C16: name: C16 sensor: abi wavelength: [13.00, 13.30, 13.60] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: c16 satpy-0.20.0/satpy/etc/readers/abi_l2_nc.yaml000066400000000000000000000510401362525524100207720ustar00rootroot00000000000000reader: name: abi_l2_nc short_name: ABI L2 NetCDF4 long_name: GOES-R ABI Level 2 NetCDF4 description: > GOES-R ABI Level 2+ data reader in the NetCDF4 format. The file format is described in the GOES-R Product Definition and Users' Guide (PUG) Volume 5. This document can be found `here `_. sensors: ['abi'] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] datasets: # --- Cloud Moisture Image Products --- CMIP_C01: # Cloud Moisture Image Products Channel 1 name: C01 wavelength: [0.450, 0.470, 0.490] calibration: reflectance file_type: abi_l2_cmip_c01 file_key: CMI CMIP_C02: # Cloud Moisture Image Products Channel 2 name: C02 wavelength: [0.590, 0.640, 0.690] calibration: reflectance file_type: abi_l2_cmip_c02 file_key: CMI CMIP_C03: # Cloud Moisture Image Products Channel 3 name: C03 wavelength: [0.8455, 0.865, 0.8845] calibration: reflectance file_type: abi_l2_cmip_c03 file_key: CMI CMIP_C04: # Cloud Moisture Image Products Channel 4 name: C04 wavelength: [1.3705, 1.378, 1.3855] calibration: reflectance file_type: abi_l2_cmip_c04 file_key: CMI CMIP_C05: # Cloud Moisture Image Products Channel 5 name: C05 wavelength: [1.580, 1.610, 1.640] calibration: reflectance file_type: abi_l2_cmip_c05 file_key: CMI CMIP_C06: # Cloud Moisture Image Products Channel 6 name: C06 wavelength: [2.225, 2.250, 2.275] calibration: reflectance file_type: abi_l2_cmip_c06 file_key: CMI CMIP_C07: # Cloud Moisture Image Products Channel 7 name: C07 wavelength: [3.80, 3.90, 4.00] calibration: brightness_temperature file_type: abi_l2_cmip_c07 file_key: CMI CMIP_C08: # Cloud Moisture Image Products Channel 8 name: C08 wavelength: [5.770, 6.185, 6.600] calibration: brightness_temperature file_type: abi_l2_cmip_c08 file_key: CMI CMIP_C09: # Cloud Moisture Image Products Channel 9 name: C09 wavelength: [6.75, 6.95, 7.15] calibration: brightness_temperature file_type: abi_l2_cmip_c09 file_key: CMI CMIP_C10: # Cloud Moisture Image Products Channel 10 name: C10 wavelength: [7.24, 7.34, 7.44] calibration: brightness_temperature file_type: abi_l2_cmip_c10 file_key: CMI CMIP_C11: # Cloud Moisture Image Products Channel 11 name: C11 wavelength: [8.30, 8.50, 8.70] calibration: brightness_temperature file_type: abi_l2_cmip_c11 file_key: CMI CMIP_C12: # Cloud Moisture Image Products Channel 12 name: C12 wavelength: [9.42, 9.61, 9.80] calibration: brightness_temperature file_type: abi_l2_cmip_c12 file_key: CMI CMIP_C13: # Cloud Moisture Image Products Channel 13 name: C13 wavelength: [10.10, 10.35, 10.60] calibration: brightness_temperature file_type: abi_l2_cmip_c13 file_key: CMI 
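# Note on the dataset entries above and below: the YAML key (e.g. CMIP_C13)
# is only an internal label; 'name' is what users request from Satpy, and
# 'file_key' names the NetCDF variable to read ('CMI' for every Cloud and
# Moisture Imagery channel). A minimal usage sketch, with a hypothetical
# filename matching the CMIP file_patterns defined under file_types below:
#
#   from satpy import Scene
#
#   scn = Scene(reader='abi_l2_nc',
#               filenames=['OR_ABI-L2-CMIPF-M6C13_G16_s20192601200344_e20192601209710_c20192601210021.nc'])
#   scn.load(['C13'])  # reads the 'CMI' variable as the C13 brightness temperature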
CMIP_C14: # Cloud Moisture Image Products Channel 14 name: C14 wavelength: [10.80, 11.20, 11.60] calibration: brightness_temperature file_type: abi_l2_cmip_c14 file_key: CMI CMIP_C15: # Cloud Moisture Image Products Channel 15 name: C15 wavelength: [11.80, 12.30, 12.80] calibration: brightness_temperature file_type: abi_l2_cmip_c15 file_key: CMI CMIP_C16: # Cloud Moisture Image Products Channel 16 name: C16 wavelength: [13.00, 13.30, 13.60] calibration: brightness_temperature file_type: abi_l2_cmip_c16 file_key: CMI # --- Cloud Top Height --- cloud_top_height: name: HT file_type: abi_l2_acha file_key: HT # variable name in the nc files # --- Cloud Top Temperature --- cloud_top_temperature: name: TEMP file_type: abi_l2_acht file_key: TEMP # --- Cloud Top Phase --- cloud_top_phase: name: Phase file_type: abi_l2_actp file_key: Phase # --- Clear Sky Mask --- clear_sky_mask: name: BCM file_type: abi_l2_acm file_key: BCM # --- Aerosol Detection Products --- aerosol_binary_mask: name: Aerosol file_type: abi_l2_adp file_key: Aerosol smoke_binary_mask: name: Smoke file_type: abi_l2_adp file_key: Smoke dust_binary_mask: name: Dust file_type: abi_l2_adp file_key: Dust # --- Aerosol Optical Depth at 550 nm --- aerosol_optical_depth: name: AOD file_type: abi_l2_aod file_key: AOD # --- Cloud Optical Depth at 640 nm --- cloud_optical_depth: name: COD file_type: abi_l2_cod file_key: COD cloud_optical_depth_day: name: CODD file_type: abi_l2_codd file_key: COD cloud_optical_depth_night: name: CODN file_type: abi_l2_codn file_key: COD # --- Cloud Particle Size --- cloud_particle_size: name: PSD file_type: abi_l2_cps file_key: PSD cloud_particle_size_day: name: PSDD file_type: abi_l2_cpsd file_key: PSD cloud_particle_size_night: name: PSDN file_type: abi_l2_cpsn file_key: PSD # --- Cloud Top Pressure --- cloud_top_pressure: name: PRES file_type: abi_l2_ctp file_key: PRES # --- Derived Stability Indices --- cape: name: CAPE file_type: abi_l2_dsi file_key: CAPE total_totals_index: name: TT file_type: abi_l2_dsi file_key: TT lifted_index: name: LI file_type: abi_l2_dsi file_key: LI showalter_index: name: SI file_type: abi_l2_dsi file_key: SI k_index: name: KI file_type: abi_l2_dsi file_key: KI # --- Fire (Hot Spot Characterization) Products --- fire_area: name: Area file_type: abi_l2_fdc file_key: Area fire_temp: name: Temp file_type: abi_l2_fdc file_key: Temp radiative_power: name: Power file_type: abi_l2_fdc file_key: Power fire_mask: name: Mask file_type: abi_l2_fdc file_key: Mask # --- Snow Cover --- snow_cover_fraction: name: FSC file_type: abi_l2_fsc file_key: FSC # --- Reflected Shortwave Radiation --- reflected_shortwave_radiation: name: RSR file_type: abi_l2_rsr file_key: RSR # coordinates: [lon, lat] # --- Downward Shortwave Radiation: Surface --- downward_shortwave_radiation: name: DSR file_type: abi_l2_dsr file_key: DSR # coordinates: [lon, lat] # --- Land Surface (Skin) Temperature --- land_surface_temperature: name: LST file_type: abi_l2_lst file_key: LST # --- Sea Surface (Skin) Temperature --- sea_surface_temperature: name: SST file_type: abi_l2_sst file_key: SST # --- Rainfall Rate - Quantitative Precipitation Estimate --- rainfall_rate: name: RRQPE file_type: abi_l2_rrqpe file_key: RRQPE # --- Total Precipitable Water --- total_precipitable_water: name: TPW file_type: abi_l2_tpw file_key: TPW # --- Volcanic Ash Products --- ash_cloud_height: name: VAH file_type: abi_l2_vaa file_key: VAH ash_mass_loading: name: VAML file_type: abi_l2_vaa file_key: VAML # --- Navigation Products - Unofficial ---
nav_longitude: name: Longitude file_type: abi_l2_nav file_key: Longitude nav_latitude: name: Latitude file_type: abi_l2_nav file_key: Latitude # ---- file_types: abi_l2_cmip_c01: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c02: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c03: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c04: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c05: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c06: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c07: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c08: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c09: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c10: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c11: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c12: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c13: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c14: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c15: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cmip_c16: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_acha: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_acht: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHT{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_acm: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_actp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_adp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ADP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_aod: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-AOD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_cod: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: # F (Full Disk) or C (CONUS) - '{system_environment:2s}_{mission_id:3s}-L2-COD{area_code:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' # M1 or M2 for mesoscale - '{system_environment:2s}_{mission_id:3s}-L2-CODM{area_code:1d}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' # CSPP Geo keeps Day and
Night algorithm outputs separate abi_l2_codd: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CODD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' abi_l2_codn: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CODN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' abi_l2_cps: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: # F (Full Disk) or C (CONUS) - '{system_environment:2s}_{mission_id:3s}-L2-CPS{area_code:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' # M1 or M2 for mesoscale - '{system_environment:2s}_{mission_id:3s}-L2-CPSM{area_code:1d}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' # CSPP Geo keeps Day and Night algorithm outputs separate abi_l2_cpsd: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CPSD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' abi_l2_cpsn: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CPSN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' abi_l2_ctp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_dsi: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSI{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_drs: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DRS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_fdc: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FDC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_fsc: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FSC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_lst: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-LST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] abi_l2_rrqpe: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: 
  abi_l2_rrqpe:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RRQPE{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
  abi_l2_rsr:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
  abi_l2_dsr:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
  abi_l2_sst:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-SST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
  abi_l2_tpw:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-TPW{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
  abi_l2_vaa:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc',
                    '{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-128600_0.nc']
  # CSPP - Geo Unofficial product
  abi_l2_nav:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-NAV{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']

satpy-0.20.0/satpy/etc/readers/acspo.yaml

reader:
  description: NOAA Level 2 Product (L2P) ACSPO SST File Reader
  name: acspo
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs, modis, avhrr]
  default_datasets:

file_types:
  acspo_sst:
    file_reader: !!python/name:satpy.readers.acspo.ACSPOFileHandler
    file_patterns: ['{start_time:%Y%m%d%H%M%S}-{rdac:4s}-L2P_GHRSST-SSTskin-{sensor_id}-ACSPO_V{version}-v{gds_version}-fv{file_version}.nc']

datasets:
  longitude:
    name: longitude
    file_type: acspo_sst
    file_key: lon
  latitude:
    name: latitude
    file_type: acspo_sst
    file_key: lat
  sst:
    name: sst
    coordinates: [longitude, latitude]
    file_type: acspo_sst
    file_key: sea_surface_temperature
    cloud_clear: True
  satellite_zenith_angle:
    name: satellite_zenith_angle
    coordinates: [longitude, latitude]
    file_type: acspo_sst
  sea_ice_fraction:
    name: sea_ice_fraction
    coordinates: [longitude, latitude]
    file_type: acspo_sst
  wind_speed:
    name: wind_speed
    coordinates: [longitude, latitude]
    file_type: acspo_sst
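
# Usage sketch for the acspo reader defined above (the filename is a
# hypothetical example that follows the file_patterns entry, not a real file):
#
#   from satpy import Scene
#   scn = Scene(reader='acspo',
#               filenames=['20200101120000-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.61-v02.0-fv01.0.nc'])
#   # 'sst' pulls in its longitude/latitude coordinates automatically
#   scn.load(['sst', 'satellite_zenith_angle'])
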
satpy-0.20.0/satpy/etc/readers/agri_l1.yaml

# References:
# - L1_SDR Data of FY4A Advanced Geostationary Radiation Imager
# - http://fy4.nsmc.org.cn/data/en/data/realtime.html

reader:
  name: agri_l1
  description: FY-4A AGRI instrument HDF5 reader
  sensors: [agri]
  default_channels:
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  agri_l1_0500m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF']
  agri_l1_1000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_1000M_{version:s}.HDF']
  agri_l1_2000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF']
  agri_l1_4000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
  agri_l1_4000m_geo:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']

datasets:
  C01:
    name: C01
    wavelength: [0.45, 0.47, 0.49]
    resolution:
      1000: {file_type: agri_l1_1000m}
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel01
    lut_key: CALChannel01
    fill_value: 65535
  C02:
    name: C02
    wavelength: [0.55, 0.65, 0.75]
    resolution:
      500: {file_type: agri_l1_0500m}
      1000: {file_type: agri_l1_1000m}
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel02
    lut_key: CALChannel02
    fill_value: 65535
  C03:
    name: C03
    wavelength: [0.75, 0.83, 0.90]
    resolution:
      1000: {file_type: agri_l1_1000m}
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel03
    lut_key: CALChannel03
    fill_value: 65535
  C04:
    name: C04
    wavelength: [1.36, 1.37, 1.39]
    resolution:
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel04
    lut_key: CALChannel04
    fill_value: 65535
  C05:
    name: C05
    wavelength: [1.58, 1.61, 1.64]
    resolution:
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel05
    lut_key: CALChannel05
    fill_value: 65535
  C06:
    name: C06
    wavelength: [2.10, 2.22, 2.35]
    resolution:
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel06
    lut_key: CALChannel06
    fill_value: 65535
  C07:
    name: C07
    wavelength: [3.5, 3.72, 4.0]
    resolution:
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel07
    lut_key: CALChannel07
    fill_value: 65535
  C08:
    name: C08
    wavelength: [3.5, 3.72, 4.0]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel08
    lut_key: CALChannel08
    file_type: agri_l1_4000m
    fill_value: 65535
  C09:
    name: C09
    wavelength: [5.8, 6.25, 6.7]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel09
    lut_key: CALChannel09
    file_type: agri_l1_4000m
    fill_value: 65535
  C10:
    name: C10
    wavelength: [6.9, 7.10, 7.3]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel10
    lut_key: CALChannel10
    file_type: agri_l1_4000m
    fill_value: 65535
  C11:
    name: C11
    wavelength: [8.0, 8.5, 9.0]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel11
    lut_key: CALChannel11
    file_type: agri_l1_4000m
    fill_value: 65535
  C12:
    name: C12
    wavelength: [10.3, 10.8, 11.1]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel12
    lut_key: CALChannel12
    file_type: agri_l1_4000m
    fill_value: 65535
  C13:
    name: C13
    wavelength: [11.5, 12.0, 12.5]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel13
    lut_key: CALChannel13
    file_type: agri_l1_4000m
    fill_value: 65535
  C14:
    name: C14
    wavelength: [13.2, 13.5, 13.8]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel14
    lut_key: CALChannel14
    file_type: agri_l1_4000m
    fill_value: 65535
  solar_zenith_angle:
    name: solar_zenith_angle
    units: degree
    standard_name: solar_zenith_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunZenith
  solar_azimuth_angle:
    name: solar_azimuth_angle
    units: degree
    standard_name: solar_azimuth_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunAzimuth
  solar_glint_angle:
    name: solar_glint_angle
    units: degree
    standard_name: solar_glint_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunGlintAngle
  satellite_zenith_angle:
    name: satellite_zenith_angle
    units: degree
    standard_name: satellite_zenith_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSatelliteZenith
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    units: degree
    standard_name: satellite_azimuth_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSatelliteAzimuth
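
# Usage sketch: the per-resolution file_type maps above let the same channel be
# read from several FDI file types, so a resolution can be requested at load
# time. The glob below assumes hypothetical 1000 m FY-4A filenames.
#
#   from glob import glob
#   from satpy import Scene
#   scn = Scene(reader='agri_l1',
#               filenames=glob('FY4A-_AGRI--_N_*_L1-_FDI-_MULT_NOM_*_1000M_*.HDF'))
#   scn.load(['C01', 'C02'], resolution=1000)
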
satpy-0.20.0/satpy/etc/readers/ahi_hrit.yaml

# References:
# - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/spsg_ahi.html
# - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html

reader:
  name: ahi_hrit
  short_name: AHI HRIT
  long_name: Himawari AHI Level 1 (HRIT)
  description: Reader for the JMA Himawari AHI Level 1 data in HRIT format
  sensors: [ahi]
  reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
  # file pattern keys to sort files by with 'satpy.utils.group_files'
  group_keys: ['start_time', 'area']

file_types:
  hrit_b01_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b01_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}'
  hrit_b02_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b02_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}'
  hrit_b03_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b03_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}'
  hrit_b04_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b04_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}'
  hrit_b05_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b05_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}'
  hrit_b06_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b06_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}'
  hrit_b07_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    # B07 are high resolution versions of IR4 at night
    # See section 1.3 of
    # https://www.data.jma.go.jp/mscweb/en/himawari89/himawari_cast/note/HimawariCast_dataset_20150624_en.pdf
    file_patterns:
    - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b07_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    # B07 are high resolution versions of IR4 at night
    # See section 1.3 of
    # https://www.data.jma.go.jp/mscweb/en/himawari89/himawari_cast/note/HimawariCast_dataset_20150624_en.pdf
    file_patterns:
    - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}'
  hrit_b07_ir4_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b07_ir4_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}'
  hrit_b08_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b08_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}'
  hrit_b09_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b09_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}'
  hrit_b10_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b10_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}'
  hrit_b11_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b11_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}'
  hrit_b12_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b12_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}'
  hrit_b13_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b13_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}'
  hrit_b14_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b14_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}'
  hrit_b15_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b15_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}'
  hrit_b16_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}_{segment:03d}'
    expected_segments: 10
  hrit_b16_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
    - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}'

datasets:
  B01:
    name: B01
    sensor: ahi
    wavelength: [0.45,0.47,0.49]
    resolution: 1000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b01_seg, hrit_b01_fd]
  B02:
    name: B02
    sensor: ahi
    wavelength: [0.49,0.51,0.53]
    resolution: 1000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b02_seg, hrit_b02_fd]
  B03:
    name: B03
    sensor: ahi
    wavelength: [0.62,0.64,0.66]
    resolution: 1000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b03_seg, hrit_b03_fd]
  B04:
    name: B04
    sensor: ahi
    wavelength: [0.83, 0.85, 0.87]
    resolution: 4000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b04_seg, hrit_b04_fd]
  B05:
    name: B05
    sensor: ahi
    wavelength: [1.5, 1.6, 1.7]
    resolution: 4000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b05_seg, hrit_b05_fd]
  B06:
    name: B06
    sensor: ahi
    wavelength: [2.2, 2.3, 2.4]
    resolution: 4000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b06_seg, hrit_b06_fd]
  B07_low_res:
    name: B07
    resolution: 4000
    # resolution: 2000
    sensor: ahi
    wavelength: [3.7, 3.9, 4.1]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    # FUTURE: Split this in to multiple resolutions so each can be loaded
    file_type: [hrit_b07_seg, hrit_b07_ir4_seg, hrit_b07_fd, hrit_b07_ir4_fd]
#  B07_high_res:
#    name: B07
#    resolution: 2000
#    sensor: ahi
#    wavelength: [3.7, 3.9, 4.1]
#    calibration:
#      brightness_temperature:
#        standard_name: toa_brightness_temperature
#        units: "K"
#      counts:
#        standard_name: counts
#        units: 1
#    file_type: hrit_b07
  B08:
    name: B08
    sensor: ahi
    wavelength: [6.0, 6.2, 6.4]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b08_seg, hrit_b08_fd]
  B09:
    name: B09
    sensor: ahi
    wavelength: [6.7, 6.9, 7.1]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b09_seg, hrit_b09_fd]
  B10:
    name: B10
    sensor: ahi
    wavelength: [7.1, 7.3, 7.5]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b10_seg, hrit_b10_fd]
  B11:
    name: B11
    sensor: ahi
    wavelength: [8.4, 8.6, 8.8]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b11_seg, hrit_b11_fd]
  B12:
    name: B12
    sensor: ahi
    wavelength: [9.4, 9.6, 9.8]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b12_seg, hrit_b12_fd]
  B13:
    name: B13
    sensor: ahi
    wavelength: [10.2, 10.4, 10.6]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b13_seg, hrit_b13_fd]
  B14:
    name: B14
    sensor: ahi
    wavelength: [11.0, 11.2, 11.4]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b14_seg, hrit_b14_fd]
  B15:
    name: B15
    sensor: ahi
    wavelength: [12.2, 12.4, 12.6]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b15_seg, hrit_b15_fd]
  B16:
    name: B16
    sensor: ahi
    wavelength: [13.1, 13.3, 13.5]
    resolution: 4000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: [hrit_b16_seg, hrit_b16_fd]
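
# Usage sketch: the group_keys above control how segmented HimawariCast files
# are sorted into per-scene groups. A sketch with satpy's group_files helper,
# assuming hypothetical segment files in the working directory:
#
#   from glob import glob
#   from satpy import Scene
#   from satpy.readers import group_files
#   groups = group_files(glob('IMG_DK01*'), reader='ahi_hrit')
#   scenes = [Scene(filenames=g) for g in groups]  # one Scene per start_time/area
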
satpy-0.20.0/satpy/etc/readers/ahi_hsd.yaml

# References:
# - Himawari-8/9 Himawari Standard Data User's Guide

reader:
  name: ahi_hsd
  short_name: AHI HSD
  long_name: Himawari AHI Level 1b (HSD)
  description: Reader for the JMA Himawari AHI Level 1 data in HSD format
  reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
  sensors: [ahi]
  # file pattern keys to sort files by with 'satpy.utils.group_files'
  group_keys: ['start_time', 'platform_shortname', 'area']

datasets:
  B01:
    name: B01
    sensor: ahi
    wavelength: [0.45,0.47,0.49]
    resolution: 1000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b01
  B02:
    name: B02
    sensor: ahi
    wavelength: [0.49,0.51,0.53]
    resolution: 1000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b02
  B03:
    name: B03
    sensor: ahi
    wavelength: [0.62,0.64,0.66]
    resolution: 500
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b03
  B04:
    name: B04
    sensor: ahi
    wavelength: [0.83, 0.85, 0.87]
    resolution: 1000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b04
  B05:
    name: B05
    sensor: ahi
    wavelength: [1.5, 1.6, 1.7]
    resolution: 2000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b05
  B06:
    name: B06
    sensor: ahi
    wavelength: [2.2, 2.3, 2.4]
    resolution: 2000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b06
  B07:
    name: B07
    sensor: ahi
    wavelength: [3.7, 3.9, 4.1]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b07
  B08:
    name: B08
    sensor: ahi
    wavelength: [6.0, 6.2, 6.4]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b08
  B09:
    name: B09
    sensor: ahi
    wavelength: [6.7, 6.9, 7.1]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b09
  B10:
    name: B10
    sensor: ahi
    wavelength: [7.1, 7.3, 7.5]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b10
  B11:
    name: B11
    sensor: ahi
    wavelength: [8.4, 8.6, 8.8]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b11
  B12:
    name: B12
    sensor: ahi
    wavelength: [9.4, 9.6, 9.8]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b12
  B13:
    name: B13
    sensor: ahi
    wavelength: [10.2, 10.4, 10.6]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b13
  B14:
    name: B14
    sensor: ahi
    wavelength: [11.0, 11.2, 11.4]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b14
  B15:
    name: B15
    sensor: ahi
    wavelength: [12.2, 12.4, 12.6]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b15
  B16:
    name: B16
    sensor: ahi
    wavelength: [13.1, 13.3, 13.5]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: hsd_b16

file_types:
  hsd_b01:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R10_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b02:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R10_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b03:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R05_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R05_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b04:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R10_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b05:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b06:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b07:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b08:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b09:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b10:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b11:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b12:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b13:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b14:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b15:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
  hsd_b16:
    file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
    file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
                    'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
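
# Usage sketch: each band above lists several calibration levels, and one can
# be requested explicitly at load time instead of the default (reflectance or
# brightness_temperature). Hypothetical Himawari-8 segment files assumed:
#
#   from glob import glob
#   from satpy import Scene
#   scn = Scene(reader='ahi_hsd',
#               filenames=glob('HS_H08_20200101_1200_B07_FLDK_*.DAT'))
#   scn.load(['B07'], calibration='radiance')
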
satpy-0.20.0/satpy/etc/readers/ami_l1b.yaml

reader:
  name: ami_l1b
  short_name: AMI L1b
  long_name: GEO-KOMPSAT-2 AMI Level 1b
  description: >
    GEO-KOMPSAT-2 AMI Level 1b data reader in the NetCDF4 format.
    The file format and instrument are described on KMA's website `here `_.
  sensors: [ami]
  default_channels:
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  # file pattern keys to sort files by with 'satpy.utils.group_files'
  group_keys: ['start_time', 'platform_shortname', 'sensor', 'sector_info']

file_types:
  # Example: gk2a_ami_le1b_ir087_fd020ge_201901260310.nc
  # Below list is alphabetical
  ir087:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir087_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir096:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir096_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir105:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir105_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir112:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir112_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir123:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir123_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir133:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir133_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  nr013:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_nr013_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  nr016:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_nr016_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  sw038:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_sw038_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  vi004:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi004_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  vi005:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi005_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  vi006:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi006_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  vi008:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi008_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  wv063:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv063_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  wv069:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv069_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  wv073:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv073_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']

datasets:
  # Below list is ordered the same as the table:
  # https://directory.eoportal.org/web/eoportal/satellite-missions/content/-/article/geo-kompsat-2
  C01:
    name: VI004
    wavelength: [0.450, 0.470, 0.490]
    resolution: 1000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: vi004
    file_key: image_pixel_values
  C02:
    name: VI005
    wavelength: [0.495, 0.509, 0.523]
    resolution: 1000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: vi005
    file_key: image_pixel_values
  C03:
    name: VI006
    wavelength: [0.599, 0.639, 0.679]
    resolution: 500
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: vi006
    file_key: image_pixel_values
  C04:
    name: VI008
    wavelength: [0.846, 0.863, 0.880]
    resolution: 1000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: vi008
    file_key: image_pixel_values
  C05:
    name: NR013
    wavelength: [1.363, 1.37, 1.377]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: nr013
    file_key: image_pixel_values
  C06:
    name: NR016
    wavelength: [1.590, 1.61, 1.630]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: nr016
    file_key: image_pixel_values
  C07:
    name: SW038
    wavelength: [3.74, 3.83, 3.92]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: sw038
    file_key: image_pixel_values
  C08:
    name: WV063
    wavelength: [5.79, 6.21, 6.63]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: wv063
    file_key: image_pixel_values
  C09:
    name: WV069
    wavelength: [6.74, 6.94, 7.14]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: wv069
    file_key: image_pixel_values
  C10:
    name: WV073
    wavelength: [7.24, 7.33, 7.42]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: wv073
    file_key: image_pixel_values
  C11:
    name: IR087
    wavelength: [8.415, 8.59, 8.765]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: ir087
    file_key: image_pixel_values
  C12:
    name: IR096
    wavelength: [9.43, 9.62, 9.81]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: ir096
    file_key: image_pixel_values
  C13:
    name: IR105
    wavelength: [10.115, 10.35, 10.585]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: ir105
    file_key: image_pixel_values
  C14:
    name: IR112
    wavelength: [10.90, 11.23, 11.56]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: ir112
    file_key: image_pixel_values
  C15:
    name: IR123
    wavelength: [11.805, 12.36, 12.915]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: ir123
    file_key: image_pixel_values
  C16:
    name: IR133
    wavelength: [13.005, 13.29, 13.575]
    resolution: 2000
    calibration:
      counts:
        standard_name: counts
        units: 1
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: ir133
    file_key: image_pixel_values
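
# Usage sketch, reusing the example filename from the file_types comment above
# (gk2a_ami_le1b_ir087_fd020ge_201901260310.nc); datasets are loaded by their
# AMI band names (e.g. 'IR087'), not the C11-style keys:
#
#   from satpy import Scene
#   scn = Scene(reader='ami_l1b',
#               filenames=['gk2a_ami_le1b_ir087_fd020ge_201901260310.nc'])
#   scn.load(['IR087'])
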
satpy-0.20.0/satpy/etc/readers/amsr2_l1b.yaml

reader:
  name: amsr2_l1b
  description: GCOM-W1 AMSR2 instrument HDF5 reader
  # could this be a python hook ?
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [amsr2]
  default_channels: []

datasets:
  btemp_10.7v:
    name: 'btemp_10.7v'
    # FIXME: These are actually GHz not micrometers
    wavelength: [10.7, 10.7, 10.7]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: V
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (10.7GHz,V)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_10.7h:
    name: 'btemp_10.7h'
    wavelength: [10.7, 10.7, 10.7]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: H
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (10.7GHz,H)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_6.9v:
    name: 'btemp_6.9v'
    wavelength: [6.9, 6.9, 6.9]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: V
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (6.9GHz,V)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_6.9h:
    name: 'btemp_6.9h'
    wavelength: [6.9, 6.9, 6.9]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: H
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (6.9GHz,H)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_7.3v:
    name: 'btemp_7.3v'
    wavelength: [7.3, 7.3, 7.3]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: V
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (7.3GHz,V)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_7.3h:
    name: 'btemp_7.3h'
    wavelength: [7.3, 7.3, 7.3]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: H
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (7.3GHz,H)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_18.7v:
    name: 'btemp_18.7v'
    wavelength: [18.7, 18.7, 18.7]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: V
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (18.7GHz,V)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_18.7h:
    name: 'btemp_18.7h'
    wavelength: [18.7, 18.7, 18.7]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: H
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (18.7GHz,H)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_23.8v:
    name: 'btemp_23.8v'
    wavelength: [23.8, 23.8, 23.8]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: V
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (23.8GHz,V)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_23.8h:
    name: 'btemp_23.8h'
    wavelength: [23.8, 23.8, 23.8]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: H
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (23.8GHz,H)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_36.5v:
    name: 'btemp_36.5v'
    wavelength: [36.5, 36.5, 36.5]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: V
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (36.5GHz,V)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_36.5h:
    name: 'btemp_36.5h'
    wavelength: [36.5, 36.5, 36.5]
    calibration: brightness_temperature
    resolution: 10000
    standard_name: toa_brightness_temperature
    polarization: H
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (36.5GHz,H)"
    fill_value: 65535
    coordinates:
      - longitude
      - latitude
  btemp_89.0av:
    name: 'btemp_89.0av'
    wavelength: [89.0, 89.0, 89.0]
    calibration: brightness_temperature
    resolution: 5000
    navigation: amsr2_5km_a
    standard_name: toa_brightness_temperature
    polarization: V
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (89.0GHz-A,V)"
    fill_value: 65535
    coordinates:
      - longitude_a
      - latitude_a
  btemp_89.0ah:
    name: 'btemp_89.0ah'
    wavelength: [89.0, 89.0, 89.0]
    calibration: brightness_temperature
    resolution: 5000
    navigation: amsr2_5km_a
    standard_name: toa_brightness_temperature
    polarization: H
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (89.0GHz-A,H)"
    fill_value: 65535
    coordinates:
      - longitude_a
      - latitude_a
  btemp_89.0bv:
    name: 'btemp_89.0bv'
    wavelength: [89.0, 89.0, 89.0]
    calibration: brightness_temperature
    resolution: 5000
    navigation: amsr2_5km_b
    standard_name: toa_brightness_temperature
    polarization: V
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (89.0GHz-B,V)"
    fill_value: 65535
    coordinates:
      - longitude_b
      - latitude_b
  btemp_89.0bh:
    name: 'btemp_89.0bh'
    wavelength: [89.0, 89.0, 89.0]
    calibration: brightness_temperature
    resolution: 5000
    navigation: amsr2_5km_b
    standard_name: toa_brightness_temperature
    polarization: H
    file_type: amsr2_l1b
    file_key: "Brightness Temperature (89.0GHz-B,H)"
    fill_value: 65535
    coordinates:
      - longitude_b
      - latitude_b
  latitude_5km_a:
    name: latitude_a
    resolution: 5000
    file_type: amsr2_l1b
    standard_name: latitude
    polarization: [H, V]
    units: degree
    file_key: 'Latitude of Observation Point for 89A'
    fill_value: -9999.0
  latitude_5km_b:
    name: latitude_b
    resolution: 5000
    file_type: amsr2_l1b
    standard_name: latitude
    polarization: [H, V]
    units: degree
    file_key: 'Latitude of Observation Point for 89B'
    fill_value: -9999.0
  longitude_5km_a:
    name: longitude_a
    resolution: 5000
    file_type: amsr2_l1b
    standard_name: longitude
    polarization: [H, V]
    units: degree
    file_key: 'Longitude of Observation Point for 89A'
    fill_value: -9999.0
  longitude_5km_b:
    name: longitude_b
    resolution: 5000
    file_type: amsr2_l1b
    standard_name: longitude
    polarization: [H, V]
    units: degree
    file_key: 'Longitude of Observation Point for 89B'
    fill_value: -9999.0
  latitude_10km:
    name: latitude
    resolution: 10000
    file_type: amsr2_l1b
    standard_name: latitude
    polarization: [H, V]
    units: degree
    file_key: 'Latitude of Observation Point for 89A'
    fill_value: -9999.0
  longitude_10km:
    name: longitude
    resolution: 10000
    file_type: amsr2_l1b
    standard_name: longitude
    polarization: [H, V]
    units: degree
    file_key: 'Longitude of Observation Point for 89A'
    fill_value: -9999.0

file_types:
  amsr2_l1b:
    file_reader: !!python/name:satpy.readers.amsr2_l1b.AMSR2L1BFileHandler
    file_patterns: ['{platform_shortname:3s}{instrument_shortname:3s}_{start_time:%Y%m%d%H%M}_{path_number:3d}{orbit_direction:1s}_{process_level:2s}{process_kind:2s}{product_id:3s}{resolution_id:1s}{dev_id:1s}{product_version:1s}{algorithm_version:3d}{parameter_version:3d}.h5']
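
# Usage sketch: per the FIXME note above, the 'wavelength' values are really
# frequencies in GHz, so channels are normally requested by name. The L1B
# filename below is a hypothetical example matching the pattern:
#
#   from satpy import Scene
#   scn = Scene(reader='amsr2_l1b',
#               filenames=['GW1AM2_201607191809_128A_L1SGBTBR_2220220.h5'])
#   scn.load(['btemp_36.5v', 'btemp_89.0av'])
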
satpy-0.20.0/satpy/etc/readers/avhrr_l1b_aapp.yaml

reader:
  name: avhrr_l1b_aapp
  description: AAPP l1b Reader for AVHRR
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [avhrr-3,]
  default_channels: [1, 2, 3a, 3b, 4, 5]

datasets:
  '1':
    name: '1'
    wavelength: [0.58, 0.63, 0.68]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
  '2':
    name: '2'
    wavelength: [0.725, 0.8625, 1.0]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
  '3a':
    name: '3a'
    wavelength: [1.58, 1.61, 1.64]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
  '3b':
    name: '3b'
    wavelength: [3.55, 3.74, 3.93]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
  '4':
    name: '4'
    wavelength: [10.3, 10.8, 11.3]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
  '5':
    name: '5'
    wavelength: [11.5, 12.0, 12.5]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
  solar_zenith_angle:
    name: solar_zenith_angle
    resolution: 1050
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
    standard_name: solar_zenith_angle
    units: degrees
  sensor_zenith_angle:
    name: sensor_zenith_angle
    resolution: 1050
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
    standard_name: sensor_zenith_angle
    units: degrees
  sun_sensor_azimuth_difference_angle:
    name: sun_sensor_azimuth_difference_angle
    resolution: 1050
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_aapp_l1b
    units: degrees
  latitude:
    name: latitude
    resolution: 1050
    file_type: avhrr_aapp_l1b
    standard_name: latitude
    units: degrees_north
  longitude:
    name: longitude
    resolution: 1050
    file_type: avhrr_aapp_l1b
    standard_name: longitude
    units: degrees_east

file_types:
  avhrr_aapp_l1b:
    file_reader: !!python/name:satpy.readers.aapp_l1b.AVHRRAAPPL1BFile
    file_patterns: ['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b']
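
# Usage sketch: the angle datasets defined above can be loaded alongside the
# calibrated channels (hypothetical AAPP l1b filename matching the pattern):
#
#   from satpy import Scene
#   scn = Scene(reader='avhrr_l1b_aapp',
#               filenames=['hrpt_noaa19_20200101_1200_55555.l1b'])
#   scn.load(['4', 'solar_zenith_angle'])
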
satpy-0.20.0/satpy/etc/readers/avhrr_l1b_eps.yaml

reader:
  name: avhrr_l1b_eps
  description: EPS Reader for AVHRR
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader ''
  sensors: [avhrr-3]
  default_channels: [1, 2, 3a, 3b, 4, 5]

datasets:
  '1':
    name: '1'
    wavelength: [0.58, 0.63, 0.68]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_eps
  '2':
    name: '2'
    wavelength: [0.725, 0.8625, 1.0]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_eps
  '3a':
    name: '3a'
    wavelength: [1.58, 1.61, 1.64]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_eps
  '3b':
    name: '3b'
    wavelength: [3.55, 3.74, 3.93]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_eps
  '4':
    name: '4'
    wavelength: [10.3, 10.8, 11.3]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_eps
  '5':
    name: '5'
    wavelength: [11.5, 12.0, 12.5]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: avhrr_eps
  latitude:
    name: latitude
    resolution: 1050
    file_type: avhrr_eps
    standard_name: latitude
    units: degree
  longitude:
    name: longitude
    resolution: 1050
    file_type: avhrr_eps
    standard_name: longitude
    units: degree
  solar_zenith_angle:
    name: solar_zenith_angle
    sensor: avhrr-3
    resolution: 1050
    coordinates: [longitude, latitude]
    file_type: avhrr_eps
  solar_azimuth_angle:
    name: solar_azimuth_angle
    sensor: avhrr-3
    resolution: 1050
    coordinates: [longitude, latitude]
    file_type: avhrr_eps
  satellite_zenith_angle:
    name: satellite_zenith_angle
    sensor: avhrr-3
    resolution: 1050
    coordinates: [longitude, latitude]
    file_type: avhrr_eps
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    sensor: avhrr-3
    resolution: 1050
    coordinates: [longitude, latitude]
    file_type: avhrr_eps

file_types:
  avhrr_eps:
    file_reader: !!python/name:satpy.readers.eps_l1b.EPSAVHRRFile ''
    file_patterns: [
    'AVHR_xxx_1B_{platform_short_name}_{start_time:%Y%m%d%H%M%SZ}_{end_time:%Y%m%d%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time:%Y%m%d%H%M%SZ}',
    'AVHR_xxx_1B_{platform_short_name}_{start_time:%Y%m%d%H%M%SZ}_{end_time:%Y%m%d%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time:%Y%m%d%H%M%SZ}.nat']
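
# Usage sketch: EPS native files follow the long operational naming scheme in
# file_patterns above, so letting satpy find them by time range can be easier
# than building names by hand (directory and times below are hypothetical):
#
#   from datetime import datetime
#   from satpy import Scene
#   from satpy.readers import find_files_and_readers
#   files = find_files_and_readers(base_dir='/data/metop',
#                                  reader='avhrr_l1b_eps',
#                                  start_time=datetime(2020, 1, 1, 12, 0),
#                                  end_time=datetime(2020, 1, 1, 13, 0))
#   scn = Scene(filenames=files)
#   scn.load(['4', 'solar_zenith_angle'])
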
satpy-0.20.0/satpy/etc/readers/avhrr_l1b_gaclac.yaml

reader:
  name: avhrr_l1b_gaclac
  description: GAC/LAC l1b Reader for AVHRR
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [avhrr-3, avhrr-2, avhrr-1]

datasets:
  '1':
    name: '1'
    wavelength: [0.58, 0.63, 0.68]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: '%'
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
  '2':
    name: '2'
    wavelength: [0.725, 0.8625, 1.0]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: '%'
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
  '3':
    name: '3'
    wavelength: [3.55, 3.74, 3.93]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
  '3a':
    name: '3a'
    wavelength: [1.58, 1.61, 1.64]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: '%'
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
  '3b':
    name: '3b'
    wavelength: [3.55, 3.74, 3.93]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
  '4':
    name: '4'
    wavelength: [10.3, 10.8, 11.3]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
  '5':
    name: '5'
    wavelength: [11.5, 12.0, 12.5]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
  solar_zenith_angle:
    name: solar_zenith_angle
    standard_name: solar_zenith_angle
    resolution: 1050
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
    units: degrees
  sensor_zenith_angle:
    name: sensor_zenith_angle
    standard_name: sensor_zenith_angle
    resolution: 1050
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
    units: degrees
  solar_azimuth_angle:
    name: solar_azimuth_angle
    standard_name: solar_azimuth_angle
    resolution: 1050
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
    units: degrees
  sensor_azimuth_angle:
    name: sensor_azimuth_angle
    standard_name: sensor_azimuth_angle
    resolution: 1050
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
    units: degrees
  sun_sensor_azimuth_difference_angle:
    name: sun_sensor_azimuth_difference_angle
    standard_name: angle_of_rotation_from_solar_azimuth_to_platform_azimuth
    resolution: 1050
    coordinates:
      - longitude
      - latitude
    file_type: gac_lac_l1b
    units: degrees
  qual_flags:
    name: qual_flags
    long_name: Scanline Quality Flags
    resolution: 1050
    file_type: gac_lac_l1b
  latitude:
    name: latitude
    resolution: 1050
    file_type: gac_lac_l1b
    standard_name: latitude
    units: degrees_north
  longitude:
    name: longitude
    resolution: 1050
    file_type: gac_lac_l1b
    standard_name: longitude
    units: degrees_east

file_types:
  gac_lac_l1b:
    file_reader: !!python/name:satpy.readers.avhrr_l1b_gaclac.GACLACFile
    # NSS.GHRR.NJ.D95056.S1116.E1303.B0080506.GC
    file_patterns: ['{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}']
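
# Usage sketch: this reader wraps GAC/LAC level 1b files and needs the
# optional pygac dependency installed. The filename below is the example from
# the file_patterns comment above:
#
#   from satpy import Scene
#   scn = Scene(reader='avhrr_l1b_gaclac',
#               filenames=['NSS.GHRR.NJ.D95056.S1116.E1303.B0080506.GC'])
#   scn.load(['4', 'qual_flags'])
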
counts: standard_name: counts coordinates: [longitude, latitude] file_type: avhrr_hrpt '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature counts: standard_name: counts coordinates: [longitude, latitude] file_type: avhrr_hrpt longitude: name: longitude resolution: 1050 file_type: avhrr_hrpt standard_name: longitude units: degree latitude: name: latitude resolution: 1050 file_type: avhrr_hrpt standard_name: latitude units: degree file_types: avhrr_hrpt: file_reader: !!python/name:satpy.readers.hrpt.HRPTFile '' file_patterns: ['{start_time:%Y%m%d%H%M%S}_{platform_name}.hmf', 'hrpt16_{platform_name:s}_{start_time:%d-%b-%Y_%H:%M:%S.%f}_{orbit_number:05d}'] satpy-0.20.0/satpy/etc/readers/caliop_l2_cloud.yaml000066400000000000000000000017351362525524100222110ustar00rootroot00000000000000reader: default_datasets: [] description: CALIOP Level 2 Cloud Layer Version 3 HDF4 reader name: caliop_l2_cloud reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' sensors: [caliop] datasets: elevation: file_type: hdf4_caliop name: Lidar_Surface_Elevation resolution: 1000 coordinates: [Longitude, Latitude] layer_top_altitude: file_type: hdf4_caliop name: Layer_Top_Altitude resolution: 1000 coordinates: [Longitude, Latitude] units: km longitude: file_type: hdf4_caliop name: Longitude resolution: 1000 standard_name: longitude units: degree latitude: file_type: hdf4_caliop name: Latitude resolution: 1000 standard_name: latitude units: degree file_types: hdf4_caliop: file_patterns: - 'CAL_LID_L2_0{resolution:1s}kmCLay-ValStage1-V3-30.{start_time:%Y-%m-%dT%H-%M-%S}ZN.hdf' file_reader: !!python/name:satpy.readers.caliop_l2_cloud.HDF4BandReader '' satpy-0.20.0/satpy/etc/readers/clavrx.yaml000066400000000000000000000020301362525524100204560ustar00rootroot00000000000000reader: description: CLAVR-X Reader name: clavrx reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs, modis, avhrr, ahi] file_types: level2: file_reader: !!python/name:satpy.readers.clavrx.CLAVRXFileHandler # clavrx_npp_d20170520_t2055235_e2056477_b28822.level2.hdf # clavrx_H08_20180719_1300.level2.hdf file_patterns: - 'clavrx_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}.level2.hdf' - 'clavrx_{platform_shortname}.{start_time:%y%j.%H%M}.{resolution:s}.level2.hdf' - 'clavrx_hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit}.level2.hdf' - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}.level2.hdf' #datasets: # longitude: # name: longitude # resolution: 0 # file_type: level2 # latitude: # name: latitude # resolution: 0 # file_type: level2 # # resolution comes from file # cld_temp_acha: # name: Solar_Zenith # resolution: 0 # coordinates: [longitude, latitude] # file_type: level2 satpy-0.20.0/satpy/etc/readers/electrol_hrit.yaml000066400000000000000000000250541362525524100220270ustar00rootroot00000000000000reader: description: Electro-L MSU-GS HRIT Reader name: electrol_hrit sensors: [msu-gs] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: HRIT_00_6_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns:
['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_9_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_03_8_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-03_8_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_06_4_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-06_4_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_0_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_0_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_09_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-09_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_10_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_11_9_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-11_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_6_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_9_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_03_8_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-03_8_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_06_4_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: 
['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-06_4_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_0_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_0_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_09_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-09_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_10_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_11_9_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-11_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_PRO_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-_________-PRO______-{start_time:%Y%m%d%H%M}-__'] HRIT_EPI_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSEpilogueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-_________-EPI______-{start_time:%Y%m%d%H%M}-__'] datasets: '00_6': name: '00_6' resolution: 4000 wavelength: [0.5, 0.6, 0.65] calibration: # reflectance: # standard_name: toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_6_4, HRIT_00_6_4_C] '00_7': name: '00_7' resolution: 4000 wavelength: [0.65, 0.7, 0.8] calibration: # reflectance: # standard_name: toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_7_4, HRIT_00_7_4_C] '00_9': name: '00_9' resolution: 4000 wavelength: [0.8, 0.9, 0.9] calibration: # reflectance: # standard_name: toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_9_4, HRIT_00_9_4_C] '03_8': name: '03_8' resolution: 4000 wavelength: [3.5, 3.8, 4.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_03_8_4, HRIT_03_8_4_C] '06_4': name: '06_4' resolution: 4000 wavelength: [5.7, 6.4, 7.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts 
units: '1' file_type: [HRIT_06_4_4, HRIT_06_4_4_C] '08_0': name: '08_0' resolution: 4000 wavelength: [7.5, 8.0, 8.5] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_08_0_4, HRIT_08_0_4_C] '08_7': name: '08_7' resolution: 4000 wavelength: [8.2, 8.7, 9.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_08_7_4, HRIT_08_7_4_C] '09_7': name: '09_7' resolution: 4000 wavelength: [9.2, 9.7, 10.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_09_7_4, HRIT_09_7_4_C] '10_7': name: '10_7' resolution: 4000 wavelength: [10.2, 10.8, 11.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_10_7_4, HRIT_10_7_4_C] '11_9': name: '11_9' resolution: 4000 wavelength: [11.2, 11.9, 12.5] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_11_9_4, HRIT_11_9_4_C] satpy-0.20.0/satpy/etc/readers/fci_l1c_fdhsi.yaml000066400000000000000000000175051362525524100216470ustar00rootroot00000000000000reader: name: fci_l1c_fdhsi short_name: FCI L1C FDHSI long_name: MTG FCI Level 1C FDHSI description: > Reader for FCI FDHSI data in NetCDF4 format. Used to read Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) Full Disk High Spectral Imagery (FDHSI) data.
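# Usage sketch: the channels declared in the datasets section below can be
# loaded through satpy's generic Scene API. The path is a placeholder, and
# 'vis_04' is one of the dataset names defined in this file; FDHSI chunk
# files contain '-BODY-' per the file pattern at the end of this file.
#
#   from glob import glob
#   from satpy import Scene
#
#   scn = Scene(reader='fci_l1c_fdhsi', filenames=glob('/path/to/*BODY*.nc'))
#   scn.load(['vis_04'])
#   print(scn['vis_04'])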
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fci] datasets: vis_04: name: vis_04 sensor: fci wavelength: [0.384, 0.444, 0.504] resolution: 1000 calibration: counts: standard_name: counts units: "count" reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_05: name: vis_05 sensor: fci wavelength: [0.470, 0.510, 0.550] resolution: 1000 calibration: counts: standard_name: counts units: "count" reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_06: name: vis_06 sensor: fci wavelength: [0.590, 0.640, 0.690] resolution: 1000 calibration: counts: standard_name: counts units: "count" reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_08: name: vis_08 sensor: fci wavelength: [0.815, 0.865, 0.915] resolution: 1000 calibration: counts: standard_name: counts units: "count" reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_09: name: vis_09 sensor: fci wavelength: [0.894, 0.914, 0.934] resolution: 1000 calibration: counts: standard_name: counts units: "count" reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi nir_13: name: nir_13 sensor: fci wavelength: [1.350, 1.380, 1.410] resolution: 1000 calibration: counts: standard_name: counts units: "count" reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi nir_16: name: nir_16 sensor: fci wavelength: [1.560, 1.610, 1.660] resolution: 1000 calibration: counts: standard_name: counts units: "count" reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi nir_22: name: nir_22 sensor: fci wavelength: [2.200, 2.250, 2.300] resolution: 1000 calibration: counts: standard_name: counts units: "count" reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_38: name: ir_38 sensor: fci wavelength: [3.400, 3.800, 4.200] resolution: 2000 calibration: counts: standard_name: counts units: "count" brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi wv_63: name: wv_63 sensor: fci wavelength: [5.300, 6.300, 7.300] resolution: 2000 calibration: counts: standard_name: counts units: "count" brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi wv_73: name: wv_73 sensor: fci wavelength: [6.850, 7.350, 7.850] resolution: 2000 calibration: counts: standard_name: counts units: "count" brightness_temperature: standard_name: 
toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_87: name: ir_87 sensor: fci wavelength: [8.300, 8.700, 9.100] resolution: 2000 calibration: counts: standard_name: counts units: "count" brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_97: name: ir_97 sensor: fci wavelength: [9.360, 9.660, 9.960] resolution: 2000 calibration: counts: standard_name: counts units: "count" brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_105: name: ir_105 sensor: fci wavelength: [9.800, 10.500, 11.200] resolution: 2000 calibration: counts: standard_name: counts units: "count" brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_123: name: ir_123 sensor: fci wavelength: [11.800, 12.300, 12.800] resolution: 2000 calibration: counts: standard_name: counts units: "count" brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_133: name: ir_133 sensor: fci wavelength: [12.700, 13.300, 13.900] resolution: 2000 calibration: counts: standard_name: counts units: "count" brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi # Source: FCI L1 Dataset User Guide [FCIL1DUG] # ftp://ftp.eumetsat.int/pub/OPS/out/test-data/FCI_L1C_Format_Familiarisation/FCI_L1_Dataset_User_Guide_[FCIL1DUG].pdf # and Example Products for Pytroll Workshop Package Description, # EUM/MTG/DOC/19/1079228 file_types: fci_l1c_fdhsi: file_reader: !!python/name:satpy.readers.fci_l1c_fdhsi.FCIFDHSIFileHandler '' file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_level}-{type}-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'] expected_segments: 70 satpy-0.20.0/satpy/etc/readers/generic_image.yaml000066400000000000000000000032641362525524100217450ustar00rootroot00000000000000reader: name: generic_image description: generic image reader reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [images] default_channels: [image] datasets: image: name: image file_type: graphic file_types: graphic: file_reader: !!python/name:satpy.readers.generic_image.GenericImageFileHandler file_patterns: - '{start_time:%Y%m%d_%H%M}{filename}.png' - '{start_time:%Y%m%d_%H%M}{filename}.PNG' - '{start_time:%Y%m%d_%H%M}{filename}.jpg' - '{start_time:%Y%m%d_%H%M}{filename}.jpeg' - '{start_time:%Y%m%d_%H%M}{filename}.JPG' - '{start_time:%Y%m%d_%H%M}{filename}.JPEG' - '{start_time:%Y%m%d_%H%M}{filename}.tif' - '{start_time:%Y%m%d_%H%M}{filename}.tiff' - '{start_time:%Y%m%d_%H%M}{filename}.TIF' - 
'{start_time:%Y%m%d_%H%M}{filename}.TIFF' - '{filename}{start_time:%Y%m%d_%H%M}.png' - '{filename}{start_time:%Y%m%d_%H%M}.PNG' - '{filename}{start_time:%Y%m%d_%H%M}.jpg' - '{filename}{start_time:%Y%m%d_%H%M}.jpeg' - '{filename}{start_time:%Y%m%d_%H%M}.JPG' - '{filename}{start_time:%Y%m%d_%H%M}.JPEG' - '{filename}{start_time:%Y%m%d_%H%M}.tif' - '{filename}{start_time:%Y%m%d_%H%M}.tiff' - '{filename}{start_time:%Y%m%d_%H%M}.TIF' - '{filename}{start_time:%Y%m%d_%H%M}.TIFF' - '{filename}.png' - '{filename}.PNG' - '{filename}.jpg' - '{filename}.jpeg' - '{filename}.JPG' - '{filename}.JPEG' - '{filename}.tif' - '{filename}.tiff' - '{filename}.TIF' - '{filename}.TIFF' satpy-0.20.0/satpy/etc/readers/geocat.yaml000066400000000000000000000166531362525524100204370ustar00rootroot00000000000000reader: description: CSPP Geo and GEOCAT file reader name: geocat reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [abi, ahi, goes_imager] file_types: level2: file_reader: !!python/name:satpy.readers.geocat.GEOCATFileHandler file_patterns: # GOES-16 ABI files (must be first to capture things correctly): - 'geocatL{processing_level:1d}.{platform_shortname}.{sector_id}.{start_time:%Y%j.%H%M%S}.hdf' - 'geocatL{processing_level:1d}.{platform_shortname}.{sector_id}.{start_time:%Y%j.%H%M%S}.nc' # Generic file pattern - 'geocatL{processing_level:1d}.{platform_shortname}.{start_time:%Y%j.%H%M%S}.hdf' - 'geocatL{processing_level:1d}.{platform_shortname}.{start_time:%Y%j.%H%M%S}.nc' # Himawari 8 files: - 'geocatL2.{platform_shortname}.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.hdf' - 'geocatL2.{platform_shortname}.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.nc' ahi_level1: file_reader: !!python/name:satpy.readers.geocat.GEOCATFileHandler file_patterns: # we could use the H8 pattern above, but then the datasets listed below # would always be "available" - 'geocatL1.HIMAWARI-8.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.hdf' - 'geocatL1.HIMAWARI-8.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.nc' datasets: # AHI Level 1 Datasets (need to define here so wavelengths can be used) B01: name: B01 sensor: ahi wavelength: [0.45,0.47,0.49] calibration: reflectance: file_key: himawari_8_ahi_channel_1_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B02: name: B02 sensor: ahi wavelength: [0.49,0.51,0.53] calibration: reflectance: file_key: himawari_8_ahi_channel_2_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B03: name: B03 sensor: ahi wavelength: [0.62,0.64,0.66] calibration: reflectance: file_key: himawari_8_ahi_channel_3_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B04: name: B04 sensor: ahi wavelength: [0.83, 0.85, 0.87] calibration: reflectance: file_key: himawari_8_ahi_channel_4_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B05: name: B05 sensor: ahi wavelength: [1.5, 1.6, 1.7] calibration: reflectance: file_key: himawari_8_ahi_channel_5_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: 
toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B06: name: B06 sensor: ahi wavelength: [2.2, 2.3, 2.4] calibration: reflectance: file_key: himawari_8_ahi_channel_6_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B07: name: B07 sensor: ahi wavelength: [3.7, 3.9, 4.1] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_7_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B08: name: B08 sensor: ahi wavelength: [6.0, 6.2, 6.4] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_8_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B09: name: B09 sensor: ahi wavelength: [6.7, 6.9, 7.1] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_9_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B10: name: B10 sensor: ahi wavelength: [7.1, 7.3, 7.5] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_10_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B11: name: B11 sensor: ahi wavelength: [8.4, 8.6, 8.8] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_11_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B12: name: B12 sensor: ahi wavelength: [9.4, 9.6, 9.8] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_12_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B13: name: B13 sensor: ahi wavelength: [10.2, 10.4, 10.6] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_13_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_14_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_15_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_16_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: 
toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1satpy-0.20.0/satpy/etc/readers/ghrsst_l3c_sst.yaml000066400000000000000000000007641362525524100221350ustar00rootroot00000000000000reader: description: OSISAF SST GHRSST netCDF reader name: ghrsst_l3c_sst reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' sensors: [avhrr/3, viirs] datasets: sea_surface_temperature: name: sea_surface_temperature file_type: ghrsst_osisaf_l2 resolution: 1000 file_types: ghrsst_osisaf_l2: file_reader: !!python/name:satpy.readers.ghrsst_l3c_sst.GHRSST_OSISAFL2 '' file_patterns: ['S-OSI_-FRA_-{satid:3s}_-NARSST_FIELD-{start_time:%Y%m%d%H00}Z.nc'] satpy-0.20.0/satpy/etc/readers/glm_l2.yaml000066400000000000000000000033501362525524100203370ustar00rootroot00000000000000reader: name: glm_l2 short_name: GLM Level 2 long_name: GOES-R GLM Level 2 description: > NetCDF4 reader for GOES-R series GLM data. Currently only gridded L2 files output from `glmtools <https://github.com/deeplycloudy/glmtools>`_ are supported. sensors: [glm] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] # Typical filenames from Unidata THREDDS server: # http://thredds.unidata.ucar.edu/thredds/catalog/satellite/goes/east/ # products/GeostationaryLightningMapper/CONUS/current/catalog.html # OR_GLM-L2-GLMC-M3_G16_s20191920000000_e20191920001000_c20191920001380.nc file_types: glm_l2_imagery: file_reader: !!python/name:satpy.readers.glm_l2.NCGriddedGLML2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-GLM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] # glm_l2_lcfa - add this with glmtools datasets: flash_extent_density: name: flash_extent_density file_type: glm_l2_imagery group_extent_density: name: group_extent_density file_type: glm_l2_imagery flash_centroid_density: name: flash_centroid_density file_type: glm_l2_imagery group_centroid_density: name: group_centroid_density file_type: glm_l2_imagery average_flash_area: name: average_flash_area file_type: glm_l2_imagery minimum_flash_area: name: minimum_flash_area file_type: glm_l2_imagery average_group_area: name: average_group_area file_type: glm_l2_imagery total_energy: name: total_energy file_type: glm_l2_imagery satpy-0.20.0/satpy/etc/readers/goes-imager_hrit.yaml000066400000000000000000000131331362525524100224100ustar00rootroot00000000000000reader: name: goes-imager_hrit short_name: GOES Imager HRIT long_name: GOES Imager Level 1 (HRIT) description: Reader for GOES Imager Level 1 data in HRIT format sensors: [goes_imager] default_channels: [00_7, 03_9, 06_6, 10_7] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader # eg.
# L-000-MSG3__-GOES13______-06_6_075W-000005___-201703261200-__ # L-000-MSG3__-GOES13______-06_6_075W-PRO______-201703261200-__ file_types: HRIT_00_7: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_00] expected_segments: 7 HRIT_00_7_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_00] expected_segments: 7 HRIT_03_9: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_03] expected_segments: 7 HRIT_03_9_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_03] expected_segments: 7 HRIT_06_6: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_06] expected_segments: 7 HRIT_06_6_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_06] expected_segments: 7 HRIT_10_7: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_10] expected_segments: 7 HRIT_10_7_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_10] expected_segments: 7 HRIT_PRO_00: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_03: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_06: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_10: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 datasets: '00_7': name: '00_7' resolution: 3000 wavelength: [0.55, 0.7, 0.75] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: 
"%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_00_7, HRIT_00_7_C] '03_9': name: '03_9' resolution: 3000 wavelength: [3.8, 3.9, 4.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_03_9, HRIT_03_9_C] '06_6': name: '06_6' resolution: 3000 wavelength: [6.5, 6.6, 7.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_06_6, HRIT_06_6_C] '10_7': name: '10_7' resolution: 3000 wavelength: [10.2, 10.7, 11.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_10_7, HRIT_10_7_C] satpy-0.20.0/satpy/etc/readers/goes-imager_nc.yaml000066400000000000000000000223711362525524100220460ustar00rootroot00000000000000reader: name: goes-imager_nc short_name: GOES Imager netCDF long_name: GOES Imager Level 1 (netCDF) description: > Reader for GOES Imager Level 1 data in netCDF format (from both NOAA CLASS and EUMETCast) References: - GOES 8-12: https://goes.gsfc.nasa.gov/text/databook/databook.pdf, page 20 ff. - GOES 13-15: https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf, chapter 3. sensors: [goes_imager] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_goes_00_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_01.nc'] eum_nc_goes_00_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_1.nc'] requires: [eum_nc_goes_geo] nc_goes_03_9: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_02.nc'] eum_nc_goes_03_9: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_2.nc'] requires: [eum_nc_goes_geo] nc_goes_06_5: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes12.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes13.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes14.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes15.{start_time:%Y.%j.%H%M%S}.BAND_03.nc'] eum_nc_goes_06_5: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_3.nc'] requires: [eum_nc_goes_geo] nc_goes_06_8: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes08.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes09.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes10.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes11.{start_time:%Y.%j.%H%M%S}.BAND_03.nc'] nc_goes_10_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_04.nc'] eum_nc_goes_10_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_4.nc'] requires: [eum_nc_goes_geo] nc_goes_12_0: file_reader: 
!!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes08.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes09.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes10.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes11.{start_time:%Y.%j.%H%M%S}.BAND_05.nc'] eum_nc_goes_12_0: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_5.nc'] requires: [eum_nc_goes_geo] nc_goes_13_3: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes12.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes13.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes14.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes15.{start_time:%Y.%j.%H%M%S}.BAND_06.nc'] eum_nc_goes_13_3: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_6.nc'] requires: [eum_nc_goes_geo] eum_nc_goes_geo: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMGEONCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_geo.nc'] datasets: '00_7': name: '00_7' wavelength: [0.52, 0.65, 0.71] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: comment: Angle of incident radiation and annual variation of the earth-sun distance are not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. units: "%" coordinates: - longitude_00_7 - latitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_00_7] '03_9': name: '03_9' wavelength: [3.73, 3.9, 4.07] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_03_9 - latitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_03_9] '06_5': name: '06_5' wavelength: [5.8, 6.5, 7.3] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_06_5 - latitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_06_5] '06_8': name: '06_8' wavelength: [6.5, 6.75, 7.0] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_06_8 - latitude_06_8 file_type: nc_goes_06_8 '10_7': name: '10_7' wavelength: [10.2, 10.7, 11.2] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_10_7 - latitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_10_7] '12_0': name: '12_0' wavelength: [11.5, 12.0, 12.5] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_12_0 - latitude_12_0 file_type: nc_goes_12_0 '13_3': name: '13_3' wavelength: [13.0, 13.35, 13.7] calibration: counts: standard_name: counts units: 1 radiance:
standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_13_3 - latitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_13_3] longitude_00_7: name: longitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_00_7: name: latitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_03_9: name: longitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_03_9: name: latitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_06_5: name: longitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_06_5: name: latitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_06_8: name: longitude_06_8 file_type: nc_goes_06_8 standard_name: longitude units: degrees_east latitude_06_8: name: latitude_06_8 file_type: nc_goes_06_8 standard_name: latitude units: degrees_north longitude_10_7: name: longitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_10_7: name: latitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_12_0: name: longitude_12_0 file_type: nc_goes_12_0 standard_name: longitude units: degrees_east latitude_12_0: name: latitude_12_0 file_type: nc_goes_12_0 standard_name: latitude units: degrees_north longitude_13_3: name: longitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_13_3: name: latitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_geo] standard_name: latitude units: degrees_north satpy-0.20.0/satpy/etc/readers/grib.yaml000066400000000000000000000017141362525524100201100ustar00rootroot00000000000000reader: description: GRIB2 file reader name: grib reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [unknown] file_types: grib: file_reader: !!python/name:satpy.readers.grib.GRIBFileHandler file_patterns: # NOAA NCEP: # gfs.t18z.sfluxgrbf106.grib2 - '{stem}.grib2' - 'gfs{stem}.f{forecast_time:3d}' - 'gdas{stem}.f{forecast_time:3d}' # EUMETSAT: # S-OSI_-NOR_-MULT-AHLDLI_FIELD-201805011200Z.grb.gz - '{stem}.grb' - '{stem}.grb2' # grib_ncep: # file_reader: !!python/name:satpy.readers.grib.GRIBFileHandler # file_patterns: # # NOAA NCEP: # # gfs.t18z.sfluxgrbf106.grib2 # - '{model_name}.t{model_hour:2d}z.{field_set}.grib2' # keys: # shortName: # id_key: name # values: ['gh', 't', 'u', 'v', 'r', 'icaht'] # level: # id_key: level # values: [0, 100, 125, 150, 175, 200, 225, 250, 275, 300, 350, 400, 450, 500, 600, 700, 750, 850] satpy-0.20.0/satpy/etc/readers/hsaf_grib.yaml000066400000000000000000000023651362525524100211140ustar00rootroot00000000000000reader: description: Reader for Hydrology SAF products name: hsaf_grib reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [hsaf] file_types: hsafgrib: file_reader: !!python/name:satpy.readers.hsaf_grib.HSAFFileHandler file_patterns: ['h03_{sensing_time:%Y%m%d_%H%M}_{region:3s}.grb', 'h05_{sensing_time:%Y%m%d_%H%M}_{accum_time:2s}_{region:3s}.grb', 'h03B_{sensing_time:%Y%m%d_%H%M}_{region:3s}.grb', 'h05B_{sensing_time:%Y%m%d_%H%M}_{accum_time:2s}_{region:3s}.grb'] datasets: h03: name: h03 msg_name: irrate sensor: hsaf 
resolution: 3000 standard_name: instantaneous_rainfall_rate units: kg m-2 s-1 file_type: hsafgrib h03B: name: h03B msg_name: irrate sensor: hsaf resolution: 3000 standard_name: instantaneous_rainfall_rate units: kg m-2 s-1 file_type: hsafgrib h05: name: h05 msg_name: accumrain sensor: hsaf resolution: 3000 standard_name: accumulated_rainfall_rate units: kg m-2 file_type: hsafgrib h05B: name: h05B msg_name: accumrain sensor: hsaf resolution: 3000 standard_name: accumulated_rainfall_rate units: kg m-2 file_type: hsafgrib satpy-0.20.0/satpy/etc/readers/iasi_l2.yaml000066400000000000000000000073261362525524100205140ustar00rootroot00000000000000reader: description: Reader for IASI L2 files name: iasi_l2 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [iasi] default_datasets: datasets: longitude: name: longitude file_type: iasi_l2_hdf5 standard_name: longitude units: "degrees" resolution: 12000 latitude: name: latitude file_type: iasi_l2_hdf5 standard_name: latitude units: "degrees" resolution: 12000 ozone_mixing_ratio: name: ozone_mixing_ratio file_type: iasi_l2_hdf5 units: "kg/kg" resolution: 12000 coordinates: [longitude, latitude] ozone_mixing_ratio_quality: name: ozone_mixing_ratio_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] ozone_total_column: name: ozone_total_column file_type: iasi_l2_hdf5 units: "kg/m^2" resolution: 12000 coordinates: [longitude, latitude] pressure: name: pressure file_type: iasi_l2_hdf5 units: "hPa" resolution: 12000 coordinates: [longitude, latitude] pressure_quality: name: pressure_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] temperature: name: temperature file_type: iasi_l2_hdf5 units: "K" resolution: 12000 coordinates: [longitude, latitude] temperature_quality: name: temperature_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] water_mixing_ratio: name: water_mixing_ratio file_type: iasi_l2_hdf5 units: "kg/kg" resolution: 12000 coordinates: [longitude, latitude] water_total_column: name: water_total_column file_type: iasi_l2_hdf5 units: "mm" resolution: 12000 coordinates: [longitude, latitude] surface_skin_temperature: name: surface_skin_temperature file_type: iasi_l2_hdf5 units: "K" resolution: 12000 coordinates: [longitude, latitude] surface_skin_temperature_quality: name: surface_skin_temperature_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] emissivity: name: emissivity file_type: iasi_l2_hdf5 units: "1" resolution: 12000 coordinates: [longitude, latitude] emissivity_quality: name: emissivity_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] water_mixing_ratio_quality: name: water_mixing_ratio_quality file_type: iasi_l2_hdf5 units: "" resolution: 12000 coordinates: [longitude, latitude] satellite_azimuth_angle: name: satellite_azimuth_angle file_type: iasi_l2_hdf5 units: "degrees" resolution: 12000 coordinates: [longitude, latitude] satellite_zenith_angle: name: satellite_zenith_angle file_type: iasi_l2_hdf5 units: "degrees" resolution: 12000 coordinates: [longitude, latitude] sensing_time: name: sensing_time file_type: iasi_l2_hdf5 units: "ut_time" resolution: 12000 coordinates: [longitude, latitude] solar_azimuth_angle: name: solar_azimuth_angle file_type: iasi_l2_hdf5 units: "degrees" resolution: 12000 coordinates: [longitude, latitude] solar_zenith_angle: name: solar_zenith_angle file_type: 
iasi_l2_hdf5 units: "degrees" resolution: 12000 coordinates: [longitude, latitude] file_types: iasi_l2_hdf5: file_reader: !!python/name:satpy.readers.iasi_l2.IASIL2HDF5 file_patterns: ["W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}_C_EUMS_{processing_time:%Y%m%d%H%M%S}_IASI_PW3_02_{platform_id}_{start_time:%Y%m%d%H%M%S}Z_{end_time:%Y%m%d%H%M%S}Z.hdf"] satpy-0.20.0/satpy/etc/readers/iasi_l2_so2_bufr.yaml000066400000000000000000000147171362525524100223170ustar00rootroot00000000000000reader: name: iasi_l2_so2_bufr short_name: IASI L2 SO2 BUFR long_name: METOP IASI Level 2 SO2 BUFR description: > Reader for IASI L2 files sensors: [iasi] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader default_datasets: file_types: iasi_l2_so2_bufr: file_reader: !!python/name:satpy.readers.iasi_l2_so2_bufr.IASIL2SO2BUFR file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_EUMC_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_{level}.bin"] datasets: year: name: year file_type: iasi_l2_so2_bufr units: "year" resolution: 12000 coordinates: [longitude, latitude] key: '#1#year' fill_value: -1.e+100 month: name: month file_type: iasi_l2_so2_bufr units: "month" resolution: 12000 coordinates: [longitude, latitude] key: '#1#month' fill_value: -1.e+100 day: name: day file_type: iasi_l2_so2_bufr units: "day" resolution: 12000 coordinates: [longitude, latitude] key: '#1#day' fill_value: -1.e+100 hour: name: hour file_type: iasi_l2_so2_bufr units: "hour" resolution: 12000 coordinates: [longitude, latitude] key: '#1#hour' fill_value: -1.e+100 minute: name: minute file_type: iasi_l2_so2_bufr units: "minute" resolution: 12000 coordinates: [longitude, latitude] key: '#1#minute' fill_value: -1.e+100 second: name: second file_type: iasi_l2_so2_bufr units: "second" resolution: 12000 coordinates: [longitude, latitude] key: '#1#second' fill_value: -1.e+100 orbit_number: name: orbit_number file_type: iasi_l2_so2_bufr units: "" resolution: 12000 coordinates: [longitude, latitude] key: '#1#orbitNumber' fill_value: -1.e+100 scanline_number: name: scanline_number file_type: iasi_l2_so2_bufr units: "" resolution: 12000 coordinates: [longitude, latitude] key: '#1#scanLineNumber' fill_value: -1.e+100 latitude: name: latitude file_type: iasi_l2_so2_bufr standard_name: latitude units: "degrees" resolution: 12000 key: '#1#latitude' fill_value: -1.e+100 longitude: name: longitude file_type: iasi_l2_so2_bufr standard_name: longitude units: "degrees" resolution: 12000 key: '#1#longitude' fill_value: -1.e+100 field_of_view_number: name: field_of_view_number file_type: iasi_l2_so2_bufr units: "" resolution: 12000 coordinates: [longitude, latitude] key: '#1#fieldOfViewNumber' fill_value: -1.e+100 satellite_zenith_angle: name: satellite_zenith_angle file_type: iasi_l2_so2_bufr units: "degrees" resolution: 12000 coordinates: [longitude, latitude] key: '#1#satelliteZenithAngle' fill_value: -1.e+100 satellite_azimuth_angle: name: satellite_azimuth_angle file_type: iasi_l2_so2_bufr units: "degrees" resolution: 12000 coordinates: [longitude, latitude] key: '#1#bearingOrAzimuth' fill_value: -1.e+100 solar_zenith_angle: name: solar_zenith_angle file_type: iasi_l2_so2_bufr units: "degrees" resolution: 12000 coordinates: [longitude, latitude] key: '#1#solarZenithAngle' fill_value: -1.e+100 solar_azimuth_angle: name: solar_azimuth_angle file_type: iasi_l2_so2_bufr units: "degrees" resolution: 12000 coordinates: [longitude, latitude] key: '#1#solarAzimuth' 
fill_value: -1.e+100 so2_quality_flag: name: so2_quality_flag file_type: iasi_l2_so2_bufr units: "" resolution: 12000 coordinates: [longitude, latitude] key: '#1#generalRetrievalQualityFlagForSo2' fill_value: -1.e+100 so2_height_1: name: so2_height_1 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#1#sulphurDioxide' fill_value: -1.e+100 so2_height_2: name: so2_height_2 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#2#sulphurDioxide' fill_value: -1.e+100 so2_height_3: name: so2_height_3 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#3#sulphurDioxide' fill_value: -1.e+100 so2_height_4: name: so2_height_4 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#4#sulphurDioxide' fill_value: -1.e+100 so2_height_5: name: so2_height_5 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#5#sulphurDioxide' fill_value: -1.e+100 so2_height_6: name: so2_height_6 file_type: iasi_l2_so2_bufr units: "dobson" resolution: 12000 coordinates: [longitude, latitude] key: '#6#sulphurDioxide' fill_value: -1.e+100 height_1: name: height_1 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#1#height' fill_value: -1.e+100 height_2: name: height_2 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#2#height' fill_value: -1.e+100 height_3: name: height_3 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#3#height' fill_value: -1.e+100 height_4: name: height_4 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#4#height' fill_value: -1.e+100 height_5: name: height_5 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#5#height' fill_value: -1.e+100 height_6: name: height_6 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#6#height' fill_value: -1.e+100 height_7: name: height_7 file_type: iasi_l2_so2_bufr units: "meters" resolution: 12000 coordinates: [longitude, latitude] key: '#7#height' fill_value: -1.e+100 brightnessTemperatureRealPart: name: brightnessTemperatureRealPart file_type: iasi_l2_so2_bufr units: "K" resolution: 12000 coordinates: [longitude, latitude] key: '#1#brightnessTemperatureRealPart' fill_value: -1.e+100 satpy-0.20.0/satpy/etc/readers/jami_hrit.yaml000066400000000000000000000070011362525524100211260ustar00rootroot00000000000000reader: name: jami_hrit short_name: JAMI HRIT long_name: MTSAT-1R JAMI Level 1 (HRIT) description: > Reader for MTSAT-1R JAMI data in JMA HRIT format. Note that there are two versions of the dataset: a segmented version (data split into multiple files) and a non-segmented version (all data in one file).
References: - https://www.wmo-sat.info/oscar/instruments/view/236 - http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html sensors: [jami] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: hrit_vis: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS' hrit_ir1: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1' hrit_ir2: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2' hrit_ir3: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3' hrit_ir4: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4' datasets: VIS: name: VIS sensor: jami wavelength: [0.55, 0.675, 0.90] resolution: 1000 calibration: counts: standard_name: counts units: 1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: hrit_vis IR1: name: IR1 sensor: jami wavelength: [10.3, 10.8, 11.3] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: hrit_ir1 IR2: name: IR2 sensor: jami wavelength: [11.5, 12.0, 12.5] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: hrit_ir2 IR3: name: IR3 sensor: jami wavelength: [6.5, 6.75, 7.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: hrit_ir3 IR4: name: IR4 sensor: jami wavelength: [3.5, 3.75, 4.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: hrit_ir4satpy-0.20.0/satpy/etc/readers/li_l2.yaml000066400000000000000000000104411362525524100201630ustar00rootroot00000000000000reader: description: Generic MTG LI L2 product reader name: li_l2 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' sensors: [li] default_datasets: datasets: # There is only one dataset per file. The following list provides only the optional # contained products, which depend on the file name.
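# For example, the 'af' (accumulated flashes) product defined below could be
# loaded through satpy's generic Scene API; the filename is a placeholder
# that would need to match one of the li_af patterns under file_types.
#
#   from satpy import Scene
#
#   scn = Scene(reader='li_l2', filenames=['/path/to/LI-L2-AF-file.nc'])
#   scn.load(['af'])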
af: name: af sensor: li resolution: 2000 file_type: li_af afa: name: afa sensor: li resolution: 2000 file_type: li_afa afr: name: afr sensor: li resolution: 2000 file_type: li_afr lgr: name: lgr sensor: li file_type: li_lgr lef: name: lef sensor: li file_type: li_lef lfl: name: lfl sensor: li file_type: li_lfl # Source: LI L2 Product User Guide [LIL2PUG] Draft version -- 2016 file_types: li_l2: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler '' file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-{type}-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_af: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler '' file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-AF-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_afa: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler '' file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-AFA-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_afr: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler '' file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-AFR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_lgr: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler '' file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-LGR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_lef: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler '' file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-LEF-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] li_lfl: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler '' file_patterns: 
['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-LFL-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] satpy-0.20.0/satpy/etc/readers/maia.yaml000066400000000000000000000051351362525524100200750ustar00rootroot00000000000000reader: description: MAIA Reader name: maia reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs, avhrr] file_types: maia: file_reader: !!python/name:satpy.readers.maia.MAIAFileHandler # example of file name: viiCT_npp_DB_20121010_S132824_E132947_ASC_D_La050_Lo-012_00001.h5 file_patterns: - 'viiCT_{platform_name}_{origin}_{start_time:%Y%m%d_S%H%M%S}_E{end_time:%H%M%S}_{orbit_type}_La{center_lat}_Lo{center_lon}_{orbit:5d}.h5' - 'avhCT_{platform_name}_{origin}_{start_time:%Y%m%d_S%H%M%S}_E{end_time:%H%M%S}_{orbit_type}_La{center_lat}_Lo{center_lon}_{orbit:5d}.h5' datasets: Mask_in: name: Mask_in file_type: maia coordinates: [Longitude, Latitude ] Latitude: name: Latitude file_type: maia units: "degrees" standard_name: latitude Longitude: name: Longitude file_type: maia units: "degrees" standard_name: longitude Alt_surface: name: Alt_surface units: m file_type: maia coordinates: [Longitude, Latitude ] CloudType: name: CloudType file_type: maia coordinates: [Longitude, Latitude ] CloudMask: name: CloudMask file_type: maia coordinates: [Longitude, Latitude ] # CloudType and CloudMask are bitfields # description of sub fields ct: name: ct file_type: maia coordinates: [Longitude, Latitude ] # Cloud Mask On Pixel cma: name: cma file_type: maia coordinates: [Longitude, Latitude ] # Cloud Mask Confidence cma_conf: name: cma_conf file_type: maia coordinates: [Longitude, Latitude ] # Cloud Mask Quality cma_qual: name: CM_qual file_type: maia coordinates: [Longitude, Latitude ] land_water_background: name: land_water_background file_type: maia coordinates: [Longitude, Latitude ] opaq_cloud: name: opaq_cloud file_type: maia coordinates: [Longitude, Latitude ] CloudTopPres: name: CloudTopPres units: hPa file_type: maia coordinates: [Longitude, Latitude ] CloudTopTemp: name: CloudTopTemp units: degrees celsius file_type: maia coordinates: [Longitude, Latitude ] Mask_ind: name: Mask_ind file_type: maia coordinates: [Longitude, Latitude ] fov_qual: name: fov_qual file_type: maia coordinates: [Longitude, Latitude ] Tsurf: name: Tsurf units: degrees celsius file_type: maia coordinates: [Longitude, Latitude ] Sat_zenith: name: Sat_zenith units: degrees file_type: maia coordinates: [Longitude, Latitude ] satpy-0.20.0/satpy/etc/readers/mersi2_l1b.yaml000066400000000000000000000434141362525524100211270ustar00rootroot00000000000000reader: description: FY-3D Medium Resolution Spectral Imager 2 (MERSI-2) L1B Reader name: mersi2_l1b sensors: [mersi-2] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: mersi2_l1b_1000: file_reader: !!python/name:satpy.readers.mersi2_l1b.MERSI2L1B rows_per_scan: 10 file_patterns: # tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_1000M_L1B.{ext}' # FY3D_20190808_130200_130300_8965_MERSI_1000M_L1B.HDF -
'{platform_shortname}_{start_time:%Y%m%d_%H%M%S}_{end_time:%H%M%S}_{orbit_number:s}_MERSI_1000M_L1B.{ext}' mersi2_l1b_250: file_reader: !!python/name:satpy.readers.mersi2_l1b.MERSI2L1B rows_per_scan: 40 file_patterns: # tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_0250M_L1B.{ext}' mersi2_l1b_1000_geo: file_reader: !!python/name:satpy.readers.mersi2_l1b.MERSI2L1B rows_per_scan: 10 file_patterns: # tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_GEO1K_L1B.{ext}' # FY3D_20190808_130200_130300_8965_MERSI_GEO1K_L1B.HDF - '{platform_shortname}_{start_time:%Y%m%d_%H%M%S}_{end_time:%H%M%S}_{orbit_number:s}_MERSI_GEO1K_L1B.{ext}' mersi2_l1b_250_geo: file_reader: !!python/name:satpy.readers.mersi2_l1b.MERSI2L1B rows_per_scan: 40 file_patterns: # tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_GEOQK_L1B.{ext}' # NOTE: OSCAR website currently has bands in wavelength order # https://www.wmo-sat.info/oscar/instruments/view/279 # The order below is by the wavelength in the input files # The slides at the below link have band 5 and 19 swapped: # http://www.wmo.int/pages/prog/sat/meetings/documents/IPET-SUP-4_Doc_05-04_FY-3D-ppt.pdf datasets: '1': name: '1' wavelength: [0.445, 0.470, 0.495] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 0 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_RefSB_b1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 0 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '2': name: '2' wavelength: [0.525, 0.550, 0.575] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 1 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_RefSB_b2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '3': name: '3' wavelength: [0.625, 0.650, 0.675] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 2 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_RefSB_b3 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '4': name: '4' wavelength: [0.840, 0.865, 0.890] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 3 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_RefSB_b4 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: 
toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '5': name: '5' wavelength: [1.37, 1.38, 1.39] # or 30nm bandwidth? resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 0 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '6': name: '6' wavelength: [1.615, 1.640, 1.665] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '7': name: '7' wavelength: [2.105, 2.130, 2.155] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '8': name: '8' wavelength: [0.402, 0.412, 0.422] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '9': name: '9' wavelength: [0.433, 0.443, 0.453] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 4 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '10': name: '10' wavelength: [0.480, 0.490, 0.500] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 5 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '11': name: '11' wavelength: [0.545, 0.555, 0.565] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 6 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '12': name: '12' wavelength: [0.660, 0.670, 0.680] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 7 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 7 coordinates: 
[longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '13': name: '13' wavelength: [0.699, 0.709, 0.719] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 8 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '14': name: '14' wavelength: [0.736, 0.746, 0.756] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 9 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '15': name: '15' wavelength: [0.855, 0.865, 0.875] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 10 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '16': name: '16' wavelength: [0.895, 0.905, 0.915] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 11 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '17': name: '17' wavelength: [0.926, 0.936, 0.946] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 12 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '18': name: '18' wavelength: [0.915, 0.940, 0.965] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 13 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '19': name: '19' wavelength: [1.23, 1.24, 1.25] # or 1.03um? 
resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 14 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts # Not sure how to get radiance for BT channels '20': name: '20' wavelength: [3.710, 3.800, 3.890] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_Emissive band_index: 0 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 0 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '21': name: '21' wavelength: [3.9725, 4.050, 4.1275] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_Emissive band_index: 1 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 1 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '22': name: '22' wavelength: [6.950, 7.20, 7.450] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_Emissive band_index: 2 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 2 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '23': name: '23' wavelength: [8.400, 8.550, 8.700] resolution: 1000 file_type: mersi2_l1b_1000 file_key: Data/EV_1KM_Emissive band_index: 3 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 3 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '24': name: '24' wavelength: [10.300, 10.800, 11.300] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_Emissive band_index: 0 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 4 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_Emissive_b24 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 4 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts '25': name: '25' wavelength: [11.500, 12.000, 12.500] resolution: 1000: file_type: mersi2_l1b_1000 file_key: Data/EV_250_Aggr.1KM_Emissive band_index: 1 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 5 250: file_type: mersi2_l1b_250 file_key: Data/EV_250_Emissive_b25 calibration_key: Calibration/IR_Cal_Coeff calibration_index: 5 coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature counts: units: "1" standard_name: counts longitude: name: longitude units: degrees_east standard_name: longitude resolution: 1000: file_type: mersi2_l1b_1000_geo file_key: Geolocation/Longitude 250: file_type: mersi2_l1b_250_geo file_key: Longitude latitude: name: latitude units: degrees_north standard_name: latitude resolution: 1000: file_type: mersi2_l1b_1000_geo file_key: Geolocation/Latitude 250: file_type: mersi2_l1b_250_geo file_key: Latitude solar_zenith_angle: name: solar_zenith_angle units: degree standard_name: solar_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: mersi2_l1b_1000_geo file_key: 
Geolocation/SolarZenith solar_azimuth_angle: name: solar_azimuth_angle units: degree standard_name: solar_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: mersi2_l1b_1000_geo file_key: Geolocation/SolarAzimuth satellite_zenith_angle: name: satellite_zenith_angle units: degree standard_name: sensor_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: mersi2_l1b_1000_geo file_key: Geolocation/SensorZenith satellite_azimuth_angle: name: satellite_azimuth_angle units: degree standard_name: sensor_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: mersi2_l1b_1000_geo file_key: Geolocation/SensorAzimuth satpy-0.20.0/satpy/etc/readers/mimicTPW2_comp.yaml000066400000000000000000000015041362525524100217530ustar00rootroot00000000000000reader: description: NetCDF4 reader for the MIMIC TPW Version 2.0 product name: mimicTPW2_comp sensors: [mimic] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: mimicTPW2_comp: file_reader: !!python/name:satpy.readers.mimic_TPW2_nc.MimicTPW2FileHandler file_patterns: ['comp{start_time:%Y%m%d.%H%M%S}.nc'] sensor: ['mimic'] platform_name: ['microwave'] datasets: latArr: name: 'latitude' units: degrees_north file_key: latArr standard_name: latitude coordinates: [ latitude ] file_type: mimicTPW2_comp lonArr: name: 'longitude' units: degrees_east file_key: lonArr standard_name: longitude coordinates: [ longitude ] file_type: mimicTPW2_comp satpy-0.20.0/satpy/etc/readers/modis_l1b.yaml000066400000000000000000000314351362525524100210410ustar00rootroot00000000000000reader: default_datasets: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36] description: Generic MODIS HDF-EOS Reader name: modis_l1b reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' sensors: [modis] navigations: hdf_eos_geo: description: MODIS navigation file_type: hdf_eos_geo latitude_key: Latitude longitude_key: Longitude nadir_resolution: [1000] rows_per_scan: 10 datasets: '1': name: '1' resolution: 250: {file_type: hdf_eos_data_250m} 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.62 - 0.645 - 0.67 '2': name: '2' resolution: 250: {file_type: hdf_eos_data_250m} 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.841 - 0.8585 - 0.876 '3': name: '3' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.459 - 0.469 - 0.479 '4': name: '4' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.545 - 0.555 - 0.565 '5': name: '5' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 1.23 - 1.24 - 1.25 '6': name: '6' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 1.628 - 1.64 - 1.652 '7': name: '7' resolution: 500: {file_type: hdf_eos_data_500m} 1000: {file_type: 
hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 2.105 - 2.13 - 2.155 '8': file_type: hdf_eos_data_1000m name: '8' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.405 - 0.4125 - 0.42 '9': file_type: hdf_eos_data_1000m name: '9' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.438 - 0.443 - 0.448 '10': file_type: hdf_eos_data_1000m name: '10' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.483 - 0.488 - 0.493 '11': file_type: hdf_eos_data_1000m name: '11' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.526 - 0.531 - 0.536 '12': file_type: hdf_eos_data_1000m name: '12' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.546 - 0.551 - 0.556 13hi: file_type: hdf_eos_data_1000m name: '13hi' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.662 - 0.667 - 0.672 13lo: file_type: hdf_eos_data_1000m name: '13lo' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.662 - 0.667 - 0.672 14hi: file_type: hdf_eos_data_1000m name: '14hi' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.673 - 0.678 - 0.683 14lo: file_type: hdf_eos_data_1000m name: '14lo' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.673 - 0.678 - 0.683 '15': file_type: hdf_eos_data_1000m name: '15' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.743 - 0.748 - 0.753 '16': file_type: hdf_eos_data_1000m name: '16' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.862 - 0.8695 - 0.877 '17': file_type: hdf_eos_data_1000m name: '17' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.89 - 0.905 - 0.92 '18': file_type: hdf_eos_data_1000m name: '18' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.931 - 0.936 - 0.941 '19': file_type: hdf_eos_data_1000m name: '19' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.915 - 0.94 - 0.965 '20': file_type: hdf_eos_data_1000m name: '20' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 3.66 - 3.75 - 3.84 '21': file_type: hdf_eos_data_1000m name: '21' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 3.929 - 3.959 - 3.989 '22': file_type: hdf_eos_data_1000m name: '22' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 3.929 - 3.959 - 3.989 '23': file_type: hdf_eos_data_1000m name: '23' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 4.02 - 4.05 - 4.08 '24': file_type: hdf_eos_data_1000m name: '24' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 4.433 - 
4.4655 - 4.498 '25': file_type: hdf_eos_data_1000m name: '25' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 4.482 - 4.5155 - 4.549 '26': file_type: hdf_eos_data_1000m name: '26' resolution: 1000 calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 1.36 - 1.375 - 1.39 '27': file_type: hdf_eos_data_1000m name: '27' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 6.535 - 6.715 - 6.895 '28': file_type: hdf_eos_data_1000m name: '28' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 7.175 - 7.325 - 7.475 '29': file_type: hdf_eos_data_1000m name: '29' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 8.4 - 8.55 - 8.7 '30': file_type: hdf_eos_data_1000m name: '30' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 9.58 - 9.73 - 9.88 '31': file_type: hdf_eos_data_1000m name: '31' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 10.78 - 11.03 - 11.28 '32': file_type: hdf_eos_data_1000m name: '32' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 11.77 - 12.02 - 12.27 '33': file_type: hdf_eos_data_1000m name: '33' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 13.185 - 13.335 - 13.485 '34': file_type: hdf_eos_data_1000m name: '34' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 13.485 - 13.635 - 13.785 '35': file_type: hdf_eos_data_1000m name: '35' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 13.785 - 13.935 - 14.085 '36': file_type: hdf_eos_data_1000m name: '36' resolution: 1000 calibration: [brightness_temperature, radiance] coordinates: [longitude, latitude] wavelength: - 14.085 - 14.235 - 14.385 longitude: name: longitude resolution: 5000: # For EUM reduced (thinned) files file_type: hdf_eos_data_1000m 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: hdf_eos_geo 250: file_type: hdf_eos_geo standard_name: longitude units: degree latitude: name: latitude resolution: 5000: # For EUM reduced (thinned) files file_type: hdf_eos_data_1000m 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: hdf_eos_geo 250: file_type: hdf_eos_geo standard_name: latitude units: degree solar_zenith_angle: name: solar_zenith_angle sensor: modis resolution: [1000, 500, 250] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] solar_azimuth_angle: name: solar_azimuth_angle sensor: modis resolution: [1000, 500, 250] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] satellite_zenith_angle: name: satellite_zenith_angle sensor: modis resolution: [1000, 500, 250] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] satellite_azimuth_angle: name: satellite_azimuth_angle sensor: modis resolution: [1000, 500, 250] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] file_types: hdf_eos_data_250m: file_patterns: - 'M{platform_indicator:1s}D02Qkm_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 
'M{platform_indicator:1s}D02QKM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02QKM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D02QKM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.250m.hdf' file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSBandReader hdf_eos_data_500m: file_patterns: - 'M{platform_indicator:1s}D02Hkm_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02HKM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02HKM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D02HKM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.500m.hdf' file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSBandReader hdf_eos_data_1000m: file_patterns: - 'M{platform_indicator:1s}D021km_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'thin_M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D021KM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.1000m.hdf' - 'M{platform_indicator:1s}D021KM_A{start_time:%Y%j_%H%M}_{collection:03d}_NRT.hdf' file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader hdf_eos_geo: file_patterns: - 'M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf' file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader satpy-0.20.0/satpy/etc/readers/modis_l2.yaml000066400000000000000000000051261362525524100206760ustar00rootroot00000000000000reader: default_datasets: [cloud_mask] description: MODIS HDF-EOS Cloud Mask Reader name: modis_l2 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [modis] file_types: mod35_hdf: file_patterns: - 'M{platform_indicator:1s}D35_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler hdf_eos_geo: file_patterns: - 'M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf' file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader datasets: cloud_mask: # byte Cloud_Mask(Byte_Segment, Cell_Along_Swath_1km, Cell_Across_Swath_1km) name: cloud_mask resolution: [1000, 250] # The dimension of the dataset where the byte information is stored byte_dimension: 0 # Different logic depending on the resolution byte: - 1000: 0 - 250: [4, 5] bit_start: - 1000: 1 - 250: 0 bit_count: - 1000: 2 - 250: 1 # Quality Assurance flag is necessary for 250m resolution dataset 
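  # Illustrative sketch of what the byte/bit_start/bit_count settings above
  # select (this mirrors the configuration, not the reader's actual code):
  # for the 1000 m cloud mask, byte 0 is used with bit_start=1 and
  # bit_count=2, so each pixel's value is
  #   (byte0 >> 1) & 0b11
  # i.e. the 2-bit field in bits 1-2; the 250 m case reads single bits
  # (bit_start=0, bit_count=1) from bytes 4 and 5.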
quality_assurance: - 250: True file_key: Cloud_Mask file_type: mod35_hdf coordinates: [longitude, latitude] longitude: name: longitude resolution: 5000: file_type: mod35_hdf 1000: file_type: [hdf_eos_geo, mod35_hdf] 500: file_type: hdf_eos_geo 250: file_type: hdf_eos_geo standard_name: longitude units: degree latitude: name: latitude resolution: 5000: # For EUM reduced (thinned) files file_type: mod35_hdf 1000: file_type: [hdf_eos_geo, mod35_hdf] 500: file_type: hdf_eos_geo 250: file_type: hdf_eos_geo standard_name: latitude units: degree quality_assurance: # byte Quality_Assurance(Cell_Along_Swath_1km, Cell_Across_Swath_1km, QA_Dimension) name: quality_assurance resolution: 1000 # The dimension of the dataset where the byte information is stored byte_dimension: 2 # The byte to consider to extract relevant bits byte: 0 bit_start: 0 bit_count: 1 file_key: Quality_Assurance file_type: mod35_hdf coordinates: [longitude, latitude] satpy-0.20.0/satpy/etc/readers/msi_safe.yaml000066400000000000000000000065021362525524100207530ustar00rootroot00000000000000reader: description: SAFE Reader for MSI data (Sentinel-2) name: msi_safe sensors: [msi] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: safe_granule: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] requires: [safe_g_metadata] safe_g_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] datasets: B01: name: B01 sensor: MSI wavelength: [0.415, 0.443, 0.470] resolution: 60 file_type: safe_granule B02: name: B02 sensor: MSI wavelength: [0.440, 0.490, 0.540] resolution: 10 file_type: safe_granule B03: name: B03 sensor: MSI wavelength: [0.540, 0.560, 0.580] resolution: 10 file_type: safe_granule B04: name: B04 sensor: MSI wavelength: [0.645, 0.665, 0.685] resolution: 10 file_type: safe_granule B05: name: B05 sensor: MSI wavelength: [0.695, 0.705, 0.715] resolution: 20 file_type: safe_granule B06: name: B06 sensor: MSI wavelength: [0.731, 0.740, 0.749] resolution: 20 file_type: safe_granule B07: name: B07 sensor: MSI wavelength: [0.764, 0.783, 0.802] resolution: 20 file_type: safe_granule B08: name: B08 sensor: MSI wavelength: [0.780, 0.842, 0.905] resolution: 10 file_type: safe_granule B8A: name: B8A sensor: MSI wavelength: [0.855, 0.865, 0.875] resolution: 20 file_type: safe_granule B09: name: B09 sensor: MSI wavelength: [0.935, 0.945, 0.955] resolution: 60 file_type: safe_granule B10: name: B10 sensor: MSI wavelength: [1.365, 1.375, 1.385] resolution: 60 file_type: safe_granule B11: name: B11 sensor: MSI wavelength: [1.565, 1.610, 1.655] resolution: 20 file_type: safe_granule B12: name: B12 sensor: MSI wavelength: [2.100, 2.190, 2.280] resolution: 20 file_type: safe_granule solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] file_type: safe_g_metadata xml_tag: 
Sun_Angles_Grid/Zenith solar_azimuth_angle: name: solar_azimuth_angle resolution: [10, 20, 60] file_type: safe_g_metadata xml_tag: Sun_Angles_Grid/Azimuth satellite_azimuth_angle: name: satellite_azimuth_angle resolution: [10, 20, 60] file_type: safe_g_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Azimuth satellite_zenith_angle: name: satellite_zenith_angle resolution: [10, 20, 60] file_type: safe_g_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith satpy-0.20.0/satpy/etc/readers/mtsat2-imager_hrit.yaml000066400000000000000000000106141362525524100226660ustar00rootroot00000000000000reader: name: mtsat2-imager_hrit short_name: MTSAT-2 Imager HRIT long_name: MTSAT-2 Imager Level 1 (HRIT) description: > Reader for MTSAT-2 Imager data in JMA HRIT format. Note that there exist two versions of the dataset. A segmented (data split into multiple files) and a non-segmented version (all data in one file). References: - https://www.wmo-sat.info/oscar/instruments/view/219 - http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html sensors: [mtsat2_imager] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader file_types: hrit_vis: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS' hrit_ir1: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1' hrit_ir2: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2' hrit_ir3: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3' hrit_ir4: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4' hrit_vis_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir1_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir2_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir3_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir4_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 datasets: VIS: name: VIS sensor: mtsat2_imager wavelength: [0.55, 0.675, 0.80] resolution: 1000 calibration: counts: standard_name: counts units: 1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: [hrit_vis, hrit_vis_seg] IR1: name: IR1 sensor: mtsat2_imager wavelength: [10.3, 10.8, 11.3] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature 
units: "K" file_type: [hrit_ir1, hrit_ir1_seg] IR2: name: IR2 sensor: mtsat2_imager wavelength: [11.5, 12.0, 12.5] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir2, hrit_ir2_seg] IR3: name: IR3 sensor: mtsat2_imager wavelength: [6.5, 6.75, 7.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir3, hrit_ir3_seg] IR4: name: IR4 sensor: mtsat2_imager wavelength: [3.5, 3.75, 4.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir4, hrit_ir4_seg] satpy-0.20.0/satpy/etc/readers/nucaps.yaml000066400000000000000000000075201362525524100204570ustar00rootroot00000000000000reader: description: NUCAPS Retrieval Reader name: nucaps reader: !!python/name:satpy.readers.nucaps.NUCAPSReader sensors: [cris, atms] file_types: nucaps: file_reader: !!python/name:satpy.readers.nucaps.NUCAPSFileHandler file_patterns: - 'NUCAPS-EDR_{nucaps_version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' - 'NUCAPS-sciEDR_{am_pm:2s}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S}_e{end_time:%Y%m%d%H%M%S}_STC_fsr.nc' datasets: longitude: name: longitude file_type: nucaps file_key: Longitude units: degrees standard_name: longitude latitude: name: latitude file_type: nucaps file_key: Latitude units: degrees standard_name: latitude Solar_Zenith: name: Solar_Zenith coordinates: [longitude, latitude] file_type: nucaps Topography: name: Topography coordinates: [longitude, latitude] file_type: nucaps Land_Fraction: name: Land_Fraction coordinates: [longitude, latitude] file_type: nucaps Surface_Pressure: name: Surface_Pressure coordinates: [longitude, latitude] file_type: nucaps Skin_Temperature: name: Skin_Temperature coordinates: [longitude, latitude] file_type: nucaps Quality_Flag: name: Quality_Flag coordinates: [longitude, latitude] file_type: nucaps # Can't include cloud products until we figure out how to handle cloud layers dimension # Cloud_Top_Pressure: # name: Cloud_Top_Pressure # coordinates: [longitude, latitude] # file_type: nucaps # pressure_based: True # Cloud_Top_Fraction: # name: Cloud_Top_Fraction # coordinates: [longitude, latitude] # file_type: nucaps # pressure_based: True Temperature: name: Temperature coordinates: [longitude, latitude] file_type: nucaps pressure_based: True H2O: name: H2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True H2O_MR: name: H2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True O3: name: O3 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True O3_MR: name: O3_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Liquid_H2O: name: Liquid_H2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Liquid_H2O_MR: name: Liquid_H2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO: name: CO coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO_MR: name: CO_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CH4: name: CH4 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CH4_MR: name: CH4_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO2: 
name: CO2 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True HNO3: name: HNO3 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True HNO3_MR: name: HNO3_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True N2O: name: N2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True N2O_MR: name: N2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True SO2: name: SO2 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True SO2_MR: name: SO2_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Pressure_Levels: name: Pressure_Levels standard_name: air_pressure file_type: nucaps file_key: Pressure index: 0 satpy-0.20.0/satpy/etc/readers/nwcsaf-geo.yaml000066400000000000000000000376551362525524100212330ustar00rootroot00000000000000reader: description: NetCDF4 reader for the NWCSAF MSG Seviri 2016/2018 format name: nwcsaf-geo sensors: [seviri] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMA_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CT_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CTTH_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_cmic: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMIC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_pc: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_PC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_crr: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CRR_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ishai: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_iSHAI_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ci: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CI_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_rdt: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_RDT-CW_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-NG_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii_tf: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-TF_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii_gw: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-GW_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] datasets: # ---- CMA products ------------ cma: name: cma sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_pal: name: cma_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_cloudsnow: name: cma_cloudsnow sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_cloudsnow_pal: name: cma_cloudsnow_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_dust: name: cma_dust sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_dust_pal: name: cma_dust_pal 
sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_volcanic: name: cma_volcanic sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_volcanic_pal: name: cma_volcanic_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_conditions: name: cma_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma cma_status_flag: name: cma_status_flag sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cma # ---- CT products ------------ ct: name: ct sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ct ct_pal: name: ct_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ct ct_cumuliform: name: ct_cumuliform sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ct ct_cumuliform_pal: name: ct_cumuliform_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ct ct_multilayer: name: ct_multilayer sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ct ct_multilayer_pal: name: ct_multilayer_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ct ct_quality: name: ct_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ct ct_conditions: name: ct_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ct # ---- CTTH products ------------ ctth_alti: name: ctth_alti sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_alti_pal: name: ctth_alti_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_pres: name: ctth_pres sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_pres_pal: name: ctth_pres_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_tempe: name: ctth_tempe sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_tempe_pal: name: ctth_tempe_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_effectiv: name: ctth_effectiv sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_effectiv_pal: name: ctth_effectiv_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_method: name: ctth_method sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_conditions: name: ctth_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_quality: name: ctth_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth ctth_status_flag: name: ctth_status_flag sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ctth # ---- CMIC products ------------ cmic_phase: name: cmic_phase sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_phase_pal: name: cmic_phase_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_reff: name: cmic_reff sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_reff_pal: name: cmic_reff_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_cot: name: cmic_cot sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_cot_pal: name: cmic_cot_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_lwp: name: cmic_lwp sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_lwp_pal: name: cmic_lwp_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_iwp: name: cmic_iwp sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_iwp_pal: name: cmic_iwp_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_status_flag: name: cmic_status_flag sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_conditions: name: cmic_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic cmic_quality: name: cmic_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_cmic # ---- PC products ------------ pc:
name: pc sensor: seviri resolution: 3000 file_type: nc_nwcsaf_pc pc_pal: name: pc_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_pc pc_conditions: name: pc_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_pc pc_quality: name: pc_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_pc # ---- CRR products ------------ crr: name: crr sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr crr_pal: name: crr_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr crr_accum: name: crr_accum sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr crr_accum_pal: name: crr_accum_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr crr_intensity: name: crr_intensity sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr crr_intensity_pal: name: crr_intensity_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr crr_status_flag: name: crr_status_flag sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr crr_conditions: name: crr_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr crr_quality: name: crr_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_crr # ----iSHAI products ------------ ishai_tpw: name: ishai_tpw sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_tpw_pal: name: ishai_tpw_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_shw: name: ishai_shw sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_shw_pal: name: ishai_shw_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_li: name: ishai_li sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_li_pal: name: ishai_li_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ki: name: ishai_ki sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ki_pal: name: ishai_ki_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_bl: name: ishai_bl sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_bl_pal: name: ishai_bl_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ml: name: ishai_ml sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ml_pal: name: ishai_ml_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_hl: name: ishai_hl sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_hl_pal: name: ishai_hl_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_toz: name: ishai_toz sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_toz_pal: name: ishai_toz_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_skt: name: ishai_skt sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_skt_pal: name: ishai_skt_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftpw: name: ishai_difftpw sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftpw_pal: name: ishai_difftpw_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffshw: name: ishai_diffshw sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffshw_pal: name: ishai_diffshw_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffli: name: ishai_diffli sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffli_pal: name: ishai_diffli_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai
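  # Usage sketch (illustrative only; the filename is a made-up example
  # following the S_NWC_iSHAI file_pattern defined above):
  #   from satpy import Scene
  #   scn = Scene(reader='nwcsaf-geo',
  #               filenames=['S_NWC_iSHAI_MSG4_MSG-N-VISIR_20190907T120000Z.nc'])
  #   scn.load(['ishai_tpw', 'ishai_tpw_pal'])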
ishai_diffki: name: ishai_diffki sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffki_pal: name: ishai_diffki_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffbl: name: ishai_diffbl sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffbl_pal: name: ishai_diffbl_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffml: name: ishai_diffml sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffml_pal: name: ishai_diffml_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffhl: name: ishai_diffhl sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffhl_pal: name: ishai_diffhl_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftoz: name: ishai_difftoz sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftoz_pal: name: ishai_difftoz_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffskt: name: ishai_diffskt sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffskt_pal: name: ishai_diffskt_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ihsai_status_flag: name: ihsai_status_flag sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_residual: name: ishai_residual sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_residual_pal: name: ishai_residual_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_conditions: name: ishai_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai ishai_quality: name: ishai_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ishai # ----CI products ------------ ci_prob30: name: ci_prob30 sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ci ci_prob60: name: ci_prob60 sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ci ci_prob90: name: ci_prob90 sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ci # 2018 version ci_prob_pal: name: ci_prob_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ci # 2016 Version ci_pal: name: ci_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ci ci_status_flag: name: ci_status_flag sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ci ci_conditions: name: ci_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ci ci_quality: name: ci_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_ci # ----RDT products ------------ MapCellCatType: name: MapCellCatType sensor: seviri resolution: 3000 file_type: nc_nwcsaf_rdt MapCellCatType_pal: name: MapCellCatType_pal sensor: seviri resolution: 3000 file_type: nc_nwcsaf_rdt MapCell_conditions: name: MapCell_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_rdt MapCell_quality: name: MapCell_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_rdt # ----ASII products in multiple files ------------ asii_turb_trop_prob: name: asii_turb_trop_prob sensor: seviri resolution: 3000 file_type: [nc_nwcsaf_asii_tf, nc_nwcsaf_asii] asii_turb_prob_pal: name: asii_turb_prob_pal sensor: seviri resolution: 3000 file_type: [nc_nwcsaf_asii_tf, nc_nwcsaf_asii_gw] # ----ASII-TF product ------------ asii_turb_prob_status_flag: name: asii_turb_trop_prob_status_flag sensor: seviri resolution: 3000 file_type: nc_nwcsaf_asii_tf asiitf_conditions: name: asiitf_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_asii_tf asiitf_quality: name: asiitf_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_asii_tf # ----ASII-GW product ------------ asii_turb_wave_prob: name: 
asii_turb_wave_prob sensor: seviri resolution: 3000 file_type: nc_nwcsaf_asii_gw asii_turb_wave_prob_status_flag: name: asii_turb_wave_prob_status_flag sensor: seviri resolution: 3000 file_type: nc_nwcsaf_asii_gw asiigw_conditions: name: asiigw_conditions sensor: seviri resolution: 3000 file_type: nc_nwcsaf_asii_gw asiigw_quality: name: asiigw_quality sensor: seviri resolution: 3000 file_type: nc_nwcsaf_asii_gw satpy-0.20.0/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml000066400000000000000000000100551362525524100223620ustar00rootroot00000000000000reader: description: HDF5 reader for the NWCSAF/Geo Seviri 2013 format name: nwcsaf-msg2013-hdf5 sensors: [seviri] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: h5_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CMa__201908271145_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CMa__{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5'] h5_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CT___201906241245_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CT___{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5'] h5_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CTTH_201906241245_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CTTH_{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5'] datasets: # ---- CMA products ------------ cma: name: cma sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_pal: name: cma_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_cloudsnow: name: cma_cloudsnow sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_cloudsnow_pal: name: cma_cloudsnow_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_dust: name: cma_dust sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_dust_pal: name: cma_dust_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_volcanic: name: cma_volcanic sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_volcanic_pal: name: cma_volcanic_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_conditions: name: cma_conditions sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_status_flag: name: cma_status_flag sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma # ---- CT products ------------ ct: name: ct sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT ct_pal: name: ct_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: 01-PALETTE ct_quality: name: ct_quality sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT_QUALITY ct_phase: name: ct_phase sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT_PHASE ct_phase_pal: name: ct_phase_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: 02-PALETTE # ---- CTTH products ------------ ctth_alti: name: ctth_alti sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_HEIGHT ctth_alti_pal: name: ctth_alti_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 02-PALETTE ctth_pres: name: ctth_pres sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_PRESS ctth_pres_pal: name: ctth_pres_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 01-PALETTE ctth_tempe: name: ctth_tempe sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 
CTTH_TEMPER ctth_tempe_pal: name: ctth_tempe_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 03-PALETTE ctth_effective_cloudiness: name: ctth_effective_cloudiness sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_EFFECT ctth_effective_cloudiness_pal: name: ctth_eff_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 04-PALETTE ctth_quality: name: ctth_quality sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_QUALITY satpy-0.20.0/satpy/etc/readers/nwcsaf-pps_nc.yaml000066400000000000000000000131641362525524100217300ustar00rootroot00000000000000reader: description: NetCDF4 reader for the NWCSAF/PPS 2014 format name: nwcsaf-pps_nc sensors: ['avhrr-3', 'viirs', 'modis'] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMA_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CMA_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CMA_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CT_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CT_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CT_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CTTH_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CTTH_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CTTH_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_pc: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_PC_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] nc_nwcsaf_cpp: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CPP_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] datasets: lon: name: lon file_type: - nc_nwcsaf_cma - nc_nwcsaf_ct - nc_nwcsaf_ctth units: "degrees" standard_name: longitude lat: name: lat file_type: - nc_nwcsaf_cma - nc_nwcsaf_ct - nc_nwcsaf_ctth units: "degrees" standard_name: latitude # ---- CMA products ------------ cma: name: cma file_type: nc_nwcsaf_cma coordinates: [lon, lat] cma_pal: name: cma_pal file_type: nc_nwcsaf_cma cma_extended: name: cma_extended file_type: nc_nwcsaf_cma coordinates: [lon, lat] cma_extended_pal: name: cma_extended_pal file_type: nc_nwcsaf_cma # ---- CT products ------------ ct: name: ct file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype ct_conditions: name: ct_conditions file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype_conditions ct_quality: name: ct_quality file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype_quality ct_status_flag: name: ct_status_flag file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype_status_flag ct_pal: name: ct_pal file_type: nc_nwcsaf_ct 
standard_name: palette # ---- PC products ------------ pc_conditions: name: pc_conditions file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_intense: name: pc_precip_intense file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_moderate: name: pc_precip_moderate file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_light: name: pc_precip_light file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_status_flag: name: pc_status_flag file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_quality: name: pc_quality file_type: nc_nwcsaf_pc coordinates: [lon, lat] # ---- CTTH products ------------ ctth_alti: name: ctth_alti file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_alti_pal: name: ctth_alti_pal file_type: nc_nwcsaf_ctth ctth_quality: name: ctth_quality file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_conditions: name: ctth_conditions file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_status_flag: name: ctth_status_flag file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_pres: name: ctth_pres file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_pres_pal: name: ctth_pres_pal file_type: nc_nwcsaf_ctth ctth_tempe: name: ctth_tempe file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_tempe_pal: name: ctth_tempe_pal file_type: nc_nwcsaf_ctth # ---- CPP products ------------ cpp_phase: name: cpp_phase file_type: nc_nwcsaf_cpp coordinates: [lon, lat] cpp_phase_pal: name: cpp_phase_pal file_type: nc_nwcsaf_cpp cpp_reff: name: cpp_reff file_type: nc_nwcsaf_cpp coordinates: [lon, lat] cpp_reff_pal: name: cpp_reff_pal file_type: nc_nwcsaf_cpp cpp_cot: name: cpp_cot file_type: nc_nwcsaf_cpp coordinates: [lon, lat] cpp_cot_pal: name: cpp_cot_pal file_type: nc_nwcsaf_cpp cpp_cwp: name: cpp_cwp file_type: nc_nwcsaf_cpp coordinates: [lon, lat] cpp_cwp_pal: name: cpp_cwp_pal file_type: nc_nwcsaf_cpp cpp_iwp: name: cpp_iwp file_type: nc_nwcsaf_cpp coordinates: [lon, lat] cpp_iwp_pal: name: cpp_iwp_pal file_type: nc_nwcsaf_cpp cpp_lwp: name: cpp_lwp file_type: nc_nwcsaf_cpp coordinates: [lon, lat] cpp_lwp_pal: name: cpp_lwp_pal file_type: nc_nwcsaf_cpp satpy-0.20.0/satpy/etc/readers/olci_l1b.yaml000066400000000000000000000312561362525524100206550ustar00rootroot00000000000000reader: description: NC Reader for OLCI data name: olci_l1b sensors: [olci] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: esa_l1b: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI1B file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance.nc' requires: [esa_cal] esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' - 
'{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' esa_cal: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCICal file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/instrument_data.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/instrument_data.nc' esa_meteo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIMeteo file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_meteo.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_meteo.nc' datasets: longitude: name: longitude resolution: 300 file_type: esa_geo standard_name: longitude units: degree latitude: name: latitude resolution: 300 file_type: esa_geo standard_name: latitude units: degree Oa01: name: Oa01 sensor: olci wavelength: [0.3925,0.4,0.4075] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa02: name: Oa02 sensor: olci wavelength: [0.4075, 0.4125, 0.4175] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa03: name: Oa03 sensor: olci wavelength: [0.4375,0.4425,0.4475] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa04: name: Oa04 sensor: olci wavelength: [0.485,0.49,0.495] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa05: name: Oa05 sensor: 
olci wavelength: [0.505,0.51,0.515] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa06: name: Oa06 sensor: olci wavelength: [0.555,0.56,0.565] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa07: name: Oa07 sensor: olci wavelength: [0.615,0.62,0.625] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa08: name: Oa08 sensor: olci wavelength: [0.66,0.665,0.67] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa09: name: Oa09 sensor: olci wavelength: [0.67,0.67375,0.6775] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa10: name: Oa10 sensor: olci wavelength: [0.6775,0.68125,0.685] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa11: name: Oa11 sensor: olci wavelength: [0.70375,0.70875,0.71375] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa12: name: Oa12 sensor: olci wavelength: [0.75,0.75375,0.7575] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa13: name: Oa13 sensor: olci wavelength: [0.76,0.76125,0.7625] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa14: name: Oa14 sensor: olci wavelength: [0.760625, 0.764375, 0.768125] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa15: name: Oa15 sensor: olci wavelength: [0.76625, 0.7675, 0.76875] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa16: name: Oa16 sensor: olci wavelength: [0.77125, 0.77875, 0.78625] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: 
W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa17: name: Oa17 sensor: olci wavelength: [0.855, 0.865, 0.875] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa18: name: Oa18 sensor: olci wavelength: [0.88, 0.885, 0.89] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa19: name: Oa19 sensor: olci wavelength: [0.895, 0.9, 0.905] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa20: name: Oa20 sensor: olci wavelength: [0.93, 0.94, 0.95] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa21: name: Oa21 sensor: olci wavelength: [1.0, 1.02, 1.04] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b solar_zenith_angle: name: solar_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles solar_azimuth_angle: name: solar_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_zenith_angle: name: satellite_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_azimuth_angle: name: satellite_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles humidity: name: humidity sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo sea_level_pressure: name: sea_level_pressure sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo total_columnar_water_vapour: name: total_columnar_water_vapour sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo total_ozone: name: total_ozone sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo satpy-0.20.0/satpy/etc/readers/olci_l2.yaml000066400000000000000000000311161362525524100205070ustar00rootroot00000000000000reader: description: NC Reader for OLCI data name: olci_l2 sensors: [olci] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: esa_l2_reflectance: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc'] esa_l2_chl_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: 
['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc'] esa_l2_chl_oc4me: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc'] esa_l2_iop_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc'] esa_l2_trsp: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc'] esa_l2_tsm_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc'] esa_l2_wqsf: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc'] esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc'] esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc'] datasets: longitude: name: longitude resolution: 300 file_type: esa_geo standard_name: longitude units: degree latitude: name: latitude resolution: 300 file_type: esa_geo standard_name: latitude units: degree Oa01: name: Oa01 sensor: olci wavelength: [0.3925,0.4,0.4075] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa02: name: Oa02 sensor: olci wavelength: [0.4075, 0.4125, 0.4175] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa03: name: Oa03 sensor: olci wavelength: [0.4375,0.4425,0.4475] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: 
water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa04: name: Oa04 sensor: olci wavelength: [0.485,0.49,0.495] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa05: name: Oa05 sensor: olci wavelength: [0.505,0.51,0.515] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa06: name: Oa06 sensor: olci wavelength: [0.555,0.56,0.565] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa07: name: Oa07 sensor: olci wavelength: [0.615,0.62,0.625] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa08: name: Oa08 sensor: olci wavelength: [0.66,0.665,0.67] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa09: name: Oa09 sensor: olci wavelength: [0.67,0.67375,0.6775] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa10: name: Oa10 sensor: olci wavelength: [0.6775,0.68125,0.685] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa11: name: Oa11 sensor: olci wavelength: [0.70375,0.70875,0.71375] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa12: name: Oa12 sensor: olci wavelength: [0.75,0.75375,0.7575] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa13: name: Oa13 sensor: olci wavelength: [0.76,0.76125,0.7625] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa14: name: Oa14 sensor: olci wavelength: [0.760625, 0.764375, 0.768125] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa15: name: Oa15 sensor: olci wavelength: [0.76625, 0.7675, 0.76875] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa16: name: Oa16 sensor: olci wavelength: [0.77125, 0.77875, 0.78625] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: 
standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa17: name: Oa17 sensor: olci wavelength: [0.855, 0.865, 0.875] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa18: name: Oa18 sensor: olci wavelength: [0.88, 0.885, 0.89] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa19: name: Oa19 sensor: olci wavelength: [0.895, 0.9, 0.905] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa20: name: Oa20 sensor: olci wavelength: [0.93, 0.94, 0.95] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa21: name: Oa21 sensor: olci wavelength: [1.0, 1.02, 1.04] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance chl_oc4me: name: chl_oc4me sensor: olci resolution: 300 calibration: reflectance: standard_name: algal_pigment_concentration units: "lg(re mg.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_chl_oc4me nc_key: CHL_OC4ME chl_nn: name: chl_nn sensor: olci resolution: 300 calibration: reflectance: standard_name: algal_pigment_concentration units: "lg(re mg.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_chl_nn nc_key: CHL_NN iop_nn: name: iop_nn sensor: olci resolution: 300 calibration: reflectance: standard_name: cdm_absorption_coefficient units: "lg(re m-l)" coordinates: [longitude, latitude] file_type: esa_l2_iop_nn nc_key: ADG443_NN trsp: name: trsp sensor: olci resolution: 300 calibration: reflectance: standard_name: diffuse_attenuation_coefficient units: "lg(re m-l)" coordinates: [longitude, latitude] file_type: esa_l2_trsp nc_key: KD490_M07 tsm_nn: name: tsm_nn sensor: olci resolution: 300 calibration: reflectance: standard_name: total_suspended_matter_concentration units: "lg(re g.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_tsm_nn nc_key: TSM_NN wqsf: name: wqsf sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_l2_wqsf nc_key: WQSF mask: name: mask sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_l2_wqsf nc_key: WQSF solar_zenith_angle: name: solar_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles solar_azimuth_angle: name: solar_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_zenith_angle: name: satellite_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_azimuth_angle: name: satellite_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satpy-0.20.0/satpy/etc/readers/omps_edr.yaml000066400000000000000000000132031362525524100207710ustar00rootroot00000000000000reader: description: Generic OMPS EDR reader name: omps_edr reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader 
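# Usage note (a hedged sketch, not part of the original config): the datasets
# defined below are loaded through Satpy's Scene API. The directory and glob
# pattern in this example are hypothetical; only the 'omps_edr' reader name
# and the dataset names come from this file.
#
#     from glob import glob
#     from satpy import Scene
#
#     filenames = glob('/data/omps/OMPS-NPP-TC_EDR_SO2NRT-*.he5')  # hypothetical path
#     scn = Scene(reader='omps_edr', filenames=filenames)
#     scn.load(['so2_trm'])  # lon/lat are attached via the 'coordinates' entries
#     so2 = scn['so2_trm']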
sensors: [omps] file_types: # HDF5 files from NASA GES DISC HTTP # https://search.earthdata.nasa.gov/search/granules # https://snpp-omps.gesdisc.eosdis.nasa.gov/data//SNPP_OMPS_Level2/OMPS_NPP_NMSO2_L2.2/ # https://disc.sci.gsfc.nasa.gov/ omps_tc_so2_edr_ges_disc: file_reader: !!python/name:satpy.readers.omps_edr.EDRFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}_NMSO2-L2_{start_time:%Ym%m%dt%H%M%S}_o{orbit:05d}_{end_time:%Ym%m%dt%H%M%S}.h5'] # HDF5-EOS files from NASA DRL # ftp://is.sci.gsfc.nasa.gov/gsfcdata/npp/omps/level2/ omps_tc_so2_edr: file_reader: !!python/name:satpy.readers.omps_edr.EDREOSFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}-TC_EDR_SO2NRT-{start_time:%Ym%m%dt%H%M%S}-o{orbit:05d}-{end_time:%Ym%m%dt%H%M%S}.he5'] # HDF5 files from NASA DRL # ftp://is.sci.gsfc.nasa.gov/gsfcdata/npp/omps/level2/ omps_tc_to3_edr: file_reader: !!python/name:satpy.readers.omps_edr.EDRFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}-TC_EDR_TO3-{version}-{start_time:%Ym%m%dt%H%M%S}-o{orbit:05d}-{end_time:%Ym%m%dt%H%M%S}.h5'] # ftp://omisips1.omisips.eosdis.nasa.gov/OMPS/LANCE/NMSO2-L2-NRT-NRT/ # ftp://omisips1.omisips.eosdis.nasa.gov/OMPS/LANCE/NMSO2-L2-NRT-NRT/OMPS-NPP_NMSO2-L2-NRT_2017m0804t030731_o29890_2017m0804t021637.he5 datasets: reflectivity_331: name: reflectivity_331 resolution: 50000 coordinates: [longitude_to3, latitude_to3] file_type: omps_tc_to3_edr file_key: SCIENCE_DATA/Reflectivity331 uvaerosol_index: name: uvaerosol_index resolution: 50000 coordinates: [longitude_to3, latitude_to3] file_type: omps_tc_to3_edr file_key: SCIENCE_DATA/UVAerosolIndex so2_trm: name: so2_trm resolution: 50000 coordinates: [longitude_so2, latitude_so2] file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM longitude_to3: name: longitude_to3 resolution: 50000 file_type: omps_tc_to3_edr file_key: GEOLOCATION_DATA/Longitude units: degrees standard_name: longitude latitude_to3: name: latitude_to3 resolution: 50000 file_type: omps_tc_to3_edr file_key: GEOLOCATION_DATA/Latitude units: degrees standard_name: latitude longitude_so2: name: longitude_so2 resolution: 50000 file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude units: degrees standard_name: longitude latitude_so2: name: latitude_so2 resolution: 50000 file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude units: degrees standard_name: latitude #[file_key:so2_trm_eos] #variable_name=HDFEOS/SWATHS/{file_group}/Data Fields/ColumnAmountSO2_TRM #units_attr=Units #missing_attr=MissingValue #factor=ScaleFactor #offset=Offset aerosol_index: name: aerosol_index resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/AerosolIndex cldfra: name: cldfra resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudFraction cldpres: name: cldpres resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudPressure cldrefl: name: cldrefl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudRefletivity tco3_dvcf: name: tco3_dvcf resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountO3isf 
tco3_toms: name: tco3_toms resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountO3pair tcso2_abv: name: tcso2_abv resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_ABV tcso2_pbl: name: tcso2_pbl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_PBL tcso2_stl: name: tcso2_stl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_STL tcso2_trl: name: tcso2_trl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRL tcso2_trm: name: tcso2_trm resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRM tcso2_tru: name: tcso2_tru resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRU longitude_so2_gd: name: longitude_so2_gd resolution: 50000 file_type: omps_tc_so2_edr_ges_disc file_key: GeolocationData/Longitude units: degrees standard_name: longitude latitude_so2_gd: name: latitude_so2_gd resolution: 50000 file_type: omps_tc_so2_edr_ges_disc file_key: GeolocationData/Latitude units: degrees standard_name: latitude satpy-0.20.0/satpy/etc/readers/safe_sar_l2_ocn.yaml000066400000000000000000000075061362525524100222110ustar00rootroot00000000000000reader: description: SAFE Reader for SAR L2 OCN data name: safe_sar_l2_ocn sensors: [sar-c] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: safe_measurement: file_reader: !!python/name:satpy.readers.safe_sar_l2_ocn.SAFENC file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.nc'] datasets: owiLat: name: owiLat file_type: safe_measurement standard_name: latitude units: degree owiLon: name: owiLon file_type: safe_measurement standard_name: longitude units: degree owiWindDirection: name: owiWindDirection sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degree owiWindSpeed: name: owiWindSpeed sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m s-1 owiEcmwfWindDirection: name: owiEcmwfWindDirection sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degree owiEcmwfWindSpeed: name: owiEcmwfWindSpeed sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m s-1 owiHs: name: owiHs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiWl: name: owiWl sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiDirmet: name: owiDirmet sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiWindSeaHs: name: owiWindSeaHs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiIncidenceAngle: 
name: owiIncidenceAngle sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiElevationAngle: name: owiElevationAngle sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiNrcs: name: owiNrcs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiNesz: name: owiNesz sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiNrcsNeszCorr: name: owiNrcsNeszCorr sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiPolarisationName: name: owiPolarisationName sensor: sar-c file_type: safe_measurement owiPBright: name: owiPBright sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: '%' owiNrcsCmod: name: owiNrcsCmod sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiCalConstObsi: name: owiCalConstObsi sensor: sar-c file_type: safe_measurement owiCalConstInci: name: owiCalConstInci sensor: sar-c file_type: safe_measurement owiInversionQuality: name: owiInversionQuality sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] owiMask: name: owiMask sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] owiHeading: name: owiHeading sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiWindQuality: name: owiWindQuality sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] satpy-0.20.0/satpy/etc/readers/sar-c_safe.yaml000066400000000000000000000101231362525524100211620ustar00rootroot00000000000000reader: description: SAFE Reader for SAR-C data name: sar-c_safe sensors: [sar-c] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: safe_measurement: file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEGRD file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.tiff'] requires: [safe_calibration, safe_noise] safe_calibration: file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXML file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/calibration-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] requires: [safe_gcps] safe_noise: file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXML file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/noise-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] 
requires: [safe_gcps] safe_gcps: file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXML file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] datasets: latitude: name: latitude resolution: 80 file_type: safe_measurement standard_name: latitude polarization: [hh, hv, vv, vh] units: degree longitude: name: longitude resolution: 80 file_type: safe_measurement standard_name: longitude polarization: [hh, hv, vv, vh] units: degree measurement: name: measurement sensor: sar-c wavelength: [5.400, 5.405, 5.410] resolution: 80 polarization: [hh, hv, vv, vh] calibration: gamma: standard_name: backscatter units: 1 sigma_nought: standard_name: backscatter units: 1 beta_nought: standard_name: backscatter units: 1 coordinates: [longitude, latitude] file_type: safe_measurement noise: name: noise sensor: sar-c resolution: 80 polarization: [hh, hv, vv, vh] file_type: safe_noise xml_item: - noiseVector - noiseRangeVector xml_tag: - noiseLut - noiseRangeLut sigma: name: sigma_squared sensor: sar-c resolution: 80 polarization: [hh, hv, vv, vh] file_type: safe_calibration xml_item: calibrationVector xml_tag: sigma beta: name: beta_squared sensor: sar-c resolution: 80 polarization: [hh, hv, vv, vh] file_type: safe_calibration xml_item: calibrationVector xml_tag: beta gamma: name: gamma_squared sensor: sar-c resolution: 80 polarization: [hh, hv, vv, vh] file_type: safe_calibration xml_item: calibrationVector xml_tag: gamma satpy-0.20.0/satpy/etc/readers/scatsat1_l2b.yaml000066400000000000000000000020321362525524100214410ustar00rootroot00000000000000reader: description: Generic Eumetsat Scatsat-1 L2B Wind field Reader name: scatsat1_l2b reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' sensors: [scatterometer] default_datasets: datasets: longitude: name: longitude resolution: 25000 file_type: scatsat standard_name: longitude units: degree latitude: name: latitude resolution: 25000 file_type: scatsat standard_name: latitude units: degree wind_speed: name: wind_speed sensor: Scatterometer resolution: 25000 coordinates: [longitude, latitude] file_type: scatsat standard_name: wind_speed wind_direction: name: wind_direction resolution: 25000 coordinates: [longitude, latitude] file_type: scatsat standard_name: wind_direction file_types: scatsat: file_reader: !!python/name:satpy.readers.scatsat1_l2b.SCATSAT1L2BFileHandler '' file_patterns: ['S1L2B{start_date:%Y%j}_{start_orbit}_{end_orbit}_{direction}_{cell_spacing}_{prod_date}T{prod_time}_{version}.h5'] satpy-0.20.0/satpy/etc/readers/seviri_l1b_hrit.yaml000066400000000000000000000333051362525524100222530ustar00rootroot00000000000000# References: # - MSG Level 1.5 Image Data Format Description # - Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent # Spectral Blackbody Radiance reader: name: seviri_l1b_hrit short_name: SEVIRI L1b HRIT long_name: MSG SEVIRI Level 1b (HRIT) description: > HRIT reader for EUMETSAT MSG SEVIRI Level 1b files. 
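# Usage note (a hedged sketch, not part of the original config): the
# GEOSegmentYAMLReader declared below stitches the per-channel HRIT segments
# (see 'expected_segments' for each file type) together with the PRO and EPI
# files, so all segments of a repeat cycle should be passed at once. The glob
# pattern is a hypothetical example.
#
#     from glob import glob
#     from satpy import Scene
#
#     filenames = glob('/data/hrit/H-000-MSG4__-*-201909251200-__')  # hypothetical path
#     scn = Scene(reader='seviri_l1b_hrit', filenames=filenames)
#     scn.load(['IR_108'])                          # brightness temperature by default
#     scn.load(['VIS006'], calibration='radiance')  # select an alternative calibration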
sensors: [seviri] default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader file_types: HRIT_HRV: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 24 HRIT_IR_016: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_039: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_087: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_097: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_108: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_120: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_134: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS006: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS008: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_062: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_073: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: 
['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_HRV_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 24 HRIT_IR_016_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_039_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_087_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_097_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_108_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_120_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_134_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS006_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS008_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_062_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_073_C: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler file_patterns: 
['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_PRO: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-PRO______-{start_time:%Y%m%d%H%M}-__'] HRIT_EPI: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-EPI______-{start_time:%Y%m%d%H%M}-__'] datasets: HRV: name: HRV resolution: 1000.134348869 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_HRV, HRIT_HRV_C] IR_016: name: IR_016 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_016, HRIT_IR_016_C] IR_039: name: IR_039 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_039, HRIT_IR_039_C] IR_087: name: IR_087 resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_087, HRIT_IR_087_C] IR_097: name: IR_097 resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_097, HRIT_IR_097_C] IR_108: name: IR_108 resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_108, HRIT_IR_108_C] IR_120: name: IR_120 resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_120, HRIT_IR_120_C] IR_134: name: IR_134 resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_IR_134, HRIT_IR_134_C] VIS006: name: VIS006 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" 
radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_VIS006, HRIT_VIS006_C] VIS008: name: VIS008 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_VIS008, HRIT_VIS008_C] WV_062: name: WV_062 resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_WV_062, HRIT_WV_062_C] WV_073: name: WV_073 resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [HRIT_WV_073, HRIT_WV_073_C] satpy-0.20.0/satpy/etc/readers/seviri_l1b_icare.yaml000066400000000000000000000134451362525524100223730ustar00rootroot00000000000000# References: # - MSG Level 1.5 Image Data Format Description # - Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent # Spectral Blackbody Radiance reader: name: seviri_l1b_icare short_name: SEVIRI L1b ICARE long_name: MSG SEVIRI Level 1b in HDF format from ICARE (Lille) description: > A reader for L1b SEVIRI data that has been retrieved from the ICARE service as HDF. sensors: [seviri] default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: ICARE_HRV: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_HRV_{version:5s}.hdf'] ICARE_IR_016: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR016_{version:5s}.hdf'] ICARE_IR_039: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR039_{version:5s}.hdf'] ICARE_IR_087: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR087_{version:5s}.hdf'] ICARE_IR_097: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR097_{version:5s}.hdf'] ICARE_IR_108: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR108_{version:5s}.hdf'] ICARE_IR_120: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR120_{version:5s}.hdf'] ICARE_IR_134: file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR134_{version:5s}.hdf'] ICARE_VIS006: file_reader: 
satpy-0.20.0/satpy/etc/readers/seviri_l1b_icare.yaml

# References:
#   - MSG Level 1.5 Image Data Format Description
#   - Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent
#     Spectral Blackbody Radiance

reader:
  name: seviri_l1b_icare
  short_name: SEVIRI L1b ICARE
  long_name: MSG SEVIRI Level 1b in HDF format from ICARE (Lille)
  description: >
    A reader for L1b SEVIRI data that has been retrieved from the ICARE
    service as HDF.
  sensors: [seviri]
  default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  ICARE_HRV:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_HRV_{version:5s}.hdf']
  ICARE_IR_016:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR016_{version:5s}.hdf']
  ICARE_IR_039:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR039_{version:5s}.hdf']
  ICARE_IR_087:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR087_{version:5s}.hdf']
  ICARE_IR_097:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR097_{version:5s}.hdf']
  ICARE_IR_108:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR108_{version:5s}.hdf']
  ICARE_IR_120:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR120_{version:5s}.hdf']
  ICARE_IR_134:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR134_{version:5s}.hdf']
  ICARE_VIS006:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_VIS06_{version:5s}.hdf']
  ICARE_VIS008:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_VIS08_{version:5s}.hdf']
  ICARE_WV_062:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_WV062_{version:5s}.hdf']
  ICARE_WV_073:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_WV073_{version:5s}.hdf']

datasets:
  HRV:
    name: HRV
    resolution: 1000.134348869
    wavelength: [0.5, 0.7, 0.9]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
    file_type: ICARE_HRV
  IR_016:
    name: IR_016
    resolution: 3000.403165817
    wavelength: [1.5, 1.64, 1.78]
    calibration:
      reflectance: {standard_name: reflectance, units: "%"}
    file_type: ICARE_IR_016
  IR_039:
    name: IR_039
    resolution: 3000.403165817
    wavelength: [3.48, 3.92, 4.36]
    calibration:
      brightness_temperature: {standard_name: brightness_temperature, units: K}
    file_type: ICARE_IR_039
  IR_087:
    name: IR_087
    resolution: 3000.403165817
    wavelength: [8.3, 8.7, 9.1]
    calibration:
      brightness_temperature: {standard_name: brightness_temperature, units: K}
    file_type: ICARE_IR_087
  IR_097:
    name: IR_097
    resolution: 3000.403165817
    wavelength: [9.38, 9.66, 9.94]
    calibration:
      brightness_temperature: {standard_name: brightness_temperature, units: K}
    file_type: ICARE_IR_097
  IR_108:
    name: IR_108
    resolution: 3000.403165817
    wavelength: [9.8, 10.8, 11.8]
    calibration:
      brightness_temperature: {standard_name: brightness_temperature, units: K}
    file_type: ICARE_IR_108
  IR_120:
    name: IR_120
    resolution: 3000.403165817
    wavelength: [11.0, 12.0, 13.0]
    calibration:
      brightness_temperature: {standard_name: brightness_temperature, units: K}
    file_type: ICARE_IR_120
  IR_134:
    name: IR_134
    resolution: 3000.403165817
    wavelength: [12.4, 13.4, 14.4]
    calibration:
      brightness_temperature: {standard_name: brightness_temperature, units: K}
    file_type: ICARE_IR_134
  VIS006:
    name: VIS006
    resolution: 3000.403165817
    wavelength: [0.56, 0.635, 0.71]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
    file_type: ICARE_VIS006
  VIS008:
    name: VIS008
    resolution: 3000.403165817
    wavelength: [0.74, 0.81, 0.88]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
    file_type: ICARE_VIS008
  WV_062:
    name: WV_062
    resolution: 3000.403165817
    wavelength: [5.35, 6.25, 7.15]
    calibration:
      brightness_temperature: {standard_name: brightness_temperature, units: "K"}
    file_type: ICARE_WV_062
  WV_073:
    name: WV_073
    resolution: 3000.403165817
    wavelength: [6.85, 7.35, 7.85]
    calibration:
      brightness_temperature: {standard_name: brightness_temperature, units: "K"}
    file_type: ICARE_WV_073
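Unlike the HRIT definition, each ICARE channel above declares a single calibration, so a plain load is all that is needed. A minimal sketch, with hypothetical file paths and version string:

from glob import glob
from satpy import Scene

# Hypothetical set of per-channel ICARE HDF files for one time slot
scn = Scene(reader='seviri_l1b_icare',
            filenames=glob('/data/icare/GEO_L1B-MSG1_2019-03-04T12-00-00_G_*_V1-04.hdf'))
scn.load(['IR_108', 'VIS006'])
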
satpy-0.20.0/satpy/etc/readers/seviri_l1b_native.yaml

reader:
  name: seviri_l1b_native
  short_name: SEVIRI L1b Native
  long_name: MSG SEVIRI Level 1b (Native)
  description: >
    Reader for EUMETSAT MSG SEVIRI Level 1b native format files.
  sensors: [seviri]
  default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  native_msg:
    file_reader: !!python/name:satpy.readers.seviri_l1b_native.NativeMSGFileHandler ''
    file_patterns: ['{satid:4s}-{instr:4s}-MSG{product_level:2d}-0100-NA-{processing_time1:%Y%m%d%H%M%S.%f}000Z-{order_id:s}.nat']

datasets:
  HRV:
    name: HRV
    resolution: 1000.134348869
    wavelength: [0.5, 0.7, 0.9]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  IR_016:
    name: IR_016
    resolution: 3000.403165817
    wavelength: [1.5, 1.64, 1.78]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  IR_039:
    name: IR_039
    resolution: 3000.403165817
    wavelength: [3.48, 3.92, 4.36]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  IR_087:
    name: IR_087
    resolution: 3000.403165817
    wavelength: [8.3, 8.7, 9.1]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  IR_097:
    name: IR_097
    resolution: 3000.403165817
    wavelength: [9.38, 9.66, 9.94]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  IR_108:
    name: IR_108
    resolution: 3000.403165817
    wavelength: [9.8, 10.8, 11.8]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  IR_120:
    name: IR_120
    resolution: 3000.403165817
    wavelength: [11.0, 12.0, 13.0]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  IR_134:
    name: IR_134
    resolution: 3000.403165817
    wavelength: [12.4, 13.4, 14.4]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  VIS006:
    name: VIS006
    resolution: 3000.403165817
    wavelength: [0.56, 0.635, 0.71]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  VIS008:
    name: VIS008
    resolution: 3000.403165817
    wavelength: [0.74, 0.81, 0.88]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  WV_062:
    name: WV_062
    resolution: 3000.403165817
    wavelength: [5.35, 6.25, 7.15]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
  WV_073:
    name: WV_073
    resolution: 3000.403165817
    wavelength: [6.85, 7.35, 7.85]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: native_msg
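Because each channel above lists several calibrations, a specific one can be requested at load time. A minimal sketch; the .nat file name is hypothetical but follows the pattern above:

from satpy import Scene

scn = Scene(reader='seviri_l1b_native',
            filenames=['MSG4-SEVI-MSG15-0100-NA-20190304121243.926000000Z-NA.nat'])
scn.load(['VIS006'], calibration='radiance')  # radiance instead of the default reflectance
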
satpy-0.20.0/satpy/etc/readers/seviri_l1b_nc.yaml

reader:
  name: seviri_l1b_nc
  short_name: SEVIRI L1b NetCDF4
  long_name: MSG SEVIRI Level 1b NetCDF4
  description: >
    NetCDF4 reader for EUMETSAT MSG SEVIRI Level 1b files.
  sensors: [seviri]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  group_keys: ["processing_time", "satid"]

file_types:
  seviri_l1b_nc:
    file_reader: !!python/name:satpy.readers.seviri_l1b_nc.NCSEVIRIFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,VIS+IR+HRV+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{processing_time:%Y%m%d%H%M%S}.nc']

datasets:
  HRV:
    name: HRV
    resolution: 1000.134348869
    wavelength: [0.5, 0.7, 0.9]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch12'
  IR_016:
    name: IR_016
    resolution: 3000.403165817
    wavelength: [1.5, 1.64, 1.78]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch3'
  IR_039:
    name: IR_039
    resolution: 3000.403165817
    wavelength: [3.48, 3.92, 4.36]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch4'
  IR_087:
    name: IR_087
    resolution: 3000.403165817
    wavelength: [8.3, 8.7, 9.1]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch7'
  IR_097:
    name: IR_097
    resolution: 3000.403165817
    wavelength: [9.38, 9.66, 9.94]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch8'
  IR_108:
    name: IR_108
    resolution: 3000.403165817
    wavelength: [9.8, 10.8, 11.8]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch9'
  IR_120:
    name: IR_120
    resolution: 3000.403165817
    wavelength: [11.0, 12.0, 13.0]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch10'
  IR_134:
    name: IR_134
    resolution: 3000.403165817
    wavelength: [12.4, 13.4, 14.4]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: K}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch11'
  VIS006:
    name: VIS006
    resolution: 3000.403165817
    wavelength: [0.56, 0.635, 0.71]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch1'
  VIS008:
    name: VIS008
    resolution: 3000.403165817
    wavelength: [0.74, 0.81, 0.88]
    calibration:
      reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch2'
  WV_062:
    name: WV_062
    resolution: 3000.403165817
    wavelength: [5.35, 6.25, 7.15]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch5'
  WV_073:
    name: WV_073
    resolution: 3000.403165817
    wavelength: [6.85, 7.35, 7.85]
    calibration:
      brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}
      radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}
      counts: {standard_name: counts, units: count}
    file_type: seviri_l1b_nc
    nc_key: 'ch6'
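The group_keys entry above tells satpy which filename fields identify files that belong together. A minimal sketch using satpy's group_files helper, with hypothetical paths:

from glob import glob
from satpy.readers import group_files

# One group per (processing_time, satid) combination, following group_keys above
groups = group_files(glob('/data/nc/W_XX-EUMETSAT-Darmstadt*.nc'), reader='seviri_l1b_nc')
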
satpy-0.20.0/satpy/etc/readers/seviri_l2_bufr.yaml

reader:
  description: SEVIRI L2 BUFR Product Reader
  name: seviri_l2_bufr
  sensors: [seviri]
  default_channels: []
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  seviri_l2_bufr_asr:
    file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler
    file_patterns:
      - 'ASRBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGASRE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGASRE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_cla:
    file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler
    file_patterns:
      - 'CLABUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGCLAP-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGCLAP-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_csr:
    file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler
    file_patterns:
      - 'CSRBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGCSKR-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGCSKR-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_gii:
    file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler
    file_patterns:
      - 'GIIBUFRProduct_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGGIIN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGGIIN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_thu:
    file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler
    file_patterns:
      - 'THBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGTPHU-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGTPHU-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_toz:
    file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler
    file_patterns:
      - 'TOZBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGTOZN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGTOZN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'

datasets:
  latitude:
    {name: latitude, key: 'latitude', resolution: [48000, 9000], standard_name: latitude, units: degree_north, fill_value: -1.e+100,
     file_type: [seviri_l2_bufr_asr, seviri_l2_bufr_cla, seviri_l2_bufr_csr, seviri_l2_bufr_gii, seviri_l2_bufr_thu, seviri_l2_bufr_toz]}
  longitude:
    {name: longitude, key: 'longitude', resolution: [48000, 9000], standard_name: longitude, units: degree_east, fill_value: -1.e+100,
     file_type: [seviri_l2_bufr_asr, seviri_l2_bufr_cla, seviri_l2_bufr_csr, seviri_l2_bufr_gii, seviri_l2_bufr_thu, seviri_l2_bufr_toz]}

  # ---- ASR products ------------
  nir39all: {name: nir39all, key: '#19#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  nir39clr: {name: nir39clr, key: '#20#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  nir39cld: {name: nir39cld, key: '#21#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  nir39low: {name: nir39low, key: '#22#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  nir39med: {name: nir39med, key: '#23#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  nir39high: {name: nir39high, key: '#24#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv62all: {name: wv62all, key: '#25#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv62clr: {name: wv62clr, key: '#26#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv62cld: {name: wv62cld, key: '#27#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv62low: {name: wv62low, key: '#28#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv62med: {name: wv62med, key: '#29#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv62high: {name: wv62high, key: '#30#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv73all: {name: wv73all, key: '#31#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv73clr: {name: wv73clr, key: '#32#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv73cld: {name: wv73cld, key: '#33#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv73low: {name: wv73low, key: '#34#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv73med: {name: wv73med, key: '#35#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv73high: {name: wv73high, key: '#36#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir87all: {name: ir87all, key: '#37#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir87clr: {name: ir87clr, key: '#38#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir87cld: {name: ir87cld, key: '#39#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir87low: {name: ir87low, key: '#40#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir87med: {name: ir87med, key: '#41#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir87high: {name: ir87high, key: '#42#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir97all: {name: ir97all, key: '#43#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir97clr: {name: ir97clr, key: '#44#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir97cld: {name: ir97cld, key: '#45#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir97low: {name: ir97low, key: '#46#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir97med: {name: ir97med, key: '#47#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir97high: {name: ir97high, key: '#48#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir108all: {name: ir108all, key: '#49#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir108clr: {name: ir108clr, key: '#50#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir108cld: {name: ir108cld, key: '#51#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir108low: {name: ir108low, key: '#52#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir108med: {name: ir108med, key: '#53#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir108high: {name: ir108high, key: '#54#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir120all: {name: ir120all, key: '#55#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir120clr: {name: ir120clr, key: '#56#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir120cld: {name: ir120cld, key: '#57#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir120low: {name: ir120low, key: '#58#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir120med: {name: ir120med, key: '#59#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir120high: {name: ir120high, key: '#60#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir134all: {name: ir134all, key: '#61#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir134clr: {name: ir134clr, key: '#62#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir134cld: {name: ir134cld, key: '#63#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir134low: {name: ir134low, key: '#64#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir134med: {name: ir134med, key: '#65#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir134high: {name: ir134high, key: '#66#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: K, file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  pcld: {name: pcld, key: '#1#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: 0}
  pclr: {name: pclr, key: '#1#amountSegmentCloudFree', resolution: 48000, standard_name: clear_sky_area_fraction, units: '%', file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: 0}
  pclrs: {name: pclrs, key: '#2#amountSegmentCloudFree', resolution: 48000, standard_name: clear_sky_area_fraction, units: '%', file_type: seviri_l2_bufr_asr, coordinates: [longitude, latitude], fill_value: 0}

  # ---- CLA products ------------
  hca: {name: hca, key: '#1#amountOfHighClouds', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_cla, coordinates: [longitude, latitude], fill_value: 0}
  lca: {name: lca, key: '#1#amountOfLowClouds', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_cla, coordinates: [longitude, latitude], fill_value: 0}
  mca: {name: mca, key: '#1#amountOfMiddleClouds', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_cla, coordinates: [longitude, latitude], fill_value: 0}
  tca: {name: tca, key: '#1#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_cla, coordinates: [longitude, latitude], fill_value: 0}

  # ---- CSR products ------------
  nir39: {name: nir39, key: '#4#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: "W/sr-1/m-2", file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  cld39: {name: cld39, key: '#4#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv62: {name: wv62, key: '#5#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: "W/sr-1/m-2", file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  cld62: {name: cld62, key: '#5#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  wv73: {name: wv73, key: '#6#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: "W/sr-1/m-2", file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  cld73: {name: cld73, key: '#6#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir87: {name: ir87, key: '#7#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: "W/sr-1/m-2", file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  cld87: {name: cld87, key: '#7#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir97: {name: ir97, key: '#8#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: "W/sr-1/m-2", file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  cld97: {name: cld97, key: '#8#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir108: {name: ir108, key: '#9#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: "W/sr-1/m-2", file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  cld108: {name: cld108, key: '#9#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir120: {name: ir120, key: '#10#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: "W/sr-1/m-2", file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  cld120: {name: cld120, key: '#10#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  ir134: {name: ir134, key: '#11#brightnessTemperature', resolution: 48000, standard_name: toa_brightness_temperature, units: "W/sr-1/m-2", file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}
  cld134: {name: cld134, key: '#11#cloudAmountInSegment', resolution: 48000, standard_name: cloud_area_fraction, units: '%', file_type: seviri_l2_bufr_csr, coordinates: [longitude, latitude], fill_value: -1.e+100}

  # ---- GII products ------------
  ki: {name: ki, key: '#1#kIndex', resolution: 9000, standard_name: atmosphere_stability_k_index, coordinates: [longitude, latitude], units: "", file_type: seviri_l2_bufr_gii, fill_value: -1.e+100}
  ko: {name: ko, key: '#1#koIndex', resolution: 9000, standard_name: atmosphere_stability_ko_index, coordinates: [longitude, latitude], units: "", file_type: seviri_l2_bufr_gii, fill_value: -1.e+100}
  li: {name: li, key: '#1#parcelLiftedIndexTo500Hpa', resolution: 9000, standard_name: atmosphere_stability_lifted_index, coordinates: [longitude, latitude], units: "", file_type: seviri_l2_bufr_gii, fill_value: -1.e+100}
  lpw1: {name: lpw1, key: '#2#precipitableWater', resolution: 9000, standard_name: lwe_thickness_of_precipitation_amount, coordinates: [longitude, latitude], units: mm, file_type: seviri_l2_bufr_gii, fill_value: -1.e+100}
  lpw2: {name: lpw2, key: '#3#precipitableWater', resolution: 9000, standard_name: lwe_thickness_of_precipitation_amount, coordinates: [longitude, latitude], units: mm, file_type: seviri_l2_bufr_gii, fill_value: -1.e+100}
  lpw3: {name: lpw3, key: '#4#precipitableWater', resolution: 9000, standard_name: lwe_thickness_of_precipitation_amount, coordinates: [longitude, latitude], units: mm, file_type: seviri_l2_bufr_gii, fill_value: -1.e+100}
  mb: {name: mb, key: '#1#maximumBuoyancy', resolution: 9000, standard_name: atmosphere_stability_maximum_buoyancy_index, coordinates: [longitude, latitude], units: "", file_type: seviri_l2_bufr_gii, fill_value: -1.e+100}
  stza: {name: stza, key: '#1#satelliteZenithAngle', resolution: 9000, standard_name: sensor_zenith_angle, coordinates: [longitude, latitude], units: degrees, file_type: seviri_l2_bufr_gii, fill_value: 0}
  tpw: {name: tpw, key: '#1#precipitableWater', resolution: 9000, standard_name: lwe_thickness_of_precipitation_amount, coordinates: [longitude, latitude], units: mm, file_type: seviri_l2_bufr_gii, fill_value: -1.e+100}

  # ---- THU products ------------
  thu62: {name: thu62, key: '#1#relativeHumidity', resolution: 48000, standard_name: relative_humidity, units: '%', file_type: seviri_l2_bufr_thu, coordinates: [longitude, latitude], fill_value: -1.e+100}
  thu73: {name: thu73, key: '#2#relativeHumidity', resolution: 48000, standard_name: relative_humidity, units: '%', file_type: seviri_l2_bufr_thu, coordinates: [longitude, latitude], fill_value: -1.e+100}

  # ---- TOZ products ------------
  toz: {name: toz, key: '#1#totalOzone', resolution: 9000, standard_name: atmosphere_mass_content_of_ozone, units: dobson, file_type: seviri_l2_bufr_toz, coordinates: [longitude, latitude], fill_value: 0}
  qual: {name: qual, key: '#1#totalOzone->totalOzoneQuality', resolution: 9000, standard_name: total_ozone_quality, units: "", file_type: seviri_l2_bufr_toz, coordinates: [longitude, latitude], fill_value: 0}
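A sketch of pulling one of the GII stability fields above into a Scene; the file name is hypothetical (server, satellite, mission and subsatellite fields are made up) but matches the first GII pattern:

from satpy import Scene

scn = Scene(reader='seviri_l2_bufr',
            filenames=['GIIBUFRProduct_20190304120000Z_00_OMPEFS02_MET09_FES_E0000'])
scn.load(['ki'])  # delivered on the swath defined by the longitude/latitude datasets above
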
satpy-0.20.0/satpy/etc/readers/slstr_l1b.yaml

reader:
  description: NC Reader for SLSTR data
  name: slstr_l1b
  sensors: [slstr]
  default_channels: []
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  esa_l1b_an:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance_an.nc']
  esa_l1b_ao:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance_ao.nc']
  esa_l1b_bn:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance_bn.nc']
  esa_l1b_bo:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance_bo.nc']
  esa_l1b_cn:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance_cn.nc']
  esa_l1b_co:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance_co.nc']
  esa_l1b_ntir:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_BT_{stripe:1s}n.nc']
  esa_l1b_otir:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_BT_{stripe:1s}o.nc']
  esa_angles:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRAngles
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geometry_t{view:1s}.nc']
  esa_geo_an:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRGeo
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geodetic_a{view:1s}.nc']
  esa_geo_bn:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRGeo
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geodetic_b{view:1s}.nc']
  esa_geo_in:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRGeo
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geodetic_i{view:1s}.nc']
  esa_l1b_flag_an:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_an.nc']
  esa_l1b_flag_bn:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_bn.nc']
  esa_l1b_flag_cn:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_cn.nc']
  esa_l1b_flag_in:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_in.nc']
  esa_l1b_flag_ao:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_ao.nc']
  esa_l1b_flag_bo:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_bo.nc']
  esa_l1b_flag_co:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_co.nc']
  esa_l1b_flag_io:
    file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag
    file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_io.nc']

datasets:
  longitude_an: {name: longitude_an, resolution: 500, view: nadir, stripe: a, file_type: esa_geo_an, file_key: longitude_an, standard_name: longitude, units: degree}
  latitude_an: {name: latitude_an, resolution: 500, view: nadir, stripe: a, file_type: esa_geo_an, file_key: latitude_an, standard_name: latitude, units: degree}
  longitude_bn: {name: longitude_bn, resolution: 500, view: nadir, stripe: b, file_type: esa_geo_bn, file_key: longitude_bn, standard_name: longitude, units: degree}
  latitude_bn: {name: latitude_bn, resolution: 500, view: nadir, stripe: b, file_type: esa_geo_bn, file_key: latitude_bn, standard_name: latitude, units: degree}
  longitude_in: {name: longitude_in, resolution: 1000, view: nadir, stripe: i, file_type: esa_geo_in, file_key: longitude_in, standard_name: longitude, units: degree}
  latitude_in: {name: latitude_in, resolution: 1000, view: nadir, stripe: i, file_type: esa_geo_in, file_key: latitude_in, standard_name: latitude, units: degree}
  longitude_ao: {name: longitude_ao, resolution: 500, view: oblique, stripe: a, file_type: esa_geo_an, file_key: longitude_ao, standard_name: longitude, units: degree}
  latitude_ao: {name: latitude_ao, resolution: 500, view: oblique, stripe: a, file_type: esa_geo_an, file_key: latitude_ao, standard_name: latitude, units: degree}
  longitude_bo: {name: longitude_bo, resolution: 500, view: oblique, stripe: b, file_type: esa_geo_bn, file_key: longitude_bo, standard_name: longitude, units: degree}
  latitude_bo: {name: latitude_bo, resolution: 500, view: oblique, stripe: b, file_type: esa_geo_bn, file_key: latitude_bo, standard_name: latitude, units: degree}
  longitude_io: {name: longitude_io, resolution: 1000, view: oblique, stripe: i, file_type: esa_geo_in, file_key: longitude_io, standard_name: longitude, units: degree}
  latitude_io: {name: latitude_io, resolution: 1000, view: oblique, stripe: i, file_type: esa_geo_in, file_key: latitude_io, standard_name: latitude, units: degree}

  # The channels S1-S3 are available in nadir (default) and oblique view.
  S1_an: {name: S1_an, sensor: slstr, wavelength: [0.545, 0.555, 0.565], resolution: 500, view: nadir, stripe: a, coordinates: [longitude_an, latitude_an], file_type: esa_l1b_an, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S1_ao: {name: S1_ao, sensor: slstr, wavelength: [0.545, 0.555, 0.565], resolution: 500, view: oblique, stripe: a, coordinates: [longitude_ao, latitude_ao], file_type: esa_l1b_ao, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S2_an: {name: S2_an, sensor: slstr, wavelength: [0.649, 0.659, 0.669], resolution: 500, view: nadir, stripe: a, coordinates: [longitude_an, latitude_an], file_type: esa_l1b_an, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S2_ao: {name: S2_ao, sensor: slstr, wavelength: [0.649, 0.659, 0.669], resolution: 500, view: oblique, stripe: a, coordinates: [longitude_ao, latitude_ao], file_type: esa_l1b_ao, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S3_an: {name: S3_an, sensor: slstr, wavelength: [0.855, 0.865, 0.875], resolution: 500, view: nadir, coordinates: [longitude_an, latitude_an], file_type: esa_l1b_an, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S3_ao: {name: S3_ao, sensor: slstr, wavelength: [0.855, 0.865, 0.875], resolution: 500, view: oblique, coordinates: [longitude_ao, latitude_ao], file_type: esa_l1b_ao, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}

  # The channels S4-S6 are available in nadir (default) and oblique view and
  # for both in the a, b and c stripes.
  S4_an: {name: S4_an, sensor: slstr, wavelength: [1.3675, 1.375, 1.36825], resolution: 500, view: nadir, coordinates: [longitude_an, latitude_an], file_type: esa_l1b_an, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S4_ao: {name: S4_ao, sensor: slstr, wavelength: [1.3675, 1.375, 1.36825], resolution: 500, view: oblique, coordinates: [longitude_ao, latitude_ao], file_type: esa_l1b_ao, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S5_an: {name: S5_an, sensor: slstr, wavelength: [1.58, 1.61, 1.64], resolution: 500, view: nadir, coordinates: [longitude_an, latitude_an], file_type: esa_l1b_an, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S5_ao: {name: S5_ao, sensor: slstr, wavelength: [1.58, 1.61, 1.64], resolution: 500, view: oblique, coordinates: [longitude_ao, latitude_ao], file_type: esa_l1b_ao, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S6_an: {name: S6_an, sensor: slstr, wavelength: [2.225, 2.25, 2.275], resolution: 500, view: nadir, coordinates: [longitude_an, latitude_an], file_type: esa_l1b_an, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S6_ao: {name: S6_ao, sensor: slstr, wavelength: [2.225, 2.25, 2.275], resolution: 500, view: oblique, coordinates: [longitude_ao, latitude_ao], file_type: esa_l1b_ao, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S4_bn: {name: S4_bn, sensor: slstr, wavelength: [1.3675, 1.375, 1.36825], resolution: 500, view: nadir, coordinates: [longitude_bn, latitude_bn], file_type: esa_l1b_bn, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S4_bo: {name: S4_bo, sensor: slstr, wavelength: [1.3675, 1.375, 1.36825], resolution: 500, view: oblique, coordinates: [longitude_bo, latitude_bo], file_type: esa_l1b_bo, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S5_bn: {name: S5_bn, sensor: slstr, wavelength: [1.58, 1.61, 1.64], resolution: 500, view: nadir, coordinates: [longitude_bn, latitude_bn], file_type: esa_l1b_bn, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S5_bo: {name: S5_bo, sensor: slstr, wavelength: [1.58, 1.61, 1.64], resolution: 500, view: oblique, coordinates: [longitude_bo, latitude_bo], file_type: esa_l1b_bo, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S6_bn: {name: S6_bn, sensor: slstr, wavelength: [2.225, 2.25, 2.275], resolution: 500, view: nadir, coordinates: [longitude_bn, latitude_bn], file_type: esa_l1b_bn, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S6_bo: {name: S6_bo, sensor: slstr, wavelength: [2.225, 2.25, 2.275], resolution: 500, view: oblique, coordinates: [longitude_bo, latitude_bo], file_type: esa_l1b_bo, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S4_cn: {name: S4_cn, sensor: slstr, wavelength: [1.3675, 1.375, 1.36825], resolution: 500, view: nadir, coordinates: [longitude_cn, latitude_cn], file_type: esa_l1b_cn, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S4_co: {name: S4_co, sensor: slstr, wavelength: [1.3675, 1.375, 1.36825], resolution: 500, view: oblique, coordinates: [longitude_co, latitude_co], file_type: esa_l1b_co, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S5_cn: {name: S5_cn, sensor: slstr, wavelength: [1.58, 1.61, 1.64], resolution: 500, view: nadir, coordinates: [longitude_cn, latitude_cn], file_type: esa_l1b_cn, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S5_co: {name: S5_co, sensor: slstr, wavelength: [1.58, 1.61, 1.64], resolution: 500, view: oblique, coordinates: [longitude_co, latitude_co], file_type: esa_l1b_co, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S6_cn: {name: S6_cn, sensor: slstr, wavelength: [2.225, 2.25, 2.275], resolution: 500, view: nadir, coordinates: [longitude_cn, latitude_cn], file_type: esa_l1b_cn, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}
  S6_co: {name: S6_co, sensor: slstr, wavelength: [2.225, 2.25, 2.275], resolution: 500, view: oblique, coordinates: [longitude_co, latitude_co], file_type: esa_l1b_co, calibration: {reflectance: {standard_name: toa_bidirectional_reflectance, units: "%"}, radiance: {standard_name: toa_outgoing_radiance_per_unit_wavelength, units: W m-2 um-1 sr-1}}}

  # The channels S7-S9, F1 and F2 are available in nadir (default) and oblique view.
  S7_in: {name: S7_in, sensor: slstr, wavelength: [3.55, 3.74, 3.93], resolution: 1000, view: nadir, coordinates: [longitude_in, latitude_in], file_type: esa_l1b_ntir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  S7_io: {name: S7_io, sensor: slstr, wavelength: [3.55, 3.74, 3.93], resolution: 1000, view: oblique, coordinates: [longitude_io, latitude_io], file_type: esa_l1b_otir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  S8_in: {name: S8_in, sensor: slstr, wavelength: [10.4, 10.85, 11.3], resolution: 1000, view: nadir, coordinates: [longitude_in, latitude_in], file_type: esa_l1b_ntir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  S8_io: {name: S8_io, sensor: slstr, wavelength: [10.4, 10.85, 11.3], resolution: 1000, view: oblique, coordinates: [longitude_io, latitude_io], file_type: esa_l1b_otir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  S9_in: {name: S9_in, sensor: slstr, wavelength: [11.0, 12.0, 13.0], resolution: 1000, view: nadir, coordinates: [longitude_in, latitude_in], file_type: esa_l1b_ntir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  S9_io: {name: S9_io, sensor: slstr, wavelength: [11.0, 12.0, 13.0], resolution: 1000, view: oblique, coordinates: [longitude_io, latitude_io], file_type: esa_l1b_otir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  F1_in: {name: F1_in, sensor: slstr, wavelength: [3.55, 3.74, 3.93], resolution: 1000, view: nadir, coordinates: [longitude_in, latitude_in], file_type: esa_l1b_ntir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  F1_io: {name: F1_io, sensor: slstr, wavelength: [3.55, 3.74, 3.93], resolution: 1000, view: oblique, coordinates: [longitude_io, latitude_io], file_type: esa_l1b_otir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  F2_in: {name: F2_in, sensor: slstr, wavelength: [10.4, 10.85, 11.3], resolution: 1000, view: nadir, coordinates: [longitude_in, latitude_in], file_type: esa_l1b_ntir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}
  F2_io: {name: F2_io, sensor: slstr, wavelength: [10.4, 10.85, 11.3], resolution: 1000, view: oblique, coordinates: [longitude_io, latitude_io], file_type: esa_l1b_otir, calibration: {brightness_temperature: {standard_name: toa_brightness_temperature, units: "K"}}}

  solar_zenith_angle_n: {name: solar_zenith_angle_n, sensor: slstr, resolution: 1000, coordinates: [longitude_in, latitude_in], view: nadir, standard_name: solar_zenith_angle, file_type: esa_angles, file_key: solar_zenith_tn}
  solar_azimuth_angle_n: {name: solar_azimuth_angle_n, sensor: slstr, resolution: 1000, coordinates: [longitude_in, latitude_in], view: nadir, standard_name: solar_azimuth_angle, file_type: esa_angles, file_key: solar_azimuth_tn}
  satellite_zenith_angle_n: {name: satellite_zenith_angle_n, sensor: slstr, resolution: 1000, coordinates: [longitude_in, latitude_in], view: nadir, standard_name: satellite_zenith_angle, file_type: esa_angles, file_key: sat_zenith_tn}
  satellite_azimuth_angle_n: {name: satellite_azimuth_angle_n, sensor: slstr, resolution: 1000, coordinates: [longitude_in, latitude_in], view: nadir, standard_name: satellite_azimuth_angle, file_type: esa_angles, file_key: sat_azimuth_tn}
  solar_zenith_angle_o: {name: solar_zenith_angle_o, sensor: slstr, resolution: 1000, coordinates: [longitude_io, latitude_io], view: oblique, standard_name: solar_zenith_angle, file_type: esa_angles, file_key: solar_zenith_to}
  solar_azimuth_angle_o: {name: solar_azimuth_angle_o, sensor: slstr, resolution: 1000, coordinates: [longitude_io, latitude_io], view: oblique, standard_name: solar_azimuth_angle, file_type: esa_angles, file_key: solar_azimuth_to}
  satellite_zenith_angle_o: {name: satellite_zenith_angle_o, sensor: slstr, resolution: 1000, coordinates: [longitude_io, latitude_io], view: oblique, standard_name: satellite_zenith_angle, file_type: esa_angles, file_key: sat_zenith_to}
  satellite_azimuth_angle_o: {name: satellite_azimuth_angle_o, sensor: slstr, resolution: 1000, coordinates: [longitude_io, latitude_io], view: oblique, standard_name: satellite_azimuth_angle, file_type: esa_angles, file_key: sat_azimuth_to}

  # CloudFlags are all bitfields. They are available in nadir (default) and
  # oblique view for each of the a, b, c, i stripes.
  cloud_an: {name: cloud_an, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_an, coordinates: [longitude_an, latitude_an]}
  confidence_an: {name: confidence_an, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_an, coordinates: [longitude_an, latitude_an]}
  pointing_an: {name: pointing_an, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_an, coordinates: [longitude_an, latitude_an]}
  bayes_an: {name: bayes_an, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_an, coordinates: [longitude_an, latitude_an]}
  cloud_bn: {name: cloud_bn, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_bn, coordinates: [longitude_bn, latitude_bn]}
  confidence_bn: {name: confidence_bn, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_bn, coordinates: [longitude_bn, latitude_bn]}
  pointing_bn: {name: pointing_bn, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_bn, coordinates: [longitude_bn, latitude_bn]}
  bayes_bn: {name: bayes_bn, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_bn, coordinates: [longitude_bn, latitude_bn]}
  cloud_cn: {name: cloud_cn, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_cn, coordinates: [longitude_cn, latitude_cn]}
  confidence_cn: {name: confidence_cn, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_cn, coordinates: [longitude_cn, latitude_cn]}
  pointing_cn: {name: pointing_cn, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_cn, coordinates: [longitude_cn, latitude_cn]}
  bayes_cn: {name: bayes_cn, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_cn, coordinates: [longitude_cn, latitude_cn]}
  cloud_in: {name: cloud_in, sensor: slstr, resolution: 1000, file_type: esa_l1b_flag_in, coordinates: [longitude_in, latitude_in]}
  confidence_in: {name: confidence_in, sensor: slstr, resolution: 1000, file_type: esa_l1b_flag_in, coordinates: [longitude_in, latitude_in]}
  pointing_in: {name: pointing_in, sensor: slstr, resolution: 1000, file_type: esa_l1b_flag_in, coordinates: [longitude_in, latitude_in]}
  bayes_in: {name: bayes_in, sensor: slstr, resolution: 1000, file_type: esa_l1b_flag_in, coordinates: [longitude_in, latitude_in]}

  # CloudFlags are all bitfields. Now for the oblique view.
  cloud_ao: {name: cloud_ao, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_ao, coordinates: [longitude_ao, latitude_ao]}
  confidence_ao: {name: confidence_ao, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_ao, coordinates: [longitude_ao, latitude_ao]}
  pointing_ao: {name: pointing_ao, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_ao, coordinates: [longitude_ao, latitude_ao]}
  bayes_ao: {name: bayes_ao, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_ao, coordinates: [longitude_ao, latitude_ao]}
  cloud_bo: {name: cloud_bo, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_bo, coordinates: [longitude_bo, latitude_bo]}
  confidence_bo: {name: confidence_bo, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_bo, coordinates: [longitude_bo, latitude_bo]}
  pointing_bo: {name: pointing_bo, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_bo, coordinates: [longitude_bo, latitude_bo]}
  bayes_bo: {name: bayes_bo, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_bo, coordinates: [longitude_bo, latitude_bo]}
  cloud_co: {name: cloud_co, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_co, coordinates: [longitude_co, latitude_co]}
  confidence_co: {name: confidence_co, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_co, coordinates: [longitude_co, latitude_co]}
  pointing_co: {name: pointing_co, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_co, coordinates: [longitude_co, latitude_co]}
  bayes_co: {name: bayes_co, sensor: slstr, resolution: 500, file_type: esa_l1b_flag_co, coordinates: [longitude_co, latitude_co]}
  cloud_io: {name: cloud_io, sensor: slstr, resolution: 1000, file_type: esa_l1b_flag_io, coordinates: [longitude_io, latitude_io]}
  confidence_io: {name: confidence_io, sensor: slstr, resolution: 1000, file_type: esa_l1b_flag_io, coordinates: [longitude_io, latitude_io]}
  pointing_io: {name: pointing_io, sensor: slstr, resolution: 1000, file_type: esa_l1b_flag_io, coordinates: [longitude_io, latitude_io]}
  bayes_io: {name: bayes_io, sensor: slstr, resolution: 1000, file_type: esa_l1b_flag_io, coordinates: [longitude_io, latitude_io]}
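A minimal sketch of reading both views of a thermal channel from the definitions above; the .SEN3 directory name is hypothetical:

from glob import glob
from satpy import Scene

# The file patterns above address the NetCDF files inside the .SEN3 directory
filenames = glob('/data/S3A_SL_1_RBT____20190304T120000_*.SEN3/*.nc')
scn = Scene(reader='slstr_l1b', filenames=filenames)
scn.load(['S8_in', 'S8_io'])  # the same channel in the nadir and oblique views
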
Now for the oblique view cloud_ao: name: cloud_ao sensor: slstr resolution: 500 file_type: esa_l1b_flag_ao coordinates: [longitude_ao, latitude_ao] confidence_ao: name: confidence_ao sensor: slstr resolution: 500 file_type: esa_l1b_flag_ao coordinates: [longitude_ao, latitude_ao] pointing_ao: name: pointing_ao sensor: slstr resolution: 500 file_type: esa_l1b_flag_ao coordinates: [longitude_ao, latitude_ao] bayes_ao: name: bayes_ao sensor: slstr resolution: 500 file_type: esa_l1b_flag_ao coordinates: [longitude_ao, latitude_ao] cloud_bo: name: cloud_bo sensor: slstr resolution: 500 file_type: esa_l1b_flag_bo coordinates: [longitude_bo, latitude_bo] confidence_bo: name: confidence_bo sensor: slstr resolution: 500 file_type: esa_l1b_flag_bo coordinates: [longitude_bo, latitude_bo] pointing_bo: name: pointing_bo sensor: slstr resolution: 500 file_type: esa_l1b_flag_bo coordinates: [longitude_bo, latitude_bo] bayes_bo: name: bayes_bo sensor: slstr resolution: 500 file_type: esa_l1b_flag_bo coordinates: [longitude_bo, latitude_bo] cloud_co: name: cloud_co sensor: slstr resolution: 500 file_type: esa_l1b_flag_co coordinates: [longitude_co, latitude_co] confidence_co: name: confidence_co sensor: slstr resolution: 500 file_type: esa_l1b_flag_co coordinates: [longitude_co, latitude_co] pointing_co: name: pointing_co sensor: slstr resolution: 500 file_type: esa_l1b_flag_co coordinates: [longitude_co, latitude_co] bayes_co: name: bayes_co sensor: slstr resolution: 500 file_type: esa_l1b_flag_co coordinates: [longitude_co, latitude_co] cloud_io: name: cloud_io sensor: slstr resolution: 1000 file_type: esa_l1b_flag_io coordinates: [longitude_io, latitude_io] confidence_io: name: confidence_io sensor: slstr resolution: 1000 file_type: esa_l1b_flag_io coordinates: [longitude_io, latitude_io] pointing_io: name: pointing_io sensor: slstr resolution: 1000 file_type: esa_l1b_flag_io coordinates: [longitude_io, latitude_io] bayes_io: name: bayes_io sensor: slstr resolution: 1000 file_type: esa_l1b_flag_io coordinates: [longitude_io, latitude_io] satpy-0.20.0/satpy/etc/readers/slstr_l2.yaml000066400000000000000000000032541362525524100207320ustar00rootroot00000000000000reader: description: NC Reader for Sentinel-3 SLSTR Level 2 data name: slstr_l2 sensors: [slstr_l2] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: SLSTRB: file_reader: !!python/name:satpy.readers.slstr_l2.SLSTRL2FileHandler file_patterns: ['{dt1:%Y%m%d%H%M%S}-{generating_centre:3s}-{type_id:3s}_GHRSST-SSTskin-SLSTR{something:1s}-{dt2:%Y%m%d%H%M%S}-{version}.nc', '{mission_id:3s}_SL_{processing_level:1s}_WST____{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3.tar'] datasets: longitude: name: longitude resolution: 1000 view: nadir file_type: SLSTRB standard_name: lon units: degree latitude: name: latitude resolution: 1000 view: nadir file_type: SLSTRB standard_name: lat units: degree sea_surface_temperature: name: sea_surface_temperature sensor: slstr_l2 coordinates: [longitude, latitude] file_type: SLSTRB resolution: 1000 view: nadir units: kelvin standard_name: sea_surface_temperature sea_ice_fraction: name: sea_ice_fraction sensor: slstr_l2 coordinates: [longitude, latitude] file_type: SLSTRB resolution: 1000 view: nadir units: "%" standard_name: sea_ice_fraction # Quality estimation 0-5: no data, cloud, worst, low, acceptable, best quality_level: name: 
quality_level sensor: slstr_l2 coordinates: [longitude, latitude] file_type: SLSTRB resolution: 1000 view: nadir standard_name: quality_level satpy-0.20.0/satpy/etc/readers/tropomi_l2.yaml000066400000000000000000000017411362525524100212530ustar00rootroot00000000000000reader: description: TROPOMI Level 2 NetCDF reader name: tropomi_l2 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [tropomi] file_types: tropomi_l2: # Ex: S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc file_reader: !!python/name:satpy.readers.tropomi_l2.TROPOMIL2FileHandler file_patterns: - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}.nc' datasets: latitude: name: 'latitude' file_type: tropomi_l2 file_key: 'PRODUCT/latitude' coordinates: [longitude, latitude] standard_name: latitude longitude: name: 'longitude' file_type: tropomi_l2 file_key: 'PRODUCT/longitude' coordinates: [longitude, latitude] standard_name: longitude satpy-0.20.0/satpy/etc/readers/vaisala_gld360.yaml000066400000000000000000000017141362525524100216640ustar00rootroot00000000000000reader: description: Vaisala Global Lightning Dataset 360 reader name: vaisala_gld360 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' sensors: [vaisala_gld360] file_types: vaisala_gld360: file_reader: !!python/name:satpy.readers.vaisala_gld360.VaisalaGLD360TextFileHandler '' file_patterns: ['flashes_{start_time:%Y%m%d}.txt'] datasets: time: name: time sensor: vaisala_gld360 resolution: 2000 file_type: vaisala_gld360 latitude: name: latitude sensor: vaisala_gld360 resolution: 2000 file_type: vaisala_gld360 standard_name: latitude units: degree_north longitude: name: longitude sensor: vaisala_gld360 resolution: 2000 file_type: vaisala_gld360 standard_name: longitude units: degree_east power: name: power sensor: vaisala_gld360 resolution: 2000 file_type: vaisala_gld360 coordinates: - longitude - latitude units: kA satpy-0.20.0/satpy/etc/readers/viirs_compact.yaml000066400000000000000000000236401362525524100220310ustar00rootroot00000000000000reader: description: Generic Eumetsat Compact VIIRS Reader name: viirs_compact reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' sensors: [viirs] default_datasets: datasets: longitude_m: name: longitude_m resolution: 742 file_type: compact_m standard_name: longitude units: degree latitude_m: name: latitude_m resolution: 742 file_type: compact_m standard_name: latitude units: degree longitude_dnb: name: longitude_dnb resolution: 743 file_type: compact_dnb standard_name: longitude units: degree latitude_dnb: name: latitude_dnb resolution: 743 file_type: compact_dnb standard_name: latitude units: degree M01: name: M01 sensor: viirs wavelength: [0.402,0.412,0.422] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M02: name: M02 sensor: viirs wavelength: [0.436,0.445,0.454] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M03: name: M03 sensor: viirs wavelength: [0.478,0.488,0.498] resolution: 742 calibration: 
reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M04: name: M04 sensor: viirs wavelength: [0.545,0.555,0.565] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M05: name: M05 sensor: viirs wavelength: [0.662,0.672,0.682] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M06: name: M06 sensor: viirs wavelength: [0.739,0.746,0.754] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M07: name: M07 sensor: viirs wavelength: [0.846,0.865,0.885] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M08: name: M08 sensor: viirs wavelength: [1.230,1.240,1.250] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M09: name: M09 sensor: viirs resolution: 742 wavelength: [1.371,1.378,1.386] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M10: name: M10 sensor: viirs wavelength: [1.580,1.610,1.640] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M11: name: M11 sensor: viirs resolution: 742 wavelength: [2.225,2.250,2.275] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M12: name: M12 sensor: viirs wavelength: [3.610,3.700,3.790] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M13: name: M13 sensor: viirs wavelength: [3.973,4.050,4.128] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M14: name: M14 sensor: viirs resolution: 742 wavelength: [8.400,8.550,8.700] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: 
toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M15: name: M15 sensor: viirs resolution: 742 wavelength: [10.263,10.763,11.263] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m M16: name: M16 sensor: viirs wavelength: [11.538,12.013,12.489] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_m, latitude_m] file_type: compact_m DNB: name: DNB sensor: viirs wavelength: [0.500,0.700,0.900] resolution: 743 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W cm-2 sr-1 coordinates: [longitude_dnb, latitude_dnb] file_type: compact_dnb satellite_azimuth_angle: name: satellite_azimuth_angle sensor: viirs resolution: 742 file_type: compact_m units: degree coordinates: [longitude_m, latitude_m] standard_name: platform_azimuth_angle solar_azimuth_angle: name: solar_azimuth_angle sensor: viirs resolution: 742 file_type: compact_m units: degree coordinates: [longitude_m, latitude_m] standard_name: solar_azimuth_angle satellite_zenith_angle: name: satellite_zenith_angle sensor: viirs resolution: 742 file_type: compact_m units: degree coordinates: [longitude_m, latitude_m] standard_name: platform_zenith_angle solar_zenith_angle: name: solar_zenith_angle sensor: viirs resolution: 742 file_type: compact_m units: degree coordinates: [longitude_m, latitude_m] standard_name: solar_zenith_angle satellite_azimuth_angle_dnb: name: dnb_satellite_azimuth_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: platform_azimuth_angle solar_azimuth_angle_dnb: name: dnb_solar_azimuth_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: solar_azimuth_angle satellite_zenith_angle_dnb: name: dnb_satellite_zenith_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: platform_zenith_angle solar_zenith_angle_dnb: name: dnb_solar_zenith_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: solar_zenith_angle lunar_zenith_angle_dnb: name: dnb_lunar_zenith_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: lunar_zenith_angle lunar_azimuth_angle_dnb: name: dnb_lunar_azimuth_angle sensor: viirs resolution: 743 file_type: compact_dnb units: degree coordinates: [longitude_dnb, latitude_dnb] standard_name: lunar_azimuth_angle moon_illumination_fraction_dnb: name: dnb_moon_illumination_fraction resolution: 743 file_type: compact_dnb file_types: compact_m: file_reader: !!python/name:satpy.readers.viirs_compact.VIIRSCompactFileHandler '' file_patterns: ['SVMC_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_eum_ops.h5'] compact_dnb: file_reader: !!python/name:satpy.readers.viirs_compact.VIIRSCompactFileHandler '' file_patterns: 
['SVDNBC_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_eum_ops.h5'] satpy-0.20.0/satpy/etc/readers/viirs_edr_active_fires.yaml000066400000000000000000000072601362525524100237000ustar00rootroot00000000000000reader: description: VIIRS Active Fires Reader name: viirs_edr_active_fires reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] file_types: fires_netcdf_img: file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresFileHandler variable_prefix: "" file_patterns: - 'AFIMG_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc' fires_netcdf: file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresFileHandler variable_prefix: "Fire Pixels/" file_patterns: - 'AFEDR_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc' - 'AFMOD_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc' fires_text_img: file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresTextFileHandler skip_rows: 15 columns: ["latitude", "longitude", "T4", "Along-scan", "Along-track", "confidence_cat", "power"] file_patterns: - 'AFIMG_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt' fires_text: file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresTextFileHandler skip_rows: 15 columns: ["latitude", "longitude", "T13", "Along-scan", "Along-track", "confidence_pct", "power"] file_patterns: - 'AFMOD_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt' - 'AFEDR_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt' datasets: confidence_cat: name: confidence_cat file_type: [fires_netcdf_img, fires_text_img] file_key: "{variable_prefix}FP_confidence" coordinates: [longitude, latitude] units: '1' flag_meanings: ['low', 'medium', 'high'] flag_values: [7, 8, 9] _FillValue: 0 confidence_pct: name: confidence_pct file_type: [fires_netcdf, fires_text] file_key: "{variable_prefix}FP_confidence" coordinates: [longitude, latitude] units: '%' # this is not a category product but we should define a fill value # since we aren't going to scale the data to a float data type in # the python code _FillValue: 255 longitude: name: longitude standard_name: longitude file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text] file_key: "{variable_prefix}FP_longitude" units: 'degrees_east' latitude: name: latitude standard_name: latitude file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text] file_key: "{variable_prefix}FP_latitude" units: 'degrees_north' power: name: power file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text] file_key: "{variable_prefix:s}FP_power" coordinates: [longitude, latitude] units: 'MW' T13: name: T13 file_type: [fires_netcdf, fires_text] file_key: "{variable_prefix}FP_T13" coordinates: [longitude, latitude] standard_name: toa_brightness_temperature units: 'K' T4: name: T4 file_type: [fires_netcdf_img, fires_text_img] file_key: "{variable_prefix}FP_T4" coordinates: [longitude, latitude] standard_name: toa_brightness_temperature units: 
'K'satpy-0.20.0/satpy/etc/readers/viirs_edr_flood.yaml000066400000000000000000000015331362525524100223350ustar00rootroot00000000000000reader: description: VIIRS flood HDF4 reader name: viirs_edr_flood reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] file_types: viirs_edr: file_reader: !!python/name:satpy.readers.viirs_edr_flood.VIIRSEDRFlood file_patterns: - 'WATER_VIIRS_Prj_SVI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_{source:8s}_{dim0:d}_{dim1:d}_01.hdf' - 'WATER_VIIRS_Prj_SVI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_{source:8s}_{aoi:3s}_{dim0:d}_{dim1:d}_01.hdf' - 'WATER_COM_VIIRS_Prj_SVI_d{start_time:%Y%m%d}_d{end_time:%Y%m%d}_{dim0:d}_{dim1:d}_{unknown1:2d}_{total_days:3d}day_{tile_num:3d}.hdf' datasets: water_detection: name: 'WaterDetection' file_type: viirs_edr satpy-0.20.0/satpy/etc/readers/viirs_l1b.yaml000066400000000000000000000331321362525524100210560ustar00rootroot00000000000000reader: description: Generic NASA VIIRS L1B Reader name: viirs_l1b reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] default_datasets: navigations: vgeoi: description: VIIRS L1B I-band Navigation file_type: vgeoi latitude_key: geolocation_data/latitude longitude_key: geolocation_data/longitude nadir_resolution: [371] rows_per_scan: 32 vgeom: description: VIIRS L1B M-band Navigation file_type: vgeom latitude_key: geolocation_data/latitude longitude_key: geolocation_data/longitude nadir_resolution: [742] rows_per_scan: 16 vgeod: description: VIIRS L1B DNB Navigation file_type: vgeod latitude_key: geolocation_data/latitude longitude_key: geolocation_data/longitude nadir_resolution: [742] rows_per_scan: 16 file_types: vgeoi: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VGEOI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' vgeom: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VGEOM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' vgeod: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VGEOD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' vl1bi: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}02IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' vl1bm: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}02MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' vl1bd: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 
'V{platform_shortname:2s}02DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' datasets: i_lon: name: i_lon resolution: 371 file_type: vgeoi file_key: geolocation_data/longitude units: degrees standard_name: longitude i_lat: name: i_lat resolution: 371 file_type: vgeoi file_key: geolocation_data/latitude units: degrees standard_name: latitude m_lon: name: m_lon resolution: 742 file_type: vgeom file_key: geolocation_data/longitude units: degrees standard_name: longitude m_lat: name: m_lat resolution: 742 file_type: vgeom file_key: geolocation_data/latitude units: degrees standard_name: latitude dnb_lon: name: dnb_lon resolution: 743 file_type: vgeod file_key: geolocation_data/longitude units: degrees standard_name: longitude dnb_lat: name: dnb_lat resolution: 743 file_type: vgeod file_key: geolocation_data/latitude units: degrees standard_name: latitude I01: name: I01 wavelength: [0.600, 0.640, 0.680] resolution: 371 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I02: name: I02 wavelength: [0.845, 0.865, 0.884] resolution: 371 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I03: name: I03 wavelength: [1.580, 1.610, 1.640] resolution: 371 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I04: name: I04 wavelength: [3.580, 3.740, 3.900] resolution: 371 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I05: name: I05 wavelength: [10.500, 11.450, 12.300] resolution: 371 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [i_lon, i_lat] file_type: vl1bi I_SOLZ: name: i_solar_zenith_angle standard_name: solar_zenith_angle resolution: 371 units: degrees coordinates: [i_lon, i_lat] file_type: vgeoi file_key: geolocation_data/solar_zenith I_SOLA: name: i_solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 371 units: degrees coordinates: [i_lon, i_lat] file_type: vgeoi file_key: geolocation_data/solar_azimuth I_SENZ: name: i_satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 371 units: degrees coordinates: [i_lon, i_lat] file_type: vgeoi file_key: geolocation_data/sensor_zenith I_SENA: name: i_satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 371 units: degrees coordinates: [i_lon, i_lat] file_type: vgeoi file_key: geolocation_data/sensor_azimuth M01: name: M01 wavelength: [0.402, 0.412, 0.422] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M02: name: M02 wavelength: [0.436, 0.445, 0.454] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: 
toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M03: name: M03 wavelength: [0.478, 0.488, 0.498] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M04: name: M04 wavelength: [0.545, 0.555, 0.565] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M05: name: M05 wavelength: [0.662, 0.672, 0.682] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M06: name: M06 wavelength: [0.739, 0.746, 0.754] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M07: name: M07 wavelength: [0.846, 0.865, 0.885] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M08: name: M08 wavelength: [1.230, 1.240, 1.250] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M09: name: M09 wavelength: [1.371, 1.378, 1.386] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M10: name: M10 wavelength: [1.580, 1.610, 1.640] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M11: name: M11 wavelength: [2.225, 2.250, 2.275] resolution: 742 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M12: name: M12 wavelength: [3.610, 3.700, 3.790] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M13: name: M13 wavelength: [3.973, 4.050, 4.128] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M14: name: M14 wavelength: [8.400, 8.550, 8.700] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M15: name: M15 wavelength: [10.263, 
10.763, 11.263] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M16: name: M16 wavelength: [11.538, 12.013, 12.489] resolution: 742 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [m_lon, m_lat] file_type: vl1bm M_SOLZ: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 742 units: degrees coordinates: [m_lon, m_lat] file_type: vgeom file_key: geolocation_data/solar_zenith M_SOLA: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 742 units: degrees coordinates: [m_lon, m_lat] file_type: vgeom file_key: geolocation_data/solar_azimuth M_SENZ: name: satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 742 units: degrees coordinates: [m_lon, m_lat] file_type: vgeom file_key: geolocation_data/sensor_zenith M_SENA: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 742 units: degrees coordinates: [m_lon, m_lat] file_type: vgeom file_key: geolocation_data/sensor_azimuth DNB: name: DNB wavelength: [0.500, 0.700, 0.900] resolution: 743 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 sr-1 file_units: W cm-2 sr-1 coordinates: [dnb_lon, dnb_lat] file_type: vl1bd file_key: observation_data/DNB_observations DNB_SZA: name: dnb_solar_zenith_angle standard_name: solar_zenith_angle resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod file_key: geolocation_data/solar_zenith DNB_LZA: name: dnb_lunar_zenith_angle standard_name: lunar_zenith_angle resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod file_key: geolocation_data/lunar_zenith dnb_moon_illumination_fraction: name: dnb_moon_illumination_fraction resolution: 743 file_type: vgeod file_key: geolocation_data/moon_illumination_fraction coordinates: [dnb_lon, dnb_lat] satpy-0.20.0/satpy/etc/readers/viirs_sdr.yaml000066400000000000000000000362361362525524100212000ustar00rootroot00000000000000reader: name: viirs_sdr description: VIIRS SDR Reader reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRReader sensors: [viirs] # file pattern keys to sort files by with 'satpy.utils.group_files' # by default, don't use start_time group files (only orbit and platform) group_keys: ['orbit', 'platform_shortname'] datasets: i_lon: name: i_longitude resolution: 371 file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: All_Data/{dataset_group}_All/Longitude units: "degrees_east" standard_name: longitude coordinates: [i_longitude, i_latitude] i_lat: name: i_latitude resolution: 371 file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: All_Data/{dataset_group}_All/Latitude units: "degrees_north" standard_name: latitude coordinates: [i_longitude, i_latitude] m_lon: name: m_longitude resolution: 742 file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: All_Data/{dataset_group}_All/Longitude units: "degrees_east" standard_name: longitude coordinates: [m_longitude, m_latitude] m_lat: name: m_latitude resolution: 742 file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: All_Data/{dataset_group}_All/Latitude units: "degrees_north" standard_name: latitude coordinates: [m_longitude, m_latitude] dnb_lon: name: dnb_longitude resolution: 743 file_type: 
generic_file dataset_groups: [GDNBO] file_key: All_Data/{dataset_group}_All/Longitude units: "degrees_east" standard_name: longitude coordinates: [dnb_longitude, dnb_latitude] dnb_lat: name: dnb_latitude resolution: 743 file_type: generic_file dataset_groups: [GDNBO] file_key: All_Data/{dataset_group}_All/Latitude units: "degrees_north" standard_name: latitude coordinates: [dnb_longitude, dnb_latitude] I01: name: I01 wavelength: [0.600, 0.640, 0.680] modifiers: [sunz_corrected_iband] dataset_groups: [SVI01] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 I02: name: I02 wavelength: [0.845, 0.865, 0.884] modifiers: [sunz_corrected_iband] dataset_groups: [SVI02] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 I03: name: I03 wavelength: [1.580, 1.610, 1.640] modifiers: [sunz_corrected_iband] dataset_groups: [SVI03] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 I04: name: I04 wavelength: [3.580, 3.740, 3.900] file_type: generic_file dataset_groups: [SVI04] resolution: 371 coordinates: [i_longitude, i_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 I05: name: I05 wavelength: [10.500, 11.450, 12.300] dataset_groups: [SVI05] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M01: name: M01 wavelength: [0.402, 0.412, 0.422] modifiers: [sunz_corrected] dataset_groups: [SVM01] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M02: name: M02 wavelength: [0.436, 0.445, 0.454] modifiers: [sunz_corrected] dataset_groups: [SVM02] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M03: name: M03 wavelength: [0.478, 0.488, 0.498] modifiers: [sunz_corrected] dataset_groups: [SVM03] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M04: name: M04 wavelength: [0.545, 0.555, 0.565] modifiers: [sunz_corrected] dataset_groups: [SVM04] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 
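# The 'calibration' sub-sections above advertise the calibration levels each
# band can be loaded at: reflective bands default to 'reflectance', emissive
# bands to 'brightness_temperature', and 'radiance' can be requested
# explicitly. A minimal, illustrative Python sketch (the glob pattern and
# data directory are assumptions, not part of this configuration):
#
#     from glob import glob
#     from satpy import Scene
#     from satpy.dataset import DatasetID
#
#     scn = Scene(filenames=glob('/data/viirs/*.h5'), reader='viirs_sdr')
#     scn.load(['M04'])  # default calibration for this band: reflectance
#     scn.load([DatasetID(name='M04', calibration='radiance')])  # explicit radiance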
M05: name: M05 wavelength: [0.662, 0.672, 0.682] modifiers: [sunz_corrected] dataset_groups: [SVM05] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M06: name: M06 wavelength: [0.739, 0.746, 0.754] modifiers: [sunz_corrected] dataset_groups: [SVM06] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M07: name: M07 wavelength: [0.846, 0.865, 0.885] modifiers: [sunz_corrected] dataset_groups: [SVM07] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M08: name: M08 wavelength: [1.230, 1.240, 1.250] modifiers: [sunz_corrected] dataset_groups: [SVM08] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M09: name: M09 wavelength: [1.371, 1.378, 1.386] modifiers: [sunz_corrected] dataset_groups: [SVM09] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M10: name: M10 wavelength: [1.580, 1.610, 1.640] modifiers: [sunz_corrected] dataset_groups: [SVM10] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M11: name: M11 wavelength: [2.225, 2.250, 2.275] modifiers: [sunz_corrected] dataset_groups: [SVM11] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M12: name: M12 wavelength: [3.610, 3.700, 3.790] dataset_groups: [SVM12] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M13: name: M13 wavelength: [3.973, 4.050, 4.128] dataset_groups: [SVM13] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M14: name: M14 wavelength: [8.400, 8.550, 8.700] dataset_groups: [SVM14] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M15: name: M15 wavelength: [10.263, 10.763, 11.263] dataset_groups: [SVM15] file_type: generic_file 
resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 M16: name: M16 wavelength: [11.538, 12.013, 12.489] dataset_groups: [SVM16] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 I_SOLZ: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 371 coordinates: [i_longitude, i_latitude] units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' I_SOLA: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 371 coordinates: [i_longitude, i_latitude] units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' I_SENZ: name: satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 371 coordinates: [i_longitude, i_latitude] units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' I_SENA: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 371 coordinates: [i_longitude, i_latitude] units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' M_SOLZ: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 742 coordinates: [m_longitude, m_latitude] units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' M_SOLA: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 742 coordinates: [m_longitude, m_latitude] units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' M_SENZ: name: satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 742 coordinates: [m_longitude, m_latitude] units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' M_SENA: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 742 coordinates: [m_longitude, m_latitude] units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' DNB: name: DNB wavelength: [0.500, 0.700, 0.900] resolution: 743 coordinates: [dnb_longitude, dnb_latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 sr-1 file_units: W cm-2 sr-1 dataset_groups: [SVDNB] file_type: generic_file DNB_SZA: name: dnb_solar_zenith_angle standard_name: solar_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' DNB_LZA: name: dnb_lunar_zenith_angle standard_name: lunar_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/LunarZenithAngle' DNB_SENZ: name: dnb_satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] units: degrees file_type: generic_file 
dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' DNB_SENA: name: dnb_satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' dnb_moon_illumination_fraction: name: dnb_moon_illumination_fraction file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/MoonIllumFraction' file_types: generic_file: file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler file_patterns: ['{datasets}_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'] # Example filenames # GMODO-SVM01-SVM02-SVM03-SVM04-SVM05-SVM06-SVM07-SVM08-SVM09-SVM10-SVM11-SVM12-SVM13-SVM14-SVM15-SVM16_j01_d20190304_t1103049_e1108449_b06684_c20190304213641984108_nobc_ops.h5 # GMTCO_j01_d20190304_t1103049_e1108449_b06684_c20190304150845549693_nobc_ops.h5 # GDNBO-SVDNB_j01_d20190304_t1057236_e1103036_b06684_c20190304213641088765_nobc_ops.h5 # SVM15_npp_d20150311_t1126366_e1128008_b17451_c20150311113344455225_cspp_dev.h5 satpy-0.20.0/satpy/etc/readers/virr_l1b.yaml000066400000000000000000000106111362525524100207010ustar00rootroot00000000000000reader: description: reader for VIRR data name: virr_l1b sensors: [virr] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: virr_l1b: file_reader: !!python/name:satpy.readers.virr_l1b.VIRR_L1B file_patterns: - 'tf{creation_time:%Y%j%H%M%S}.{platform_id}-L_VIRRX_L1B.HDF' geolocation_prefix: '' virr_geoxx: file_reader: !!python/name:satpy.readers.virr_l1b.VIRR_L1B file_patterns: - 'tf{creation_time:%Y%j%H%M%S}.{platform_id}-L_VIRRX_GEOXX.HDF' geolocation_prefix: 'Geolocation/' datasets: R1: name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 0 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R2: name: '2' wavelength: [0.84, 0.865, 0.89] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 1 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance E1: name: '3' wavelength: [3.55, 3.74, 3.93] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 0 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: brightness_temperature E2: name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 1 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: brightness_temperature E3: name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 2 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: brightness_temperature R3: name: '6' wavelength: [1.55, 1.6, 1.64] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 2 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R4: name: '7' wavelength: [0.43, 0.455, 0.48] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 3 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R5: name: '8' wavelength: [0.48, 0.505, 0.53] resolution: 1000 file_type: 
virr_l1b file_key: Data/EV_RefSB band_index: 4 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R6: name: '9' wavelength: [0.53, 0.555, 0.58] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 5 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R7: name: '10' wavelength: [1.325, 1.36, 1.395] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 6 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance satellite_azimuth_angle: name: satellite_azimuth_angle file_type: [virr_geoxx, virr_l1b] file_key: SensorAzimuth standard_name: sensor_azimuth_angle coordinates: [longitude, latitude] satellite_zenith_angle: name: satellite_zenith_angle file_type: [virr_geoxx, virr_l1b] file_key: SensorZenith standard_name: sensor_zenith_angle coordinates: [longitude, latitude] solar_azimuth_angle: name: solar_azimuth_angle file_type: [virr_geoxx, virr_l1b] file_key: SolarAzimuth standard_name: solar_azimuth_angle coordinates: [longitude, latitude] solar_zenith_angle: name: solar_zenith_angle file_type: [virr_geoxx, virr_l1b] file_key: SolarZenith standard_name: solar_zenith_angle coordinates: [longitude, latitude] longitude: name: longitude resolution: 1000 file_type: [virr_l1b, virr_geoxx] file_key: Longitude standard_name: longitude units: degrees_east coordinates: [longitude, latitude] latitude: name: latitude resolution: 1000 file_type: [virr_l1b, virr_geoxx] file_key: Latitude units: degrees_north standard_name: latitude coordinates: [longitude, latitude] satpy-0.20.0/satpy/etc/writers/000077500000000000000000000000001362525524100163505ustar00rootroot00000000000000satpy-0.20.0/satpy/etc/writers/cf.yaml000066400000000000000000000003101362525524100176160ustar00rootroot00000000000000writer: name: cf description: Generic netCDF4/CF Writer writer: !!python/name:satpy.writers.cf_writer.CFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.nc' compress: DEFLATE zlevel: 6 satpy-0.20.0/satpy/etc/writers/geotiff.yaml000066400000000000000000000003151362525524100206560ustar00rootroot00000000000000writer: name: geotiff description: Generic GeoTIFF Writer writer: !!python/name:satpy.writers.geotiff.GeoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE zlevel: 6satpy-0.20.0/satpy/etc/writers/mitiff.yaml000066400000000000000000000003231362525524100205100ustar00rootroot00000000000000--- writer: name: mitiff description: Generic MITIFF Writer writer: !!python/name:satpy.writers.mitiff.MITIFFWriter filename: '{name:s}_{start_time:%Y%m%d_%H%M%S}.mitiff' compress: DEFLATE zlevel: 6 satpy-0.20.0/satpy/etc/writers/ninjotiff.yaml000066400000000000000000000003161362525524100212220ustar00rootroot00000000000000writer: name: ninjotiff description: NinjoTIFF Writer writer: !!python/name:satpy.writers.ninjotiff.NinjoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE zlevel: 6 satpy-0.20.0/satpy/etc/writers/scmi.yaml000066400000000000000000000365101362525524100201740ustar00rootroot00000000000000# Originally converted from the CSPP Polar2Grid SCMI Writer # Some datasets are named differently and have not been converted to # Satpy-style naming yet. These config entries are commented out. 
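# The 'sectors' section below defines the target sectors (lon/lat or
# projection-space extents, a y/x resolution pair, and a PROJ.4 projection
# string), and the 'datasets' section maps Satpy dataset names to the AWIPS
# "physical element" names substituted into the filename pattern above.
# A minimal, illustrative sketch of invoking this writer ('sector_id' and
# 'source_name' fill keys in the filename pattern; the input file list and
# loaded channel are assumptions):
#
#     from satpy import Scene
#
#     scn = Scene(filenames=abi_files, reader='abi_l1b')  # abi_files: your input list
#     scn.load(['C01'])
#     scn.save_datasets(writer='scmi', sector_id='LCC', source_name='SSEC')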
writer: name: scmi description: AWIPS-compatible Tiled NetCDF4 Writer writer: !!python/name:satpy.writers.scmi.SCMIWriter filename: '{source_name}_AII_{platform_name}_{sensor}_{name}_{sector_id}_{tile_id}_{start_time:%Y%m%d_%H%M}.nc' compress: True sectors: LCC: lower_left_lonlat: [-135, 20] upper_right_lonlat: [-60, 60] resolution: [1300000, 1300000] # y, x projection: '+proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs' Polar: lower_left_lonlat: [-180, 33] upper_right_lonlat: [-40.5, 78] resolution: [1400000, 1400000] projection: '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=60.0 +lon_0=-150 +units=m' Mercator: lower_left_lonlat: [-135, 0] upper_right_lonlat: [-30, 50] resolution: [2150000, 2150000] projection: '+proj=merc +datum=WGS84 +ellps=WGS84 +lon_0=-95 +lat_0=0 +units=m +no_defs' Pacific: lower_left_lonlat: [120, 0] upper_right_lonlat: [-135, 50] resolution: [2150000, 2150000] projection: '+proj=merc +datum=WGS84 +ellps=WGS84 +lon_0=170 +lat_0=0 +units=m +no_defs' GOES_TEST: lower_left_xy: [-5433892.6923244298, -5433893.2095645051] upper_right_xy: [5433893.2095645051, 5433892.6923244298] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-89.5 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_EAST: lower_left_xy: [-5433892.6923244298, -5433893.2095645051] upper_right_xy: [5433893.2095645051, 5433892.6923244298] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-75.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_WEST: lower_left_xy: [-5433892.6923244298, -5433893.2095645051] upper_right_xy: [5433893.2095645051, 5433892.6923244298] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-137.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_STORE: lower_left_xy: [-5433892.6923244298, -5433893.2095645051] upper_right_xy: [5433893.2095645051, 5433892.6923244298] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-105.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' AHI Full Disk: lower_left_xy: [-5499999.901174725, -5499999.901174725] upper_right_xy: [5499999.901174725, 5499999.901174725] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=140.7 +h=35785863.0 +a=6378137.0 +b=6356752.3 +sweep=y +units=m +no_defs' datasets: default: physical_element: '{name}' # ACSPO Products acspo_sst: reader: acspo name: sst physical_element: ACSPO SST # CLAVR-x Products default_clavrx: reader: clavrx physical_element: 'CLAVR-x {name}' clavrx_cloud_type: reader: clavrx name: cloud_type physical_element: CLAVR-x Cloud Type clavrx_cld_temp_acha: reader: clavrx name: cld_temp_acha physical_element: CLAVR-x Cloud Top Temperature (ACHA) clavrx_cld_height_acha: reader: clavrx name: cld_height_acha physical_element: CLAVR-x Cloud Top Height (ACHA) clavrx_cloud_phase: reader: clavrx name: cloud_phase physical_element: CLAVR-x Cloud Phase clavrx_cld_opd_dcomp: reader: clavrx name: cld_opd_dcomp physical_element: CLAVR-x Cloud Optical Depth (dcomp) clavrx_clld_opd_nlcomp: reader: clavrx name: cloud_opd_nlcomp physical_element: CLAVR-x Cloud Optical Depth (nlcomp) clavrx_cld_reff_dcomp: reader: clavrx name: cld_reff_dcomp physical_element: CLAVR-x Cloud Effective Radius (dcomp) clavrx_cld_reff_nlcomp: reader: clavrx name: cld_reff_nlcomp physical_element: CLAVR-x Cloud Effective Radius (nlcomp) clavrx_cld_emiss_acha: reader: clavrx name: cld_emiss_acha physical_element: CLAVR-x Cloud Emissivity (ACHA) 
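# Entries that carry a 'reader:' key, like the CLAVR-x ones above, only apply
# to datasets produced by that reader; datasets with no matching entry fall
# back to the 'default' entry above, which reuses the dataset name as the
# physical element. An illustrative sketch under that assumption (the file
# list and grid are placeholders):
#
#     scn = Scene(filenames=clavrx_files, reader='clavrx')
#     scn.load(['cloud_type'])
#     remapped = scn.resample(my_grid)  # my_grid: an AreaDefinition of your choice
#     remapped.save_datasets(writer='scmi', sector_id='Polar', source_name='SSEC')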
clavrx_refl_lunar_dnb_nom: reader: clavrx name: refl_lunar_dnb_nom physical_element: CLAVR-x Cloud Lunar Reflectance clavrx_rain_rate: reader: clavrx name: rain_rate physical_element: CLAVR-x Rain Rate # AVHRR L1B products avhrr_band1_vis: name: band1_vis physical_element: 0.63 um avhrr_band2_vis: name: band2_vis physical_element: 0.86 um avhrr_band3a_vis: name: band3a_vis physical_element: 1.61 um avhrr_band3b_bt: name: band3b_bt physical_element: 3.74 um avhrr_band4_bt: name: band4_bt physical_element: 10.8 um avhrr_band5_bt: name: band5_bt physical_element: 12.0 um # VIIRS SDRs viirs_i01: name: I01 physical_element: 0.64 um viirs_i02: name: I02 physical_element: 0.86 um viirs_i03: name: I03 physical_element: 1.61 um viirs_i04: name: I04 physical_element: 3.74 um viirs_i05: name: I05 physical_element: 11.5 um viirs_histogram_dnb: name: histogram_dnb physical_element: Histogram DNB viirs_adaptive_dnb: name: adaptive_dnb physical_element: Adaptive DNB viirs_dynamic_dnb: name: dynamic_dnb physical_element: Dynamic DNB viirs_hncc_dnb: name: hncc_dnb physical_element: HNCC DNB viirs_ifog: name: ssec_fog physical_element: Fog viirs_m01: name: M01 physical_element: 0.41 um viirs_m02: name: M02 physical_element: 0.45 um viirs_m03: name: M03 physical_element: 0.49 um viirs_m04: name: M04 physical_element: 0.56 um viirs_m05: name: M05 physical_element: 0.67 um viirs_m06: name: M06 physical_element: 0.75 um viirs_m07: name: M07 physical_element: 0.86 um viirs_m08: name: M08 physical_element: 1.24 um viirs_m09: name: M09 physical_element: 1.38 um viirs_m10: name: M10 physical_element: 1.61 um viirs_m11: name: M11 physical_element: 2.25 um viirs_m12: name: M12 physical_element: 3.70 um viirs_m13: name: M13 physical_element: 4.05 um viirs_m14: name: M14 physical_element: 8.6 um viirs_m15: name: M15 physical_element: 10.8 um viirs_m16: name: M16 physical_element: 12.0 um # VIIRS Corrected Reflectance # viirs_viirs_crefl01: # name: viirs_crefl01 # physical_element: 0.67 um CR # viirs_viirs_crefl02: # name: viirs_crefl02 # physical_element: 0.87 um CR # viirs_viirs_crefl03: # name: viirs_crefl03 # physical_element: 0.49 um CR # viirs_viirs_crefl04: # name: viirs_crefl04 # physical_element: 0.56 um CR # viirs_viirs_crefl05: # name: viirs_crefl05 # physical_element: 1.24 um CR # viirs_viirs_crefl06: # name: viirs_crefl06 # physical_element: 1.61 um CR # viirs_crefl07: # name: viirs_crefl07 # physical_element: 2.25 um CR # viirs_crefl08: # name: viirs_crefl08 # physical_element: 0.64 um CR # viirs_crefl09: # name: viirs_crefl09 # physical_element: 0.87 um CR # viirs_crefl10: # name: viirs_crefl10 # physical_element: 1.61 um CR # MODIS L1B Products # modis_vis01: # name: vis01 # physical_element: 0.65 um # modis_vis02: # name: vis02 # physical_element: 0.86 um # modis_vis03: # name: vis03 # physical_element: 0.47 um # modis_vis04: # name: vis04 # physical_element: 0.56 um # modis_vis05: # name: vis05 # physical_element: 1.24 um # modis_vis06: # name: vis06 # physical_element: 1.64 um # modis_vis07: # name: vis07 # physical_element: 2.13 um # modis_vis26: # name: vis26 # physical_element: 1.38 um # modis_bt20: # name: bt20 # physical_element: 3.75 um # modis_bt21: # name: bt21 # physical_element: Fire # modis_bt22: # name: bt22 # physical_element: 3.96 um # modis_bt23: # name: bt23 # physical_element: 4.05 um # modis_bt24: # name: bt24 # physical_element: 4.47 um # modis_bt25: # name: bt25 # physical_element: 4.52 um # modis_bt27: # name: bt27 # physical_element: 6.7 um # modis_bt28: # name: bt28 # 
physical_element: 7.3 um # modis_bt29: # name: bt29 # physical_element: 8.6 um # modis_bt30: # name: bt30 # physical_element: 9.7 um # modis_bt31: # name: bt31 # physical_element: 11.0 um # modis_bt32: # name: bt32 # physical_element: 12.0 um # modis_bt33: # name: bt33 # physical_element: 13.3 um # modis_bt34: # name: bt34 # physical_element: 13.6 um # modis_bt35: # name: bt35 # physical_element: 13.9 um # modis_bt36: # name: bt36 # physical_element: 14.2 um # modis_sst: # name: sst # physical_element: SST # modis_lst: # name: lst # physical_element: LST # modis_slst: # name: slst # physical_element: LSTSUM # modis_fog: # name: ssec_fog # physical_element: Fog # modis_ctt: # name: ctt # physical_element: CTT # modis_ndvi: # name: ndvi # physical_element: NDVI # modis_tpw: # name: tpw # physical_element: TPW # modis_ice_concentration: # name: ice_concentration # physical_element: Ice Concentration # modis_ist: # name: ist # physical_element: Ice Surface Temperature # MODIS L1B Corrected Reflectances # modis_crefl01_250m: # name: modis_crefl01_250m # physical_element: 0.65 um CR # modis_crefl01_500m: # name: modis_crefl01_250m # physical_element: 0.65 um CR # modis_crefl01_1000m: # name: modis_crefl01_1000m # physical_element: 0.65 um CR # modis_crefl02_250m: # name: modis_crefl02_250m # physical_element: 0.86 um CR # modis_crefl02_500m: # name: modis_crefl02_500m # physical_element: 0.86 um CR # modis_crefl02_1000m: # name: modis_crefl02_1000m # physical_element: 0.86 um CR # modis_crefl03_250m: # name: modis_crefl03_250m # physical_element: 0.47 um CR # modis_crefl03_500m: # name: modis_crefl03_500m # physical_element: 0.47 um CR # modis_crefl03_1000m: # name: modis_crefl03_1000m # physical_element: 0.47 um CR # modis_crefl04_250m: # name: modis_crefl04_250m # physical_element: 0.56 um CR # modis_crefl04_500m: # name: modis_crefl04_500m # physical_element: 0.56 um CR # modis_crefl04_1000m: # name: modis_crefl04_1000m # physical_element: 0.56 um CR # modis_crefl05_500m: # name: modis_crefl05_500m # physical_element: 1.24 um CR # modis_crefl05_1000m: # name: modis_crefl05_1000m # physical_element: 1.24 um CR # modis_crefl06_500m: # name: modis_crefl06_500m # physical_element: 1.64 um CR # modis_crefl06_1000m: # name: modis_crefl06_1000m # physical_element: 1.64 um CR # modis_crefl07_500m: # name: modis_crefl07_500m # physical_element: 2.13 um CR # modis_crefl07_1000m: # name: modis_crefl07_1000m # physical_element: 2.13 um CR # MIRS Products # mirs_btemp_23v: # name: btemp_23v # physical_element: MIRS 23 GHZ V # mirs_btemp_31v: # name: btemp_31v # physical_element: MIRS 31 GHZ V # mirs_btemp_50h: # name: btemp_50h # physical_element: MIRS 50 GHZ H # mirs_btemp_51h: # name: btemp_51h # physical_element: MIRS 51 GHZ H # mirs_btemp_52h: # name: btemp_52h # physical_element: MIRS 52 GHZ H # mirs_btemp_53h: # name: btemp_53h # physical_element: MIRS 53 GHZ H # mirs_btemp_54h1: # name: btemp_54h1 # physical_element: MIRS 54 GHZ H-1 # mirs_btemp_54h2: # name: btemp_54h2 # physical_element: MIRS 54 GHZ H-2 # mirs_btemp_55h: # name: btemp_55h # physical_element: MIRS 55 GHZ H # mirs_btemp_57h1: # name: btemp_57h1 # physical_element: MIRS 57 GHZ H-1 # mirs_btemp_57h2: # name: btemp_57h2 # physical_element: MIRS 57 GHZ H-2 # mirs_btemp_57h3: # name: btemp_57h3 # physical_element: MIRS 57 GHZ H-3 # mirs_btemp_57h4: # name: btemp_57h4 # physical_element: MIRS 57 GHZ H-4 # mirs_btemp_57h5: # name: btemp_57h5 # physical_element: MIRS 57 GHZ H-5 # mirs_btemp_57h6: # name: btemp_57h6 # physical_element: MIRS 
# mirs_btemp_88v: # name: btemp_88v # physical_element: MIRS 88 GHZ V # mirs_btemp_165h: # name: btemp_165h # physical_element: MIRS 165 GHZ H # mirs_btemp_183h1: # name: btemp_183h1 # physical_element: MIRS 183 GHZ H-1 # mirs_btemp_183h2: # name: btemp_183h2 # physical_element: MIRS 183 GHZ H-2 # mirs_btemp_183h3: # name: btemp_183h3 # physical_element: MIRS 183 GHZ H-3 # mirs_btemp_183h4: # name: btemp_183h4 # physical_element: MIRS 183 GHZ H-4 # mirs_btemp_183h5: # name: btemp_183h5 # physical_element: MIRS 183 GHZ H-5 # MIRS BTs - NOAA-18 - AMSU-A MHS # MIRS BTs - NOAA-19 - AMSU-A MHS # MIRS BTs - M1 (metopb) - AMSU-A MHS # MIRS BTs - M2 (metopa) - AMSU-A MHS # mirs_btemp_50v: # name: btemp_50v # physical_element: MIRS 50 GHZ V # mirs_btemp_52v: # name: btemp_52v # physical_element: MIRS 52 GHZ V # mirs_btemp_54h: # name: btemp_54h # physical_element: MIRS 54 GHZ H # mirs_btemp_54v: # name: btemp_54v # physical_element: MIRS 54 GHZ V # mirs_btemp_89v1: # name: btemp_89v1 # physical_element: MIRS 89 GHZ V-1 # mirs_btemp_89v2: # name: btemp_89v2 # physical_element: MIRS 89 GHZ V-2 # 157h on OSPO NOAA site # mirs_btemp_157v: # name: btemp_157v # physical_element: MIRS 157 GHZ V # mirs_btemp_190v: # name: btemp_190v # physical_element: MIRS 190 GHZ V # mirs_rain_rate: # reader: mirs # name: rain_rate # physical_element: MIRS Rain Rate # mirs_snow_cover: # reader: mirs # name: snow_cover # physical_element: MIRS Snow Cover # mirs_sea_ice: # reader: mirs # name: sea_ice # physical_element: MIRS Sea Ice # mirs_swe: # reader: mirs # name: swe # physical_element: MIRS SWE # mirs_clw: # reader: mirs # name: clw # physical_element: MIRS CLW # mirs_tpw: # reader: mirs # name: tpw # physical_element: MIRS TPW # mirs_tskin: # reader: mirs # name: tskin # physical_element: MIRS Skin Temperature # AMSR-2 L1B amsr2_btemp_36.5h: name: btemp_36.5h physical_element: 36.5 GHz H amsr2_btemp_36.5v: name: btemp_36.5v physical_element: 36.5 GHz V amsr2_btemp_89.0ah: name: btemp_89.0ah physical_element: 89.0 GHz AH amsr2_btemp_89.0av: name: btemp_89.0av physical_element: 89.0 GHz AV amsr2_btemp_89.0bh: name: btemp_89.0bh physical_element: 89.0 GHz BH amsr2_btemp_89.0bv: name: btemp_89.0bv physical_element: 89.0 GHz BV # GEOCAT Level 1 Products geocat_surface_type: name: pixel_surface_type physical_element: Surface Type # GEOCAT Level 2 Products satpy-0.20.0/satpy/etc/writers/simple_image.yaml000066400000000000000000000002651362525524100216720ustar00rootroot00000000000000writer: name: simple_image description: Generic Image Writer writer: !!python/name:satpy.writers.simple_image.PillowWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.png'
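A hedged usage sketch for the simple_image writer configured above; the reader name, file glob, and channel name below are hypothetical stand-ins and not part of the original configuration:

from glob import glob
from satpy import Scene

# Hypothetical reader and input files; any loaded Scene works the same way.
scn = Scene(reader='abi_l1b', filenames=glob('/data/goes16/OR_ABI-L1b*.nc'))
scn.load(['C13'])
# PillowWriter formats the configured filename pattern with each dataset's attrs.
scn.save_datasets(writer='simple_image', datasets=['C13'])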
"""MultiScene object to work with multiple timesteps of satellite data.""" import logging import numpy as np import dask.array as da import xarray as xr import pandas as pd from satpy.scene import Scene from satpy.writers import get_enhanced_image from satpy.dataset import combine_metadata, DatasetID from threading import Thread try: # python 3 from queue import Queue except ImportError: # python 2 from Queue import Queue try: import imageio except ImportError: imageio = None try: from dask.distributed import get_client except ImportError: get_client = None log = logging.getLogger(__name__) def stack(datasets): """Overlay series of datasets on top of each other.""" base = datasets[0].copy() for dataset in datasets[1:]: base = base.where(dataset.isnull(), dataset) return base def timeseries(datasets): """Expand dataset with and concatenate by time dimension.""" expanded_ds = [] for ds in datasets: tmp = ds.expand_dims("time") tmp.coords["time"] = pd.DatetimeIndex([ds.attrs["start_time"]]) expanded_ds.append(tmp) res = xr.concat(expanded_ds, dim="time") res.attrs = combine_metadata(*[x.attrs for x in expanded_ds]) return res class _SceneGenerator(object): """Fancy way of caching Scenes from a generator.""" def __init__(self, scene_gen): self._scene_gen = scene_gen self._scene_cache = [] self._dataset_idx = {} # this class itself is not an iterator, make one self._self_iter = self._create_cached_iter() @property def first(self): """First element in the generator.""" return next(iter(self)) def _create_cached_iter(self): """Iterate over the provided scenes, caching them for later.""" for scn in self._scene_gen: self._scene_cache.append(scn) yield scn def __iter__(self): """Iterate over the provided scenes, caching them for later.""" idx = 0 while True: if idx >= len(self._scene_cache): try: scn = next(self._self_iter) except StopIteration: return else: scn = self._scene_cache[idx] yield scn idx += 1 def __getitem__(self, ds_id): """Get a specific dataset from the scenes.""" for scn in self: yield scn.get(ds_id) class MultiScene(object): """Container for multiple `Scene` objects.""" def __init__(self, scenes=None): """Initialize MultiScene and validate sub-scenes. Args: scenes (iterable): `Scene` objects to operate on (optional) .. note:: If the `scenes` passed to this object are a generator then certain operations performed will try to preserve that generator state. This may limit what properties or methods are available to the user. To avoid this behavior compute the passed generator by converting the passed scenes to a list first: ``MultiScene(list(scenes))``. """ self._scenes = scenes or [] scenes = iter(self._scenes) self._scene_gen = _SceneGenerator(iter(scenes)) # if we were originally given a generator-like object then we want to # coordinate the loading between _SceneGenerator and _scenes # otherwise it doesn't really matter and other operations may prefer # a list if not isinstance(scenes, (list, tuple)): self._scenes = iter(self._scene_gen) @property def first_scene(self): """First Scene of this MultiScene object.""" return self._scene_gen.first @classmethod def from_files(cls, files_to_sort, reader=None, **kwargs): """Create multiple Scene objects from multiple files. This uses the :func:`satpy.readers.group_files` function to group files. See this function for more details on possible keyword arguments. .. 
class _SceneGenerator(object): """Fancy way of caching Scenes from a generator.""" def __init__(self, scene_gen): self._scene_gen = scene_gen self._scene_cache = [] self._dataset_idx = {} # this class itself is not an iterator, make one self._self_iter = self._create_cached_iter() @property def first(self): """First element in the generator.""" return next(iter(self)) def _create_cached_iter(self): """Iterate over the provided scenes, caching them for later.""" for scn in self._scene_gen: self._scene_cache.append(scn) yield scn def __iter__(self): """Iterate over the provided scenes, caching them for later.""" idx = 0 while True: if idx >= len(self._scene_cache): try: scn = next(self._self_iter) except StopIteration: return else: scn = self._scene_cache[idx] yield scn idx += 1 def __getitem__(self, ds_id): """Get a specific dataset from the scenes.""" for scn in self: yield scn.get(ds_id) class MultiScene(object): """Container for multiple `Scene` objects.""" def __init__(self, scenes=None): """Initialize MultiScene and validate sub-scenes. Args: scenes (iterable): `Scene` objects to operate on (optional) .. note:: If the `scenes` passed to this object are a generator then certain operations performed will try to preserve that generator state. This may limit what properties or methods are available to the user. To avoid this behavior compute the passed generator by converting the passed scenes to a list first: ``MultiScene(list(scenes))``. """ self._scenes = scenes or [] scenes = iter(self._scenes) self._scene_gen = _SceneGenerator(iter(scenes)) # if we were originally given a generator-like object then we want to # coordinate the loading between _SceneGenerator and _scenes # otherwise it doesn't really matter and other operations may prefer # a list if not isinstance(scenes, (list, tuple)): self._scenes = iter(self._scene_gen) @property def first_scene(self): """First Scene of this MultiScene object.""" return self._scene_gen.first @classmethod def from_files(cls, files_to_sort, reader=None, **kwargs): """Create multiple Scene objects from multiple files. This uses the :func:`satpy.readers.group_files` function to group files. See this function for more details on possible keyword arguments. .. versionadded:: 0.12 """ from satpy.readers import group_files file_groups = group_files(files_to_sort, reader=reader, **kwargs) scenes = (Scene(filenames=fg) for fg in file_groups) return cls(scenes) def __iter__(self): """Iterate over the provided Scenes once.""" for scn in self._scenes: yield scn @property def scenes(self): """Get list of Scene objects contained in this MultiScene. .. note:: If the Scenes contained in this object are stored in a generator (not list or tuple) then accessing this property will load/iterate through the generator, possibly exhausting it. """ if self.is_generator: log.debug("Forcing iteration of generator-like object of Scenes") self._scenes = list(self._scenes) return self._scenes @property def is_generator(self): """Contained Scenes are stored as a generator.""" return not isinstance(self._scenes, (list, tuple)) @property def loaded_dataset_ids(self): """Union of all Dataset IDs loaded by all children.""" return set(ds_id for scene in self.scenes for ds_id in scene.keys()) @property def shared_dataset_ids(self): """Dataset IDs shared by all children.""" shared_ids = set(self.scenes[0].keys()) for scene in self.scenes[1:]: shared_ids &= set(scene.keys()) return shared_ids def _all_same_area(self, dataset_ids): """Return True if all areas for the provided IDs are equal.""" all_areas = [] for ds_id in dataset_ids: for scn in self.scenes: ds = scn.get(ds_id) if ds is None: continue all_areas.append(ds.attrs.get('area')) all_areas = [area for area in all_areas if area is not None] return all(all_areas[0] == area for area in all_areas[1:]) @property def all_same_area(self): """Determine if all contained Scenes have the same 'area'.""" return self._all_same_area(self.loaded_dataset_ids) @staticmethod def _call_scene_func(gen, func_name, create_new_scene, *args, **kwargs): """Abstract method for running a Scene method on each Scene.""" for scn in gen: new_scn = getattr(scn, func_name)(*args, **kwargs) if create_new_scene: yield new_scn else: yield scn def _generate_scene_func(self, gen, func_name, create_new_scene, *args, **kwargs): """Abstract method for running a Scene method on each Scene. Additionally, modifies current MultiScene or creates a new one if needed. """ new_gen = self._call_scene_func(gen, func_name, create_new_scene, *args, **kwargs) new_gen = new_gen if self.is_generator else list(new_gen) if create_new_scene: return self.__class__(new_gen) self._scene_gen = _SceneGenerator(new_gen) self._scenes = iter(self._scene_gen) def load(self, *args, **kwargs): """Load the required datasets from the multiple scenes.""" self._generate_scene_func(self._scenes, 'load', False, *args, **kwargs) def crop(self, *args, **kwargs): """Crop the multiscene and return a new cropped multiscene.""" return self._generate_scene_func(self._scenes, 'crop', True, *args, **kwargs) def resample(self, destination=None, **kwargs): """Resample the multiscene.""" return self._generate_scene_func(self._scenes, 'resample', True, destination=destination, **kwargs) def blend(self, blend_function=stack): """Blend the datasets into one scene. .. note:: Blending is not currently optimized for generator-based MultiScene.
""" new_scn = Scene() common_datasets = self.shared_dataset_ids for ds_id in common_datasets: datasets = [scn[ds_id] for scn in self.scenes if ds_id in scn] new_scn[ds_id] = blend_function(datasets) return new_scn def _distribute_save_datasets(self, scenes_iter, client, batch_size=1, **kwargs): """Distribute save_datasets across a cluster.""" def load_data(q): idx = 0 while True: future_list = q.get() if future_list is None: break # save_datasets shouldn't be returning anything for future in future_list: future.result() log.info("Finished saving %d scenes", idx) idx += 1 q.task_done() input_q = Queue(batch_size if batch_size is not None else 1) load_thread = Thread(target=load_data, args=(input_q,)) load_thread.start() for scene in scenes_iter: delayed = scene.save_datasets(compute=False, **kwargs) if isinstance(delayed, (list, tuple)) and len(delayed) == 2: # TODO Make this work for (source, target) datasets # given a target, source combination raise NotImplementedError("Distributed save_datasets does not support writers " "that return (source, target) combinations at this time. Use " "the non-distributed save_datasets instead.") future = client.compute(delayed) input_q.put(future) input_q.put(None) log.debug("Waiting for child thread to get saved results...") load_thread.join() log.debug("Child thread died successfully") def _simple_save_datasets(self, scenes_iter, **kwargs): """Run save_datasets on each Scene.""" for scn in scenes_iter: scn.save_datasets(**kwargs) def save_datasets(self, client=True, batch_size=1, **kwargs): """Run save_datasets on each Scene. Note that some writers may not be multi-process friendly and may produce unexpected results or fail by raising an exception. In these cases ``client`` should be set to ``False``. This is currently a known issue for basic 'geotiff' writer work loads. Args: batch_size (int): Number of scenes to compute at the same time. This only has effect if the `dask.distributed` package is installed. This will default to 1. Setting this to 0 or less will attempt to process all scenes at once. This option should be used with care to avoid memory issues when trying to improve performance. client (bool or dask.distributed.Client): Dask distributed client to use for computation. If this is ``True`` (default) then any existing clients will be used. If this is ``False`` or ``None`` then a client will not be created and ``dask.distributed`` will not be used. If this is a dask ``Client`` object then it will be used for distributed computation. kwargs: Additional keyword arguments to pass to :meth:`~satpy.scene.Scene.save_datasets`. Note ``compute`` can not be provided. 
""" if 'compute' in kwargs: raise ValueError("The 'compute' keyword argument can not be provided.") client = self._get_client(client=client) scenes = iter(self._scenes) if client is not None: self._distribute_save_datasets(scenes, client, batch_size=batch_size, **kwargs) else: self._simple_save_datasets(scenes, **kwargs) def _get_animation_info(self, all_datasets, filename, fill_value=None): """Determine filename and shape of animation to be created.""" valid_datasets = [ds for ds in all_datasets if ds is not None] first_dataset = valid_datasets[0] last_dataset = valid_datasets[-1] first_img = get_enhanced_image(first_dataset) first_img_data = first_img.finalize(fill_value=fill_value)[0] shape = tuple(first_img_data.sizes.get(dim_name) for dim_name in ('y', 'x', 'bands')) if fill_value is None and filename.endswith('gif'): log.warning("Forcing fill value to '0' for GIF Luminance images") fill_value = 0 shape = shape[:2] attrs = first_dataset.attrs.copy() if 'end_time' in last_dataset.attrs: attrs['end_time'] = last_dataset.attrs['end_time'] this_fn = filename.format(**attrs) return this_fn, shape, fill_value def _get_animation_frames(self, all_datasets, shape, fill_value=None, ignore_missing=False): """Create enhanced image frames to save to a file.""" for idx, ds in enumerate(all_datasets): if ds is None and ignore_missing: continue elif ds is None: log.debug("Missing frame: %d", idx) data = da.zeros(shape, dtype=np.uint8, chunks=shape) data = xr.DataArray(data) else: img = get_enhanced_image(ds) data, mode = img.finalize(fill_value=fill_value) if data.ndim == 3: # assume all other shapes are (y, x) # we need arrays grouped by pixel so # transpose if needed data = data.transpose('y', 'x', 'bands') yield data.data def _get_client(self, client=True): """Determine what dask distributed client to use.""" client = client or None # convert False/None to None if client is True and get_client is None: log.debug("'dask.distributed' library was not found, will " "use simple serial processing.") client = None elif client is True: try: # get existing client client = get_client() except ValueError: log.warning("No dask distributed client was provided or found, " "but distributed features were requested. 
Will use simple serial processing.") client = None return client def _distribute_frame_compute(self, writers, frame_keys, frames_to_write, client, batch_size=1): """Use ``dask.distributed`` to compute multiple frames at a time.""" def load_data(frame_gen, q): for frame_arrays in frame_gen: future_list = client.compute(frame_arrays) for frame_key, arr_future in zip(frame_keys, future_list): q.put({frame_key: arr_future}) q.put(None) input_q = Queue(batch_size if batch_size is not None else 1) load_thread = Thread(target=load_data, args=(frames_to_write, input_q,)) load_thread.start() while True: input_future = input_q.get() future_dict = client.gather(input_future) if future_dict is None: break # write the current frame # this should only be one element in the dictionary, but this is # also the easiest way to get access to the data for frame_key, result in future_dict.items(): # frame_key = rev_future_dict[future] w = writers[frame_key] w.append_data(result) input_q.task_done() log.debug("Waiting for child thread...") load_thread.join(10) if load_thread.is_alive(): import warnings warnings.warn("Background thread still alive after failing to die gracefully") else: log.debug("Child thread died successfully") def _simple_frame_compute(self, writers, frame_keys, frames_to_write): """Compute frames the plain dask way.""" for frame_arrays in frames_to_write: for frame_key, product_frame in zip(frame_keys, frame_arrays): w = writers[frame_key] w.append_data(product_frame.compute()) def save_animation(self, filename, datasets=None, fps=10, fill_value=None, batch_size=1, ignore_missing=False, client=True, **kwargs): """Save series of Scenes to movie (MP4) or GIF formats. Supported formats are dependent on the `imageio` library and are determined by filename extension by default. .. note:: Starting with ``imageio`` 2.5.0, the use of FFMPEG depends on a separate ``imageio-ffmpeg`` package. By default all datasets available will be saved to individual files using the first Scene's datasets metadata to format the filename provided. If a dataset is not available from a Scene then a black array is used instead (np.zeros(shape)). This function can use the ``dask.distributed`` library for improved performance by computing multiple frames at a time (see `batch_size` option below). If the distributed library is not available then frames will be generated one at a time, one product at a time. Args: filename (str): Filename to save to. Can include python string formatting keys from dataset ``.attrs`` (ex. "{name}_{start_time:%Y%m%d_%H%M%S}.gif") datasets (list): DatasetIDs to save (default: all datasets) fps (int): Frames per second for produced animation fill_value (int): Value to use instead of creating an alpha band. batch_size (int): Number of frames to compute at the same time. This only has effect if the `dask.distributed` package is installed. This will default to 1. Setting this to 0 or less will attempt to process all frames at once. This option should be used with care to avoid memory issues when trying to improve performance. Note that this is the total number of frames for all datasets, so when saving 2 datasets this will compute ``(batch_size / 2)`` frames for the first dataset and ``(batch_size / 2)`` frames for the second dataset. ignore_missing (bool): Don't include a black frame when a dataset is missing from a child scene. client (bool or dask.distributed.Client): Dask distributed client to use for computation. If this is ``True`` (default) then any existing clients will be used.
If this is ``False`` or ``None`` then a client will not be created and ``dask.distributed`` will not be used. If this is a dask ``Client`` object then it will be used for distributed computation. kwargs: Additional keyword arguments to pass to `imageio.get_writer`. """ if imageio is None: raise ImportError("Missing required 'imageio' library") scene_gen = self._scene_gen first_scene = self.first_scene scenes = iter(self._scene_gen) info_scenes = [first_scene] if 'end_time' in filename: # if we need the last scene to generate the filename # then compute all the scenes so we can figure it out log.debug("Generating scenes to compute end_time for filename") scenes = list(scenes) info_scenes.append(scenes[-1]) available_ds = [first_scene.datasets.get(ds) for ds in first_scene.wishlist] available_ds = [DatasetID.from_dict(ds.attrs) for ds in available_ds if ds is not None] dataset_ids = datasets or available_ds if not dataset_ids: raise RuntimeError("No datasets found for saving (resampling may be needed to generate composites)") writers = {} frames = {} for dataset_id in dataset_ids: if not self.is_generator and not self._all_same_area([dataset_id]): raise ValueError("Sub-scene datasets must all be on the same " "area (see the 'resample' method).") all_datasets = scene_gen[dataset_id] info_datasets = [scn.get(dataset_id) for scn in info_scenes] this_fn, shape, this_fill = self._get_animation_info(info_datasets, filename, fill_value=fill_value) data_to_write = self._get_animation_frames(all_datasets, shape, this_fill, ignore_missing) writer = imageio.get_writer(this_fn, fps=fps, **kwargs) frames[dataset_id] = data_to_write writers[dataset_id] = writer client = self._get_client(client=client) # get an ordered list of frames frame_keys, frames_to_write = list(zip(*frames.items())) frames_to_write = zip(*frames_to_write) if client is not None: self._distribute_frame_compute(writers, frame_keys, frames_to_write, client, batch_size=batch_size) else: self._simple_frame_compute(writers, frame_keys, frames_to_write) for writer in writers.values(): writer.close()
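A minimal usage sketch for MultiScene.save_animation defined above, assuming ``scenes`` is a list of Scene objects already loaded and resampled to a common area; the output filename pattern is just an example and MP4 output additionally requires the ``imageio-ffmpeg`` package:

from satpy.multiscene import MultiScene

mscn = MultiScene(scenes)  # 'scenes' is an assumption, not defined here
mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2)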
"""Nodes to build trees.""" from satpy import DatasetDict, DatasetID, DATASET_KEYS from satpy.readers import TooManyResults from satpy.utils import get_logger from satpy.dataset import create_filtered_dsid LOG = get_logger(__name__) # Empty leaf used for marking composites with no prerequisites EMPTY_LEAF_NAME = "__EMPTY_LEAF_SENTINEL__" class Node(object): """A node object.""" def __init__(self, name, data=None): """Init the node object.""" self.name = name self.data = data self.children = [] self.parents = [] @property def is_leaf(self): """Check if the node is a leaf.""" return not self.children def flatten(self, d=None): """Flatten tree structure to a one level dictionary. Args: d (dict, optional): output dictionary to update Returns: dict: Node.name -> Node. The returned dictionary includes the current Node and all its children. """ if d is None: d = {} if self.name is not None: d[self.name] = self for child in self.children: child.flatten(d=d) return d def copy(self, node_cache=None): """Make a copy of the node.""" if node_cache and self.name in node_cache: return node_cache[self.name] if self.name is EMPTY_LEAF_NAME: return self s = Node(self.name, self.data) for c in self.children: c = c.copy(node_cache=node_cache) s.add_child(c) return s def add_child(self, obj): """Add a child to the node.""" self.children.append(obj) obj.parents.append(self) def __str__(self): """Display the node.""" return self.display() def __repr__(self): """Generate a representation of the node.""" return "".format(repr(self.name)) def __eq__(self, other): """Check equality.""" return self.name == other.name def __hash__(self): """Generate the hash of the node.""" return hash(self.name) def display(self, previous=0, include_data=False): """Display the node.""" no_data = " (No Data)" if self.data is None else "" return ( (" +" * previous) + str(self.name) + no_data + '\n' + ''.join([child.display(previous + 1) for child in self.children])) def leaves(self, unique=True): """Get the leaves of the tree starting at this root.""" if self.name is EMPTY_LEAF_NAME: return [] elif not self.children: return [self] res = list() for child in self.children: for sub_child in child.leaves(unique=unique): if not unique or sub_child not in res: res.append(sub_child) return res def trunk(self, unique=True): """Get the trunk of the tree starting at this root.""" # uniqueness is not correct in `trunk` yet unique = False res = [] if self.children and self.name is not EMPTY_LEAF_NAME: if self.name is not None: res.append(self) for child in self.children: for sub_child in child.trunk(unique=unique): if not unique or sub_child not in res: res.append(sub_child) return res class DependencyTree(Node): """Structure to discover and store `Dataset` dependencies. Used primarily by the `Scene` object to organize dependency finding. Dependencies are stored used a series of `Node` objects which this class is a subclass of. """ # simplify future logic by only having one "sentinel" empty node # making it a class attribute ensures it is the same across instances empty_node = Node(EMPTY_LEAF_NAME) def __init__(self, readers, compositors, modifiers, available_only=False): """Collect Dataset generating information. Collect the objects that generate and have information about Datasets including objects that may depend on certain Datasets being generated. This includes readers, compositors, and modifiers. 
Args: readers (dict): Reader name -> Reader Object compositors (dict): Sensor name -> Composite ID -> Composite Object modifiers (dict): Sensor name -> Modifier name -> (Modifier Class, modifier options) available_only (bool): Whether only reader's available/loadable datasets should be used when searching for dependencies (True) or use all known/configured datasets regardless of whether the necessary files were provided to the reader (False). Note that when ``False`` loadable variations of a dataset will have priority over other known variations. Default is ``False``. """ self.readers = readers self.compositors = compositors self.modifiers = modifiers self._available_only = available_only # we act as the root node of the tree super(DependencyTree, self).__init__(None) # keep a flat dictionary of nodes contained in the tree for better # __contains__ self._all_nodes = DatasetDict() def leaves(self, nodes=None, unique=True): """Get the leaves of the tree starting at this root. Args: nodes (iterable): limit leaves for these node names unique: only include individual leaf nodes once Returns: list of leaf nodes """ if nodes is None: return super(DependencyTree, self).leaves(unique=unique) res = list() for child_id in nodes: for sub_child in self._all_nodes[child_id].leaves(unique=unique): if not unique or sub_child not in res: res.append(sub_child) return res def trunk(self, nodes=None, unique=True): """Get the trunk nodes of the tree starting at this root. Args: nodes (iterable): limit trunk nodes to the names specified or the children of them that are also trunk nodes. unique: only include individual trunk nodes once Returns: list of trunk nodes """ if nodes is None: return super(DependencyTree, self).trunk(unique=unique) res = list() for child_id in nodes: for sub_child in self._all_nodes[child_id].trunk(unique=unique): if not unique or sub_child not in res: res.append(sub_child) return res def add_child(self, parent, child): """Add a child to the tree.""" Node.add_child(parent, child) # Sanity check: Node objects should be unique. They can be added # multiple times if more than one Node depends on them # but they should all map to the same Node object. if self.contains(child.name): assert self._all_nodes[child.name] is child if child is self.empty_node: # No need to store "empty" nodes return self._all_nodes[child.name] = child def add_leaf(self, ds_id, parent=None): """Add a leaf to the tree.""" if parent is None: parent = self try: node = self[ds_id] except KeyError: node = Node(ds_id) self.add_child(parent, node) def copy(self): """Copy this node tree. Note all references to readers are removed. This is meant to avoid tree copies accessing readers that would return incompatible (Area) data. Theoretically it should be possible for tree copies to request compositor or modifier information as long as they don't depend on any datasets not already existing in the dependency tree. 
""" new_tree = DependencyTree({}, self.compositors, self.modifiers) for c in self.children: c = c.copy(node_cache=new_tree._all_nodes) new_tree.add_child(new_tree, c) return new_tree def __contains__(self, item): """Check if a item is in the tree.""" return item in self._all_nodes def __getitem__(self, item): """Get an item of the tree.""" return self._all_nodes[item] def contains(self, item): """Check contains when we know the *exact* DatasetID.""" return super(DatasetDict, self._all_nodes).__contains__(item) def getitem(self, item): """Get Node when we know the *exact* DatasetID.""" return super(DatasetDict, self._all_nodes).__getitem__(item) def get_compositor(self, key): """Get a compositor.""" for sensor_name in self.compositors.keys(): try: return self.compositors[sensor_name][key] except KeyError: continue if isinstance(key, DatasetID) and key.modifiers: # we must be generating a modifier composite return self.get_modifier(key) raise KeyError("Could not find compositor '{}'".format(key)) def get_modifier(self, comp_id): """Get a modifer.""" # create a DatasetID for the compositor we are generating modifier = comp_id.modifiers[-1] for sensor_name in self.modifiers.keys(): modifiers = self.modifiers[sensor_name] compositors = self.compositors[sensor_name] if modifier not in modifiers: continue mloader, moptions = modifiers[modifier] moptions = moptions.copy() moptions.update(comp_id.to_dict()) moptions['sensor'] = sensor_name compositors[comp_id] = mloader(**moptions) return compositors[comp_id] raise KeyError("Could not find modifier '{}'".format(modifier)) def _find_reader_dataset(self, dataset_key, **dfilter): """Attempt to find a `DatasetID` in the available readers. Args: dataset_key (str, float, DatasetID): Dataset name, wavelength, or a combination of `DatasetID` parameters to use in searching for the dataset from the available readers. **dfilter (list or str): `DatasetID` parameters besides `name` and `wavelength` to use to filter the available datasets. Passed directly to `get_dataset_key` of the readers, see that method for more information. """ too_many = False for reader_name, reader_instance in self.readers.items(): try: ds_id = reader_instance.get_dataset_key(dataset_key, available_only=self._available_only, **dfilter) except TooManyResults: LOG.trace("Too many datasets matching key {} in reader {}".format(dataset_key, reader_name)) too_many = True continue except KeyError: LOG.trace("Can't find dataset %s in reader %s", str(dataset_key), reader_name) continue LOG.trace("Found {} in reader {} when asking for {}".format(str(ds_id), reader_name, repr(dataset_key))) try: # now that we know we have the exact DatasetID see if we have already created a Node for it return self.getitem(ds_id) except KeyError: # we haven't created a node yet, create it now return Node(ds_id, {'reader_name': reader_name}) if too_many: raise TooManyResults("Too many keys matching: {}".format(dataset_key)) def _get_compositor_prereqs(self, parent, prereq_names, skip=False, **dfilter): """Determine prerequisite Nodes for a composite. Args: parent (Node): Compositor node to add these prerequisites under prereq_names (sequence): Strings (names), floats (wavelengths), or DatasetIDs to analyze. skip (bool, optional): If True, prerequisites are considered optional if they can't be found and a debug message is logged. If False (default), the missing prerequisites are not logged and are expected to be handled by the caller. 
""" prereq_ids = [] unknowns = set() if not prereq_names and not skip: # this composite has no required prerequisites prereq_names = [None] for prereq in prereq_names: n, u = self._find_dependencies(prereq, **dfilter) if u: unknowns.update(u) if skip: u_str = ", ".join([str(x) for x in u]) LOG.debug('Skipping optional %s: Unknown dataset %s', str(prereq), u_str) else: prereq_ids.append(n) self.add_child(parent, n) return prereq_ids, unknowns def _update_modifier_key(self, orig_key, dep_key): """Update a key based on the dataset it will modified (dep). Typical use case is requesting a modified dataset (orig_key). This modified dataset most likely depends on a less-modified dataset (dep_key). The less-modified dataset must come from a reader (at least for now) or will eventually depend on a reader dataset. The original request key may be limited like (wavelength=0.67, modifiers=('a', 'b')) while the reader-based key should have all of its properties specified. This method updates the original request key so it is fully specified and should reduce the chance of Node's not being unique. """ orig_dict = orig_key._asdict() dep_dict = dep_key._asdict() # don't change the modifiers for k in DATASET_KEYS[:-1]: orig_dict[k] = dep_dict[k] return DatasetID.from_dict(orig_dict) def _find_compositor(self, dataset_key, **dfilter): """Find the compositor object for the given dataset_key.""" # NOTE: This function can not find a modifier that performs # one or more modifications if it has modifiers see if we can find # the unmodified version first src_node = None if isinstance(dataset_key, DatasetID) and dataset_key.modifiers: new_prereq = DatasetID( *dataset_key[:-1] + (dataset_key.modifiers[:-1],)) src_node, u = self._find_dependencies(new_prereq, **dfilter) # Update the requested DatasetID with information from the src if src_node is not None: dataset_key = self._update_modifier_key(dataset_key, src_node.name) if u: return None, u try: compositor = self.get_compositor(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format(str(dataset_key))) dataset_key = create_filtered_dsid(compositor.id, **dfilter) root = Node(dataset_key, data=(compositor, [], [])) if src_node is not None: self.add_child(root, src_node) root.data[1].append(src_node) # 2.1 get the prerequisites LOG.trace("Looking for composite prerequisites for: {}".format(dataset_key)) prereqs, unknowns = self._get_compositor_prereqs(root, compositor.attrs['prerequisites'], **dfilter) if unknowns: # Should we remove all of the unknown nodes that were found # if there is an unknown prerequisite are we in trouble? return None, unknowns root.data[1].extend(prereqs) LOG.trace("Looking for optional prerequisites for: {}".format(dataset_key)) optional_prereqs, _ = self._get_compositor_prereqs( root, compositor.attrs['optional_prerequisites'], skip=True, **dfilter) root.data[2].extend(optional_prereqs) return root, set() def get_filtered_item(self, dataset_key, **dfilter): """Get the item matching *dataset_key* and *dfilter*.""" dsid = create_filtered_dsid(dataset_key, **dfilter) return self[dsid] def _find_dependencies(self, dataset_key, **dfilter): """Find the dependencies for *dataset_key*. Args: dataset_key (str, float, DatasetID): Dataset identifier to locate and find any additional dependencies for. **dfilter (dict): Additional filter parameters. See `satpy.readers.get_key` for more details. 
""" # Special case: No required dependencies for this composite if dataset_key is None: return self.empty_node, set() # 0 check if the *exact* dataset is already loaded try: dsid = create_filtered_dsid(dataset_key, **dfilter) node = self.getitem(dsid) LOG.trace("Found exact dataset already loaded: {}".format(node.name)) return node, set() except KeyError: # exact dataset isn't loaded, let's load it below LOG.trace("Exact dataset {} isn't loaded, will try reader...".format(dataset_key)) # 1 try to get *best* dataset from reader try: node = self._find_reader_dataset(dataset_key, **dfilter) except TooManyResults: LOG.warning("Too many possible datasets to load for {}".format(dataset_key)) return None, set([dataset_key]) if node is not None: LOG.trace("Found reader provided dataset:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name)) return node, set() LOG.trace("Could not find dataset in reader: {}".format(dataset_key)) # 2 try to find a composite by name (any version of it is good enough) try: # assume that there is no such thing as a "better" composite # version so if we find any DatasetIDs already loaded then # we want to use them node = self.get_filtered_item(dataset_key, **dfilter) LOG.trace("Composite already loaded:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name)) return node, set() except KeyError: # composite hasn't been loaded yet, let's load it below LOG.trace("Composite hasn't been loaded yet, will load: {}".format(dataset_key)) # 3 try to find a composite that matches try: node, unknowns = self._find_compositor(dataset_key, **dfilter) LOG.trace("Found composite:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node and node.name)) except KeyError: node = None unknowns = set([dataset_key]) LOG.trace("Composite not found: {}".format(dataset_key)) return node, unknowns def find_dependencies(self, dataset_keys, **dfilter): """Create the dependency tree. Args: dataset_keys (iterable): Strings or DatasetIDs to find dependencies for **dfilter (dict): Additional filter parameters. See `satpy.readers.get_key` for more details. Returns: (Node, set): Root node of the dependency tree and a set of unknown datasets """ unknown_datasets = set() for key in dataset_keys.copy(): n, unknowns = self._find_dependencies(key, **dfilter) dataset_keys.discard(key) # remove old non-DatasetID if n is not None: dataset_keys.add(n.name) # add equivalent DatasetID if unknowns: unknown_datasets.update(unknowns) continue self.add_child(self, n) return unknown_datasets satpy-0.20.0/satpy/plugin_base.py000066400000000000000000000055041362525524100167440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The :mod:`satpy.plugin_base` module defines the plugin API. 
""" import logging import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader from satpy.config import config_search_paths, get_environ_config_dir, recursive_dict_update LOG = logging.getLogger(__name__) class Plugin(object): """Base plugin class for all dynamically loaded and configured objects.""" def __init__(self, ppp_config_dir=None, default_config_filename=None, config_files=None, **kwargs): """Load configuration files related to this plugin. This initializes a `self.config` dictionary that can be used to customize the subclass. Args: ppp_config_dir (str): Base "etc" directory for all configuration files. default_config_filename (str): Configuration filename to use if no other files have been specified with `config_files`. config_files (list or str): Configuration files to load instead of those automatically found in `ppp_config_dir` and other default configuration locations. kwargs (dict): Unused keyword arguments. """ self.ppp_config_dir = ppp_config_dir or get_environ_config_dir() self.default_config_filename = default_config_filename self.config_files = config_files if self.config_files is None and self.default_config_filename is not None: # Specify a default self.config_files = config_search_paths(self.default_config_filename, self.ppp_config_dir) if not isinstance(self.config_files, (list, tuple)): self.config_files = [self.config_files] self.config = {} if self.config_files: for config_file in self.config_files: self.load_yaml_config(config_file) def load_yaml_config(self, conf): """Load a YAML configuration file and recursively update the overall configuration.""" with open(conf) as fd: self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) satpy-0.20.0/satpy/readers/000077500000000000000000000000001362525524100155235ustar00rootroot00000000000000satpy-0.20.0/satpy/readers/__init__.py000066400000000000000000000746731362525524100176550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Shared objects of the various reader classes.""" import logging import numbers import os from datetime import datetime, timedelta import six import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader from satpy.config import (config_search_paths, get_environ_config_dir, glob_config) from satpy.dataset import DATASET_KEYS, DatasetID from satpy import CALIBRATION_ORDER try: import configparser # noqa except ImportError: from six.moves import configparser # noqa LOG = logging.getLogger(__name__) # Old Name -> New Name OLD_READER_NAMES = { } class TooManyResults(KeyError): """Special exception when one key maps to multiple items in the container.""" pass def _wl_dist(wl_a, wl_b): """Return the distance between two requested wavelengths.""" if isinstance(wl_a, tuple): # central wavelength wl_a = wl_a[1] if isinstance(wl_b, tuple): wl_b = wl_b[1] if wl_a is None or wl_b is None: return 1000. return abs(wl_a - wl_b) def get_best_dataset_key(key, choices): """Choose the "best" `DatasetID` from `choices` based on `key`. The best key is chosen based on the follow criteria: 1. Central wavelength is nearest to the `key` wavelength if specified. 2. Least modified dataset if `modifiers` is `None` in `key`. Otherwise, the modifiers are ignored. 3. Highest calibration if `calibration` is `None` in `key`. Calibration priority is chosen by `satpy.CALIBRATION_ORDER`. 4. Best resolution (smallest number) if `resolution` is `None` in `key`. Otherwise, the resolution is ignored. This function assumes `choices` has already been filtered to only include datasets that match the provided `key`. Args: key (DatasetID): Query parameters to sort `choices` by. choices (iterable): `DatasetID` objects to sort through to determine the best dataset. Returns: List of best `DatasetID`s from `choices`. If there is more than one element this function could not choose between the available datasets. """ # Choose the wavelength closest to the choice if key.wavelength is not None and choices: # find the dataset with a central wavelength nearest to the # requested wavelength nearest_wl = min([_wl_dist(key.wavelength, x.wavelength) for x in choices if x.wavelength is not None]) choices = [c for c in choices if _wl_dist(key.wavelength, c.wavelength) == nearest_wl] if key.modifiers is None and choices: num_modifiers = min(len(x.modifiers or tuple()) for x in choices) choices = [c for c in choices if len( c.modifiers or tuple()) == num_modifiers] if key.calibration is None and choices: best_cal = [x.calibration for x in choices if x.calibration] if best_cal: best_cal = min(best_cal, key=lambda x: CALIBRATION_ORDER[x]) choices = [c for c in choices if c.calibration == best_cal] if key.resolution is None and choices: low_res = [x.resolution for x in choices if x.resolution] if low_res: low_res = min(low_res) choices = [c for c in choices if c.resolution == low_res] if key.level is None and choices: low_level = [x.level for x in choices if x.level] if low_level: low_level = max(low_level) choices = [c for c in choices if c.level == low_level] return choices def filter_keys_by_dataset_id(did, key_container): """Filer provided key iterable by the provided `DatasetID`. Note: The `modifiers` attribute of `did` should be `None` to allow for **any** modifier in the results. Args: did (DatasetID): Query parameters to match in the `key_container`. key_container (iterable): Set, list, tuple, or dict of `DatasetID` keys. 
def filter_keys_by_dataset_id(did, key_container): """Filter the provided key iterable by the provided `DatasetID`. Note: The `modifiers` attribute of `did` should be `None` to allow for **any** modifier in the results. Args: did (DatasetID): Query parameters to match in the `key_container`. key_container (iterable): Set, list, tuple, or dict of `DatasetID` keys. Returns (list): List of keys matching the provided parameters in no specific order. """ keys = iter(key_container) for key in DATASET_KEYS: if getattr(did, key) is not None: if key == "wavelength": keys = [k for k in keys if (getattr(k, key) is not None and DatasetID.wavelength_match(getattr(k, key), getattr(did, key)))] else: keys = [k for k in keys if getattr(k, key) is not None and getattr(k, key) == getattr(did, key)] return keys def get_key(key, key_container, num_results=1, best=True, resolution=None, calibration=None, polarization=None, level=None, modifiers=None): """Get the fully-specified key best matching the provided key. Only the best match is returned if `best` is `True` (default). See `get_best_dataset_key` for more information on how this is determined. The `resolution` and other identifier keywords are provided as a convenience to filter by multiple parameters at once without having to filter by multiple `key` inputs. Args: key (DatasetID): DatasetID of query parameters to use for searching. Any parameter that is `None` is considered a wild card and any match is accepted. key_container (dict or set): Container of DatasetID objects that uses hashing to quickly access items. num_results (int): Number of results to return. Use `0` for all matching results. If `1` then the single matching key is returned instead of a list of length 1. (default: 1) best (bool): Sort results to get "best" result first (default: True). See `get_best_dataset_key` for details. resolution (float, int, or list): Resolution of the dataset in dataset units (typically meters). This can also be a list of these numbers. calibration (str or list): Dataset calibration (ex. 'reflectance'). This can also be a list of these strings. polarization (str or list): Dataset polarization (ex. 'V'). This can also be a list of these strings. level (number or list): Dataset level (ex. 100). This can also be a list of these numbers. modifiers (list): Modifiers applied to the dataset. Unlike resolution and calibration this is the exact desired list of modifiers for one dataset, not a list of possible modifiers. Returns (list or DatasetID): Matching key(s) Raises: KeyError if no matching results or if more than one result is found when `num_results` is `1`. """ if isinstance(key, numbers.Number): # we want this ID to act as a query so we set modifiers to None # meaning "we don't care how many modifiers it has".
key = DatasetID(wavelength=key, modifiers=None) elif isinstance(key, (str, six.text_type)): # ID should act as a query (see wl comment above) key = DatasetID(name=key, modifiers=None) elif not isinstance(key, DatasetID): raise ValueError("Expected 'DatasetID', str, or number dict key, " "not {}".format(str(type(key)))) res = filter_keys_by_dataset_id(key, key_container) # further filter by other parameters if resolution is not None: if not isinstance(resolution, (list, tuple)): resolution = (resolution, ) res = [k for k in res if k.resolution is not None and k.resolution in resolution] if polarization is not None: if not isinstance(polarization, (list, tuple)): polarization = (polarization, ) res = [k for k in res if k.polarization is not None and k.polarization in polarization] if calibration is not None: if not isinstance(calibration, (list, tuple)): calibration = (calibration, ) res = [k for k in res if k.calibration is not None and k.calibration in calibration] if level is not None: if not isinstance(level, (list, tuple)): level = (level, ) res = [k for k in res if k.level is not None and k.level in level] if modifiers is not None: res = [k for k in res if k.modifiers is not None and k.modifiers == modifiers] if best: res = get_best_dataset_key(key, res) if num_results == 1 and not res: raise KeyError("No dataset matching '{}' found".format(str(key))) elif num_results == 1 and len(res) != 1: raise TooManyResults("No unique dataset matching {}".format(str(key))) elif num_results == 1: return res[0] elif num_results == 0: return res else: return res[:num_results] class DatasetDict(dict): """Special dictionary object that can handle dict operations based on dataset name, wavelength, or DatasetID. Note: Internal dictionary keys are `DatasetID` objects. """ def keys(self, names=False, wavelengths=False): """Give currently contained keys.""" # sort keys so things are a little more deterministic (.keys() is not) keys = sorted(super(DatasetDict, self).keys()) if names: return (k.name for k in keys) elif wavelengths: return (k.wavelength for k in keys) else: return keys def get_key(self, match_key, num_results=1, best=True, **dfilter): """Get multiple fully-specified keys that match the provided query. Args: key (DatasetID): DatasetID of query parameters to use for searching. Any parameter that is `None` is considered a wild card and any match is accepted. Can also be a string representing the dataset name or a number representing the dataset wavelength. num_results (int): Number of results to return. If `0` return all, if `1` return only that element, otherwise return a list of matching keys. **dfilter (dict): See `get_key` function for more information. 
""" return get_key(match_key, self.keys(), num_results=num_results, best=best, **dfilter) def getitem(self, item): """Get Node when we know the *exact* DatasetID.""" return super(DatasetDict, self).__getitem__(item) def __getitem__(self, item): """Get item from container.""" try: # short circuit - try to get the object without more work return super(DatasetDict, self).__getitem__(item) except KeyError: key = self.get_key(item) return super(DatasetDict, self).__getitem__(key) def get(self, key, default=None): """Get value with optional default.""" try: key = self.get_key(key) except KeyError: return default return super(DatasetDict, self).get(key, default) def __setitem__(self, key, value): """Support assigning 'Dataset' objects or dictionaries of metadata.""" d = value if hasattr(value, 'attrs'): # xarray.DataArray objects d = value.attrs # use value information to make a more complete DatasetID if not isinstance(key, DatasetID): if not isinstance(d, dict): raise ValueError("Key must be a DatasetID when value is not an xarray DataArray or dict") old_key = key try: key = self.get_key(key) except KeyError: if isinstance(old_key, (str, six.text_type)): new_name = old_key else: new_name = d.get("name") # this is a new key and it's not a full DatasetID tuple key = DatasetID(name=new_name, resolution=d.get("resolution"), wavelength=d.get("wavelength"), polarization=d.get("polarization"), calibration=d.get("calibration"), level=d.get("level"), modifiers=d.get("modifiers", tuple())) if key.name is None and key.wavelength is None: raise ValueError("One of 'name' or 'wavelength' attrs " "values should be set.") # update the 'value' with the information contained in the key if isinstance(d, dict): d["name"] = key.name # XXX: What should users be allowed to modify? d["resolution"] = key.resolution d["calibration"] = key.calibration d["polarization"] = key.polarization d["level"] = key.level d["modifiers"] = key.modifiers # you can't change the wavelength of a dataset, that doesn't make # sense if "wavelength" in d and d["wavelength"] != key.wavelength: raise TypeError("Can't change the wavelength of a dataset") return super(DatasetDict, self).__setitem__(key, value) def contains(self, item): """Check contains when we know the *exact* DatasetID.""" return super(DatasetDict, self).__contains__(item) def __contains__(self, item): """Check if item exists in container.""" try: key = self.get_key(item) except KeyError: return False return super(DatasetDict, self).__contains__(key) def __delitem__(self, key): """Delete item from container.""" try: # short circuit - try to get the object without more work return super(DatasetDict, self).__delitem__(key) except KeyError: key = self.get_key(key) return super(DatasetDict, self).__delitem__(key) def group_files(files_to_sort, reader=None, time_threshold=10, group_keys=None, ppp_config_dir=None, reader_kwargs=None): """Group series of files by file pattern information. By default this will group files by their filename ``start_time`` assuming it exists in the pattern. By passing the individual dictionaries returned by this function to the Scene classes' ``filenames``, a series `Scene` objects can be easily created. .. versionadded:: 0.12 Args: files_to_sort (iterable): File paths to sort in to group reader (str): Reader whose file patterns should be used to sort files. This time_threshold (int): Number of seconds used to consider time elements in a group as being equal. 
For example, if the 'start_time' item is used to group files then any time within `time_threshold` seconds of the first file's 'start_time' will be seen as occurring at the same time. group_keys (list or tuple): File pattern information to use to group files. Keys are sorted in order and only the first key is used when comparing datetime elements with `time_threshold` (see above). This means it is recommended that datetime values should only come from the first key in ``group_keys``. Otherwise, there is a good chance that files will not be grouped properly (datetimes being barely unequal). Defaults to a reader's ``group_keys`` configuration (set in YAML), otherwise ``('start_time',)``. ppp_config_dir (str): Root user configuration directory for Satpy. This will be deprecated in the future, but is here for consistency with other Satpy features. reader_kwargs (dict): Additional keyword arguments to pass to reader creation. Returns: List of dictionaries mapping 'reader' to a list of filenames. Each of these dictionaries can be passed as ``filenames`` to a `Scene` object. """ # FUTURE: Find the best reader for each filename using `find_files_and_readers` if reader is None: raise ValueError("'reader' keyword argument is required.") elif not isinstance(reader, (list, tuple)): reader = [reader] # FUTURE: Handle multiple readers reader = reader[0] reader_configs = list(configs_for_reader(reader, ppp_config_dir))[0] reader_kwargs = reader_kwargs or {} try: reader_instance = load_reader(reader_configs, **reader_kwargs) except (KeyError, IOError, yaml.YAMLError) as err: LOG.info('Cannot use %s', str(reader_configs)) LOG.debug(str(err)) # if reader and (isinstance(reader, str) or len(reader) == 1): # # if it is a single reader then give a more usable error # raise raise if group_keys is None: group_keys = reader_instance.info.get('group_keys', ('start_time',)) file_keys = [] for _, filetype_info in reader_instance.sorted_filetype_items(): for f, file_info in reader_instance.filename_items_for_filetype(files_to_sort, filetype_info): group_key = tuple(file_info.get(k) for k in group_keys) file_keys.append((group_key, f)) prev_key = None threshold = timedelta(seconds=time_threshold) file_groups = {} for gk, f in sorted(file_keys): # use first element of key as time identifier (if datetime type) if prev_key is None: is_new_group = True prev_key = gk elif isinstance(gk[0], datetime): # datetimes within threshold difference are "the same time" is_new_group = (gk[0] - prev_key[0]) > threshold else: is_new_group = gk[0] != prev_key[0] # compare keys for those that are found for both the key and prev_key # this is a generator and is not computed until the if statement below # when we know that `prev_key` is not None vals_not_equal = (this_val != prev_val for this_val, prev_val in zip(gk[1:], prev_key[1:]) if this_val is not None and prev_val is not None) # if this is a new group based on the first element if is_new_group or any(vals_not_equal): file_groups[gk] = [f] prev_key = gk else: file_groups[prev_key].append(f) sorted_group_keys = sorted(file_groups) # passable to Scene as 'filenames' return [{reader: file_groups[group_key]} for group_key in sorted_group_keys]
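# Hedged example, not part of the original module: grouping files into
# per-timestep Scene inputs. The reader name and the file list passed in
# are hypothetical.
def _example_group_files(my_files):
    """Create one Scene per time group (illustrative only)."""
    groups = group_files(my_files, reader='ahi_hsd', time_threshold=30)
    # each group is a {reader_name: [filenames]} dict accepted by Scene
    from satpy import Scene
    return [Scene(filenames=group) for group in groups]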
"Malformed config file {}: missing reader 'reader'".format( config_files)) reader_info['config_files'] = config_files return reader_info def load_reader(reader_configs, **reader_kwargs): """Import and setup the reader from *reader_info*.""" reader_info = read_reader_config(reader_configs) reader_instance = reader_info['reader'](config_files=reader_configs, **reader_kwargs) return reader_instance def configs_for_reader(reader=None, ppp_config_dir=None): """Generate reader configuration files for one or more readers. Args: reader (Optional[str]): Yield configs only for this reader ppp_config_dir (Optional[str]): Additional configuration directory to search for reader configuration files. Returns: Generator of lists of configuration files """ search_paths = (ppp_config_dir,) if ppp_config_dir else tuple() if reader is not None: if not isinstance(reader, (list, tuple)): reader = [reader] # check for old reader names new_readers = [] for reader_name in reader: if reader_name.endswith('.yaml') or reader_name not in OLD_READER_NAMES: new_readers.append(reader_name) continue new_name = OLD_READER_NAMES[reader_name] # Satpy 0.11 only displays a warning # Satpy 0.13 will raise an exception raise ValueError("Reader name '{}' has been deprecated, use '{}' instead.".format(reader_name, new_name)) # Satpy 0.15 or 1.0, remove exception and mapping reader = new_readers # given a config filename or reader name config_files = [r if r.endswith('.yaml') else r + '.yaml' for r in reader] else: reader_configs = glob_config(os.path.join('readers', '*.yaml'), *search_paths) config_files = set(reader_configs) for config_file in config_files: config_basename = os.path.basename(config_file) reader_configs = config_search_paths( os.path.join("readers", config_basename), *search_paths) if not reader_configs: # either the reader they asked for does not exist # or satpy is improperly configured and can't find its own readers raise ValueError("No reader(s) named: {}".format(reader)) yield reader_configs def available_readers(as_dict=False): """Available readers based on current configuration. Args: as_dict (bool): Optionally return reader information as a dictionary. Default: False Returns: List of available reader names. If `as_dict` is `True` then a list of dictionaries including additionally reader information is returned. """ readers = [] for reader_configs in configs_for_reader(): try: reader_info = read_reader_config(reader_configs) except (KeyError, IOError, yaml.YAMLError): LOG.warning("Could not import reader config from: %s", reader_configs) LOG.debug("Error loading YAML", exc_info=True) continue readers.append(reader_info if as_dict else reader_info['name']) return readers def find_files_and_readers(start_time=None, end_time=None, base_dir=None, reader=None, sensor=None, ppp_config_dir=None, filter_parameters=None, reader_kwargs=None): """Find on-disk files matching the provided parameters. Use `start_time` and/or `end_time` to limit found filenames by the times in the filenames (not the internal file metadata). Files are matched if they fall anywhere within the range specified by these parameters. Searching is **NOT** recursive. The returned dictionary can be passed directly to the `Scene` object through the `filenames` keyword argument. 
The behaviour of time-based filtering depends on whether the filename contains information about the end time of the data: - if the end time is not present in the filename, the start time of the filename is used and has to fall between (inclusive) the requested start and end times - otherwise, the timespan of the filename has to overlap the requested timespan Args: start_time (datetime): Limit used files by starting time. end_time (datetime): Limit used files by ending time. base_dir (str): The directory to search for files containing the data to load. Defaults to the current directory. reader (str or list): The name of the reader to use for loading the data or a list of names. sensor (str or list): Limit used files by provided sensors. ppp_config_dir (str): The directory containing the configuration files for Satpy. filter_parameters (dict): Filename pattern metadata to filter on. `start_time` and `end_time` are automatically added to this dictionary. Shortcut for `reader_kwargs['filter_parameters']`. reader_kwargs (dict): Keyword arguments to pass to specific reader instances to further configure file searching. Returns: Dictionary mapping reader name string to list of filenames """ if ppp_config_dir is None: ppp_config_dir = get_environ_config_dir() reader_files = {} reader_kwargs = reader_kwargs or {} filter_parameters = filter_parameters or reader_kwargs.get('filter_parameters', {}) sensor_supported = False if start_time or end_time: filter_parameters['start_time'] = start_time filter_parameters['end_time'] = end_time reader_kwargs['filter_parameters'] = filter_parameters for reader_configs in configs_for_reader(reader, ppp_config_dir): try: reader_instance = load_reader(reader_configs, **reader_kwargs) except (KeyError, IOError, yaml.YAMLError) as err: LOG.info('Cannot use %s', str(reader_configs)) LOG.debug(str(err)) if reader and (isinstance(reader, str) or len(reader) == 1): # if it is a single reader then give a more usable error raise continue if not reader_instance.supports_sensor(sensor): continue elif sensor is not None: # sensor was specified and a reader supports it sensor_supported = True loadables = reader_instance.select_files_from_directory(base_dir) if loadables: loadables = list( reader_instance.filter_selected_filenames(loadables)) if loadables: reader_files[reader_instance.name] = list(loadables) if sensor and not sensor_supported: raise ValueError("Sensor '{}' not supported by any readers".format(sensor)) if not reader_files: raise ValueError("No supported files found") return reader_files def load_readers(filenames=None, reader=None, reader_kwargs=None, ppp_config_dir=None): """Create specified readers and assign files to them. Args: filenames (iterable or dict): A sequence of files that will be used to load data from. A ``dict`` object should map reader names to a list of filenames for that reader. reader (str or list): The name of the reader to use for loading the data or a list of names. reader_kwargs (dict): Keyword arguments to pass to specific reader instances. ppp_config_dir (str): The directory containing the configuration files for Satpy.
Returns: Dictionary mapping reader name to reader instance """ reader_instances = {} reader_kwargs = reader_kwargs or {} reader_kwargs_without_filter = reader_kwargs.copy() reader_kwargs_without_filter.pop('filter_parameters', None) if ppp_config_dir is None: ppp_config_dir = get_environ_config_dir() if not filenames and not reader: # used for an empty Scene return {} elif reader and filenames is not None and not filenames: # user made a mistake in their glob pattern raise ValueError("'filenames' was provided but is empty.") elif not filenames: LOG.warning("'filenames' required to create readers and load data") return {} elif reader is None and isinstance(filenames, dict): # filenames is a dictionary of reader_name -> filenames reader = list(filenames.keys()) remaining_filenames = set(f for fl in filenames.values() for f in fl) elif reader and isinstance(filenames, dict): # filenames is a dictionary of reader_name -> filenames # but they only want one of the readers filenames = filenames[reader] remaining_filenames = set(filenames or []) else: remaining_filenames = set(filenames or []) for idx, reader_configs in enumerate(configs_for_reader(reader, ppp_config_dir)): if isinstance(filenames, dict): readers_files = set(filenames[reader[idx]]) else: readers_files = remaining_filenames try: reader_instance = load_reader(reader_configs, **reader_kwargs) except (KeyError, IOError, yaml.YAMLError) as err: LOG.info('Cannot use %s', str(reader_configs)) LOG.debug(str(err)) continue if not readers_files: # we weren't given any files for this reader continue loadables = reader_instance.select_files_from_pathnames(readers_files) if loadables: reader_instance.create_filehandlers(loadables, fh_kwargs=reader_kwargs_without_filter) reader_instances[reader_instance.name] = reader_instance remaining_filenames -= set(loadables) if not remaining_filenames: break if remaining_filenames: LOG.warning("Don't know how to open the following files: {}".format(str(remaining_filenames))) if not reader_instances: raise ValueError("No supported files found") elif not any(list(r.available_dataset_ids) for r in reader_instances.values()): raise ValueError("No dataset could be loaded. Either missing " "requirements (such as Epilog, Prolog) or none of the " "provided files match the filter parameters.") return reader_instances satpy-0.20.0/satpy/readers/_geos_area.py000066400000000000000000000113041362525524100201600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Geostationary Projection / Area computations. This module computes properties and area definitions for geostationary satellites. It is designed to be a common module that can be called by all geostationary satellite readers and uses commonly-included parameters such as the CFAC/LFAC values, satellite position, etc, to compute the correct area definition. 
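A usage sketch (all parameter values below are illustrative placeholders, not constants from any real instrument)::

    pdict = {'nlines': 5500, 'ncols': 5500,
             'cfac': 40932549, 'lfac': 40932549,
             'coff': 2750.5, 'loff': 2750.5,
             'scandir': 'N2S', 'ssp_lon': 140.7,
             'a': 6378137.0, 'b': 6356752.3, 'h': 35785863.0,
             'a_name': 'example_geos', 'a_desc': 'Example geostationary area',
             'p_id': 'example_geos'}
    aex = get_area_extent(pdict)
    area_def = get_area_definition(pdict, aex)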
""" import numpy as np from pyresample import geometry def get_xy_from_linecol(line, col, offsets, factors): """Get the intermediate coordinates from line & col. Intermediate coordinates are actually the instruments scanning angles. """ loff, coff = offsets lfac, cfac = factors x__ = float(col - coff) / (float(cfac) / 2 ** 16) y__ = float(line - loff) / (float(lfac) / 2 ** 16) return x__, y__ def make_ext(ll_x, ur_x, ll_y, ur_y, h): """Create the area extent from computed ll and ur. Args: ll_x: The lower left x coordinate (m) ur_x: The upper right x coordinate (m) ll_y: The lower left y coordinate (m) ur_y: The upper right y coordinate (m) h: The satellite altitude above the Earth's surface Returns: aex: An area extent for the scene """ aex = (np.deg2rad(ll_x) * h, np.deg2rad(ll_y) * h, np.deg2rad(ur_x) * h, np.deg2rad(ur_y) * h) return aex def get_area_extent(pdict): """Get the area extent seen by a geostationary satellite. Args: pdict: A dictionary containing common parameters: nlines: Number of lines in image ncols: Number of columns in image cfac: Column scaling factor lfac: Line scaling factor coff: Column offset factor loff: Line offset factor scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N) Returns: aex: An area extent for the scene """ # count starts at 1 cols = 1 - 0.5 if pdict['scandir'] == 'S2N': lines = 0.5 - 1 scanmult = -1 else: lines = 1 - 0.5 scanmult = 1 # Lower left x, y scanning angles in degrees ll_x, ll_y = get_xy_from_linecol(lines * scanmult, cols, (pdict['loff'], pdict['coff']), (pdict['lfac'], pdict['cfac'])) cols += pdict['ncols'] lines += pdict['nlines'] # Upper right x, y scanning angles in degrees ur_x, ur_y = get_xy_from_linecol(lines * scanmult, cols, (pdict['loff'], pdict['coff']), (pdict['lfac'], pdict['cfac'])) if pdict['scandir'] == 'S2N': ll_y *= -1 ur_y *= -1 # Convert degrees to radians and create area extent aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict['h']) return aex def get_area_definition(pdict, a_ext): """Get the area definition for a geo-sat. Args: pdict: A dictionary containing common parameters: nlines: Number of lines in image ncols: Number of columns in image ssp_lon: Subsatellite point longitude (deg) a: Earth equatorial radius (m) b: Earth polar radius (m) h: Platform height (m) a_name: Area name a_desc: Area description p_id: Projection id a_ext: A four element tuple containing the area extent (scan angle) for the scene in radians Returns: a_def: An area definition for the scene """ proj_dict = {'a': float(pdict['a']), 'b': float(pdict['b']), 'lon_0': float(pdict['ssp_lon']), 'h': float(pdict['h']), 'proj': 'geos', 'units': 'm'} a_def = geometry.AreaDefinition( pdict['a_name'], pdict['a_desc'], pdict['p_id'], proj_dict, int(pdict['ncols']), int(pdict['nlines']), a_ext) return a_def satpy-0.20.0/satpy/readers/aapp_l1b.py000066400000000000000000000534061362525524100175640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for aapp level 1b data. Options for loading: - pre_launch_coeffs (False): use pre-launch coefficients if True, operational otherwise (if available). http://research.metoffice.gov.uk/research/interproj/nwpsaf/aapp/ NWPSAF-MF-UD-003_Formats.pdf """ import logging from datetime import datetime, timedelta import numpy as np import xarray as xr import dask.array as da from dask import delayed from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE LINE_CHUNK = CHUNK_SIZE ** 2 // 2048 logger = logging.getLogger(__name__) CHANNEL_NAMES = ['1', '2', '3a', '3b', '4', '5'] ANGLES = {'sensor_zenith_angle': 'satz', 'solar_zenith_angle': 'sunz', 'sun_sensor_azimuth_difference_angle': 'azidiff'} PLATFORM_NAMES = {4: 'NOAA-15', 2: 'NOAA-16', 6: 'NOAA-17', 7: 'NOAA-18', 8: 'NOAA-19', 11: 'Metop-B', 12: 'Metop-A', 13: 'Metop-C', 14: 'Metop simulator'} def create_xarray(arr): """Create an `xarray.DataArray`.""" res = xr.DataArray(arr, dims=['y', 'x']) return res class AVHRRAAPPL1BFile(BaseFileHandler): """Reader for AVHRR L1B files created from the AAPP software.""" def __init__(self, filename, filename_info, filetype_info): """Initialize object information by reading the input file.""" super(AVHRRAAPPL1BFile, self).__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} self._data = None self._header = None self._is3b = None self._is3a = None self._shape = None self.lons = None self.lats = None self.area = None self.sensor = 'avhrr-3' self.read() self.platform_name = PLATFORM_NAMES.get(self._header['satid'][0], None) if self.platform_name is None: raise ValueError("Unsupported platform ID: %d" % self.header['satid']) self.sunz, self.satz, self.azidiff = None, None, None @property def start_time(self): """Get the time of the first observation.""" return datetime(self._data['scnlinyr'][0], 1, 1) + timedelta( days=int(self._data['scnlindy'][0]) - 1, milliseconds=int(self._data['scnlintime'][0])) @property def end_time(self): """Get the time of the final observation.""" return datetime(self._data['scnlinyr'][-1], 1, 1) + timedelta( days=int(self._data['scnlindy'][-1]) - 1, milliseconds=int(self._data['scnlintime'][-1])) def get_dataset(self, key, info): """Get a dataset from the file.""" if key.name in CHANNEL_NAMES: dataset = self.calibrate(key) elif key.name in ['longitude', 'latitude']: if self.lons is None or self.lats is None: self.navigate() if key.name == 'longitude': dataset = create_xarray(self.lons) else: dataset = create_xarray(self.lats) dataset.attrs = info else: # Get sun-sat angles if key.name in ANGLES: if isinstance(getattr(self, ANGLES[key.name]), np.ndarray): dataset = create_xarray(getattr(self, ANGLES[key.name])) else: dataset = self.get_angles(key.name) else: raise ValueError("Not a supported sun-sensor viewing angle: %s", key.name) dataset.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) dataset.attrs.update(key.to_dict()) for meta_key in ('standard_name', 'units'): if meta_key in info: dataset.attrs.setdefault(meta_key, info[meta_key]) if not self._shape: self._shape = dataset.shape return dataset def read(self): """Read the data.""" tic = datetime.now() header = np.memmap(self.filename, dtype=_HEADERTYPE, mode="r", shape=(1, )) data = np.memmap(self.filename, dtype=_SCANTYPE, offset=22016, mode="r") 
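# NOTE: the file starts with a single header record and the scanline records
# follow it, hence the 22016-byte offset (the header record size) used for the
# data memmap above.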
logger.debug("Reading time %s", str(datetime.now() - tic)) self._header = header self._data = data def get_angles(self, angle_id): """Get sun-satellite viewing angles.""" sunz40km = self._data["ang"][:, :, 0] * 1e-2 satz40km = self._data["ang"][:, :, 1] * 1e-2 azidiff40km = self._data["ang"][:, :, 2] * 1e-2 try: from geotiepoints.interpolator import Interpolator except ImportError: logger.warning("Could not interpolate sun-sat angles, " "python-geotiepoints missing.") self.sunz, self.satz, self.azidiff = sunz40km, satz40km, azidiff40km else: cols40km = np.arange(24, 2048, 40) cols1km = np.arange(2048) lines = sunz40km.shape[0] rows40km = np.arange(lines) rows1km = np.arange(lines) along_track_order = 1 cross_track_order = 3 satint = Interpolator( [sunz40km, satz40km, azidiff40km], (rows40km, cols40km), (rows1km, cols1km), along_track_order, cross_track_order) self.sunz, self.satz, self.azidiff = delayed(satint.interpolate, nout=3)() self.sunz = da.from_delayed(self.sunz, (lines, 2048), sunz40km.dtype) self.satz = da.from_delayed(self.satz, (lines, 2048), satz40km.dtype) self.azidiff = da.from_delayed(self.azidiff, (lines, 2048), azidiff40km.dtype) return create_xarray(getattr(self, ANGLES[angle_id])) def navigate(self): """Get the longitudes and latitudes of the scene.""" lons40km = self._data["pos"][:, :, 1] * 1e-4 lats40km = self._data["pos"][:, :, 0] * 1e-4 try: from geotiepoints import SatelliteInterpolator except ImportError: logger.warning("Could not interpolate lon/lats, " "python-geotiepoints missing.") self.lons, self.lats = lons40km, lats40km else: cols40km = np.arange(24, 2048, 40) cols1km = np.arange(2048) lines = lons40km.shape[0] rows40km = np.arange(lines) rows1km = np.arange(lines) along_track_order = 1 cross_track_order = 3 satint = SatelliteInterpolator( (lons40km, lats40km), (rows40km, cols40km), (rows1km, cols1km), along_track_order, cross_track_order) self.lons, self.lats = delayed(satint.interpolate, nout=2)() self.lons = da.from_delayed(self.lons, (lines, 2048), lons40km.dtype) self.lats = da.from_delayed(self.lats, (lines, 2048), lats40km.dtype) def calibrate(self, dataset_id, pre_launch_coeffs=False, calib_coeffs=None): """Calibrate the data.""" if calib_coeffs is None: calib_coeffs = {} units = {'reflectance': '%', 'brightness_temperature': 'K', 'counts': '', 'radiance': 'W*m-2*sr-1*cm ?'} if dataset_id.name in ("3a", "3b") and self._is3b is None: # Is it 3a or 3b: self._is3a = da.bitwise_and(da.from_array(self._data['scnlinbit'], chunks=LINE_CHUNK), 3) == 0 self._is3b = da.bitwise_and(da.from_array(self._data['scnlinbit'], chunks=LINE_CHUNK), 3) == 1 if dataset_id.name == '3a' and not np.any(self._is3a): raise ValueError("Empty dataset for channel 3A") if dataset_id.name == '3b' and not np.any(self._is3b): raise ValueError("Empty dataset for channel 3B") try: vis_idx = ['1', '2', '3a'].index(dataset_id.name) ir_idx = None except ValueError: vis_idx = None ir_idx = ['3b', '4', '5'].index(dataset_id.name) mask = True if vis_idx is not None: coeffs = calib_coeffs.get('ch' + dataset_id.name) if dataset_id.name == '3a': mask = self._is3a[:, None] ds = create_xarray( _vis_calibrate(self._data, vis_idx, dataset_id.calibration, pre_launch_coeffs, coeffs, mask=mask)) else: if dataset_id.name == '3b': mask = self._is3b[:, None] ds = create_xarray( _ir_calibrate(self._header, self._data, ir_idx, dataset_id.calibration, mask=mask)) ds.attrs['units'] = units[dataset_id.calibration] ds.attrs.update(dataset_id._asdict()) return ds AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", 
"4", "5") # AAPP 1b header _HEADERTYPE = np.dtype([("siteid", "S3"), ("blank", "S1"), ("l1bversnb", " 0.0 return da.where(mask, rad, np.nan) # Central wavenumber: cwnum = header['radtempcnv'][0, irchn, 0] if irchn == 0: cwnum = cwnum / 1.0e2 else: cwnum = cwnum / 1.0e3 bandcor_2 = header['radtempcnv'][0, irchn, 1] / 1e5 bandcor_3 = header['radtempcnv'][0, irchn, 2] / 1e6 ir_const_1 = 1.1910659e-5 ir_const_2 = 1.438833 t_planck = (ir_const_2 * cwnum) / \ np.log(1 + ir_const_1 * cwnum * cwnum * cwnum / rad) # Band corrections applied to t_planck to get correct # brightness temperature for channel: if bandcor_2 < 0: # Post AAPP-v4 tb_ = bandcor_2 + bandcor_3 * t_planck else: # AAPP 1 to 4 tb_ = (t_planck - bandcor_2) / bandcor_3 # Mask unnaturally low values return da.where(mask, tb_, np.nan) satpy-0.20.0/satpy/readers/abi_base.py000066400000000000000000000241521362525524100176260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advance Baseline Imager reader base class for the Level 1b and l2+ reader.""" import logging from datetime import datetime import numpy as np import xarray as xr from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE logger = logging.getLogger(__name__) PLATFORM_NAMES = { 'G16': 'GOES-16', 'G17': 'GOES-17', } class NC_ABI_BASE(BaseFileHandler): """Base reader for ABI L1B L2+ NetCDF4 files.""" def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info) # xarray's default netcdf4 engine try: self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'x': CHUNK_SIZE, 'y': CHUNK_SIZE}, ) except ValueError: self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'lon': CHUNK_SIZE, 'lat': CHUNK_SIZE}, ) if 't' in self.nc.dims or 't' in self.nc.coords: self.nc = self.nc.rename({'t': 'time'}) platform_shortname = filename_info['platform_shortname'] self.platform_name = PLATFORM_NAMES.get(platform_shortname) if 'goes_imager_projection' in self.nc: self.nlines = self.nc['y'].size self.ncols = self.nc['x'].size elif 'goes_lat_lon_projection' in self.nc: self.nlines = self.nc['lat'].size self.ncols = self.nc['lon'].size self.nc = self.nc.rename({'lon': 'x', 'lat': 'y'}) self.coords = {} @property def sensor(self): """Get sensor name for current file handler.""" return 'abi' def __getitem__(self, item): """Wrap `self.nc[item]` for better floating point precision. Some datasets use a 32-bit float scaling factor like the 'x' and 'y' variables which causes inaccurate unscaled data values. This method forces the scale factor to a 64-bit float first. 
""" def is_int(val): return np.issubdtype(val.dtype, np.integer) if hasattr(val, 'dtype') else isinstance(val, int) data = self.nc[item] attrs = data.attrs factor = data.attrs.get('scale_factor', 1) offset = data.attrs.get('add_offset', 0) fill = data.attrs.get('_FillValue') unsigned = data.attrs.get('_Unsigned', None) # Ref. GOESR PUG-L1B-vol3, section 5.0.2 Unsigned Integer Processing if unsigned is not None and unsigned.lower() == 'true': # cast the data from int to uint data = data.astype('u%s' % data.dtype.itemsize) if fill is not None: fill = fill.astype('u%s' % fill.dtype.itemsize) if fill is not None: if is_int(data) and is_int(factor) and is_int(offset): new_fill = fill else: new_fill = np.nan data = data.where(data != fill, new_fill) if factor != 1 and item in ('x', 'y'): # be more precise with x/y coordinates # see get_area_def for more information data = data * np.round(float(factor), 6) + np.round(float(offset), 6) elif factor != 1: # make sure the factor is a 64-bit float # can't do this in place since data is most likely uint16 # and we are making it a 64-bit float if not is_int(factor): factor = float(factor) data = data * factor + offset data.attrs = attrs # handle coordinates (and recursive fun) new_coords = {} # 'time' dimension causes issues in other processing # 'x_image' and 'y_image' are confusing to some users and unnecessary # 'x' and 'y' will be overwritten by base class AreaDefinition for coord_name in ('x_image', 'y_image', 'time', 'x', 'y'): if coord_name in data.coords: data = data.drop_vars(coord_name) if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): if coord_name not in self.coords: self.coords[coord_name] = self[coord_name] new_coords[coord_name] = self.coords[coord_name] data.coords.update(new_coords) return data def get_dataset(self, key, info): """Load a dataset.""" raise NotImplementedError("Reader {} has not implemented get_dataset".format(self.name)) def get_area_def(self, key): """Get the area definition of the data at hand.""" if 'goes_imager_projection' in self.nc: return self._get_areadef_fixedgrid(key) elif 'goes_lat_lon_projection' in self.nc: return self._get_areadef_latlon(key) else: raise ValueError('Unsupported projection found in the dataset') def _get_areadef_latlon(self, key): """Get the area definition of the data at hand.""" projection = self.nc["goes_lat_lon_projection"] a = projection.attrs['semi_major_axis'] b = projection.attrs['semi_minor_axis'] fi = projection.attrs['inverse_flattening'] pm = projection.attrs['longitude_of_prime_meridian'] proj_ext = self.nc["geospatial_lat_lon_extent"] w_lon = proj_ext.attrs['geospatial_westbound_longitude'] e_lon = proj_ext.attrs['geospatial_eastbound_longitude'] n_lat = proj_ext.attrs['geospatial_northbound_latitude'] s_lat = proj_ext.attrs['geospatial_southbound_latitude'] lat_0 = proj_ext.attrs['geospatial_lat_center'] lon_0 = proj_ext.attrs['geospatial_lon_center'] area_extent = (w_lon, s_lat, e_lon, n_lat) proj_dict = {'proj': 'latlong', 'lon_0': float(lon_0), 'lat_0': float(lat_0), 'a': float(a), 'b': float(b), 'fi': float(fi), 'pm': float(pm)} ll_area_def = geometry.AreaDefinition( self.nc.attrs.get('orbital_slot', 'abi_geos'), self.nc.attrs.get('spatial_resolution', 'ABI file area'), 'abi_latlon', proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) return ll_area_def def _get_areadef_fixedgrid(self, key): """Get the area definition of the data at hand. 
Note this method takes special care to round and cast numbers to new data types so that the area definitions for different resolutions (different bands) should be equal. Without the special rounding in `__getitem__` and this method the area extents can be 0 to 1.0 meters off depending on how the calculations are done. """ projection = self.nc["goes_imager_projection"] a = projection.attrs['semi_major_axis'] b = projection.attrs['semi_minor_axis'] h = projection.attrs['perspective_point_height'] lon_0 = projection.attrs['longitude_of_projection_origin'] sweep_axis = projection.attrs['sweep_angle_axis'][0] # compute x and y extents in m h = np.float64(h) x = self['x'] y = self['y'] x_l = x[0].values x_r = x[-1].values y_l = y[-1].values y_u = y[0].values x_half = (x_r - x_l) / (self.ncols - 1) / 2. y_half = (y_u - y_l) / (self.nlines - 1) / 2. area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) area_extent = tuple(np.round(h * val, 6) for val in area_extent) proj_dict = {'proj': 'geos', 'lon_0': float(lon_0), 'a': float(a), 'b': float(b), 'h': h, 'units': 'm', 'sweep': sweep_axis} fg_area_def = geometry.AreaDefinition( self.nc.attrs.get('orbital_slot', 'abi_geos'), self.nc.attrs.get('spatial_resolution', 'ABI file area'), 'abi_fixed_grid', proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) return fg_area_def @property def start_time(self): """Start time of the current file's observations.""" return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """End time of the current file's observations.""" return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%S.%fZ') def spatial_resolution_to_number(self): """Convert the 'spatial_resolution' global attribute to meters.""" res = self.nc.attrs['spatial_resolution'].split(' ')[0] if res.endswith('km'): res = int(float(res[:-2]) * 1000) elif res.endswith('m'): res = int(res[:-1]) else: raise ValueError("Unexpected 'spatial_resolution' attribute '{}'".format(res)) return res def __del__(self): """Close the NetCDF file that may still be open.""" try: self.nc.close() except (IOError, OSError, AttributeError): pass satpy-0.20.0/satpy/readers/abi_l1b.py000066400000000000000000000112401362525524100173640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advance Baseline Imager reader for the Level 1b format. 
The files read by this reader are described in the official PUG document: https://www.goes-r.gov/users/docs/PUG-L1b-vol3.pdf """ import logging import numpy as np from satpy.readers.abi_base import NC_ABI_BASE logger = logging.getLogger(__name__) class NC_ABI_L1B(NC_ABI_BASE): """File reader for individual ABI L1B NetCDF4 files.""" def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading in get_dataset %s.', key.name) radiances = self['Rad'] if key.calibration == 'reflectance': logger.debug("Calibrating to reflectances") res = self._vis_calibrate(radiances) elif key.calibration == 'brightness_temperature': logger.debug("Calibrating to brightness temperatures") res = self._ir_calibrate(radiances) elif key.calibration != 'radiance': raise ValueError("Unknown calibration '{}'".format(key.calibration)) else: res = radiances # convert to satpy standard units if res.attrs['units'] == '1': res *= 100 res.attrs['units'] = '%' res.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) # Add orbital parameters projection = self.nc["goes_imager_projection"] res.attrs['orbital_parameters'] = { 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), 'projection_altitude': float(projection.attrs['perspective_point_height']), 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), 'satellite_nominal_altitude': float(self['nominal_satellite_height']) * 1000., 'yaw_flip': bool(self['yaw_flip_flag']), } res.attrs.update(key.to_dict()) # remove attributes that could be confusing later res.attrs.pop('_FillValue', None) res.attrs.pop('scale_factor', None) res.attrs.pop('add_offset', None) res.attrs.pop('_Unsigned', None) res.attrs.pop('ancillary_variables', None) # Can't currently load DQF # add in information from the filename that may be useful to the user for attr in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname'): res.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): res.attrs[attr] = self.nc.attrs.get(attr) # only include these if they are present for attr in ('fusion_args',): if attr in self.nc.attrs: res.attrs[attr] = self.nc.attrs[attr] return res def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" solar_irradiance = self['esun'] esd = self["earth_sun_distance_anomaly_in_AU"].astype(float) factor = np.pi * esd * esd / solar_irradiance res = data * factor res.attrs = data.attrs res.attrs['units'] = '1' res.attrs['long_name'] = 'Bidirectional Reflectance' res.attrs['standard_name'] = 'toa_bidirectional_reflectance' return res def _ir_calibrate(self, data): """Calibrate IR channels to BT.""" fk1 = float(self["planck_fk1"]) fk2 = float(self["planck_fk2"]) bc1 = float(self["planck_bc1"]) bc2 = float(self["planck_bc2"]) res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs res.attrs['units'] = 'K' res.attrs['long_name'] = 'Brightness Temperature' res.attrs['standard_name'] = 'toa_brightness_temperature' return res satpy-0.20.0/satpy/readers/abi_l2_nc.py000066400000000000000000000074201362525524100177100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This program is free software: you can redistribute it and/or 
modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Advanced Baseline Imager NOAA Level 2+ products reader. The files read by this reader are described in the official PUG document: https://www.goes-r.gov/products/docs/PUG-L2+-vol5.pdf """ import logging import numpy as np from satpy.readers.abi_base import NC_ABI_BASE LOG = logging.getLogger(__name__) class NC_ABI_L2(NC_ABI_BASE): """Reader class for NOAA ABI l2+ products in netCDF format.""" def get_dataset(self, key, info): """Load a dataset.""" var = info['file_key'] LOG.debug('Reading in get_dataset %s.', var) variable = self[var] _units = variable.attrs['units'] if 'units' in variable.attrs else None variable.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor, 'units': _units, 'satellite_latitude': float(self.nc['nominal_satellite_subpoint_lat']), 'satellite_longitude': float(self.nc['nominal_satellite_subpoint_lon']), 'satellite_altitude': float(self.nc['nominal_satellite_height'])}) variable.attrs.update(key.to_dict()) # remove attributes that could be confusing later if not np.issubdtype(variable.dtype, np.integer): # integer fields keep the _FillValue variable.attrs.pop('_FillValue', None) variable.attrs.pop('scale_factor', None) variable.attrs.pop('add_offset', None) variable.attrs.pop('valid_range', None) variable.attrs.pop('_Unsigned', None) variable.attrs.pop('ancillary_variables', None) # Can't currently load DQF if 'flag_meanings' in variable.attrs: variable.attrs['flag_meanings'] = variable.attrs['flag_meanings'].split(' ') # add in information from the filename that may be useful to the user for attr in ('scan_mode', 'platform_shortname'): variable.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): variable.attrs[attr] = self.nc.attrs.get(attr) return variable def available_datasets(self, configured_datasets=None): """Add resolution to configured datasets.""" for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches: # we have this dataset resolution = self.spatial_resolution_to_number() new_info = ds_info.copy() new_info.setdefault('resolution', resolution) # yield the copy that carries the resolution (yielding the # original ds_info here would silently drop it) yield True, new_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info satpy-0.20.0/satpy/readers/acspo.py000066400000000000000000000125351362525524100172100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version.
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """ACSPO SST Reader. See the following page for more information: https://podaac.jpl.nasa.gov/dataset/VIIRS_NPP-OSPO-L2P-v2.3 """ import logging from datetime import datetime import numpy as np from satpy.readers.netcdf_utils import NetCDF4FileHandler LOG = logging.getLogger(__name__) ROWS_PER_SCAN = { 'MODIS': 10, 'VIIRS': 16, 'AVHRR': None, } class ACSPOFileHandler(NetCDF4FileHandler): """ACSPO L2P SST File Reader.""" @property def platform_name(self): """Get satellite name for this file's data.""" res = self['/attr/platform'] if isinstance(res, np.ndarray): return str(res.astype(str)) else: return res @property def sensor_name(self): """Get instrument name for this file's data.""" res = self['/attr/sensor'] if isinstance(res, np.ndarray): return str(res.astype(str)) else: return res def get_shape(self, ds_id, ds_info): """Get numpy array shape for the specified dataset. Args: ds_id (DatasetID): ID of dataset that will be loaded ds_info (dict): Dictionary of dataset information from config file Returns: tuple: (rows, cols) """ var_path = ds_info.get('file_key', '{}'.format(ds_id.name)) if var_path + '/shape' not in self: # loading a scalar value shape = 1 else: shape = self[var_path + '/shape'] if len(shape) == 3: if shape[0] != 1: raise ValueError("Not sure how to load 3D Dataset with more than 1 time") else: shape = shape[1:] return shape @staticmethod def _parse_datetime(datestr): return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") @property def start_time(self): """Get first observation time of data.""" return self._parse_datetime(self['/attr/time_coverage_start']) @property def end_time(self): """Get final observation time of data.""" return self._parse_datetime(self['/attr/time_coverage_end']) def get_metadata(self, dataset_id, ds_info): """Collect various metadata about the specified dataset.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id.name)) shape = self.get_shape(dataset_id, ds_info) units = self[var_path + '/attr/units'] info = getattr(self[var_path], 'attrs', {}) standard_name = self[var_path + '/attr/standard_name'] resolution = float(self['/attr/spatial_resolution'].split(' ')[0]) rows_per_scan = ROWS_PER_SCAN.get(self.sensor_name) or 0 info.update(dataset_id.to_dict()) info.update({ 'shape': shape, 'units': units, 'platform_name': self.platform_name, 'sensor': self.sensor_name, 'standard_name': standard_name, 'resolution': resolution, 'rows_per_scan': rows_per_scan, 'long_name': self.get(var_path + '/attr/long_name'), 'comment': self.get(var_path + '/attr/comment'), }) return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata from file on disk.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id.name)) metadata = self.get_metadata(dataset_id, ds_info) shape = metadata['shape'] file_shape = self[var_path + '/shape'] metadata['shape'] = shape valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] # no need to check fill value since we are using valid min/max scale_factor = self.get(var_path + '/attr/scale_factor') add_offset = self.get(var_path + '/attr/add_offset') data = self[var_path] if isinstance(file_shape, tuple) and len(file_shape) == 3: # can 
only read 3D arrays with size 1 in the first dimension data = data[0] data = data.where((data >= valid_min) & (data <= valid_max)) if scale_factor is not None: data = data * scale_factor + add_offset if ds_info.get('cloud_clear', False): # clear-sky if bit 15-16 are 00 clear_sky_mask = (self['l2p_flags'][0] & 0b1100000000000000) != 0 data = data.where(~clear_sky_mask) data.attrs.update(metadata) # Remove these attributes since they are no longer valid and can cause invalid value filling. data.attrs.pop('_FillValue', None) data.attrs.pop('valid_max', None) data.attrs.pop('valid_min', None) return data satpy-0.20.0/satpy/readers/agri_l1.py000066400000000000000000000220671362525524100174220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Geostationary Radiation Imager reader for the Level_1 HDF format The files read by this reader are described in the official Real Time Data Service: http://fy4.nsmc.org.cn/data/en/data/realtime.html """ import logging import numpy as np import xarray as xr import dask.array as da from datetime import datetime from satpy.readers._geos_area import get_area_extent, get_area_definition from satpy.readers.hdf5_utils import HDF5FileHandler logger = logging.getLogger(__name__) # info of 500 m, 1 km, 2 km and 4 km data _resolution_list = [500, 1000, 2000, 4000] _COFF_list = [10991.5, 5495.5, 2747.5, 1373.5] _CFAC_list = [81865099.0, 40932549.0, 20466274.0, 10233137.0] _LOFF_list = [10991.5, 5495.5, 2747.5, 1373.5] _LFAC_list = [81865099.0, 40932549.0, 20466274.0, 10233137.0] PLATFORM_NAMES = {'FY4A': 'FY-4A', 'FY4B': 'FY-4B', 'FY4C': 'FY-4C'} class HDF_AGRI_L1(HDF5FileHandler): def __init__(self, filename, filename_info, filetype_info): super(HDF_AGRI_L1, self).__init__(filename, filename_info, filetype_info) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" logger.debug('Reading in get_dataset %s.', dataset_id.name) file_key = ds_info.get('file_key', dataset_id.name) lut_key = ds_info.get('lut_key', dataset_id.name) data = self.get(file_key) lut = self.get(lut_key) if data.ndim >= 2: data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) # convert bytes to string data.attrs['long_name'] = data.attrs['long_name'].decode('gbk') data.attrs['band_names'] = data.attrs['band_names'].decode('gbk') if ds_info['file_type'] != 'agri_l1_4000m_geo': data.attrs['center_wavelength'] = data.attrs['center_wavelength'].decode('gbk') # calibration calibration = ds_info['calibration'] if calibration == 'counts': data.attrs['units'] = ds_info['units'] ds_info['valid_range'] = data.attrs['valid_range'] return data elif calibration in ['reflectance', 'radiance']: logger.debug("Calibrating to reflectances") # using the corresponding SCALE and OFFSET cal_coef = 'CALIBRATION_COEF(SCALE+OFFSET)' num_channel = self.get(cal_coef).shape[0] if num_channel == 1: # only channel_2, resolution = 500 m 
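# a single-row coefficient table: row 0 holds the (slope, offset) pair for the
# one channel; in the multi-channel branch below the row is picked from the
# channel number encoded in the last two characters of file_key
# (e.g. a file_key ending in '02' selects row index 1)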
slope = self.get(cal_coef)[0, 0].values offset = self.get(cal_coef)[0, 1].values else: slope = self.get(cal_coef)[int(file_key[-2:])-1, 0].values offset = self.get(cal_coef)[int(file_key[-2:])-1, 1].values data = self.dn2(data, calibration, slope, offset) if calibration == 'reflectance': ds_info['valid_range'] = (data.attrs['valid_range'] * slope + offset) * 100 else: ds_info['valid_range'] = (data.attrs['valid_range'] * slope + offset) elif calibration == 'brightness_temperature': logger.debug("Calibrating to brightness_temperature") # the value of dn is the index of brightness_temperature data = self.calibrate(data, lut) ds_info['valid_range'] = lut.attrs['valid_range'] satname = PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name']) data.attrs.update({'platform_name': satname, 'sensor': self['/attr/Sensor Identification Code'].lower(), 'orbital_parameters': { 'satellite_nominal_latitude': self['/attr/NOMCenterLat'], 'satellite_nominal_longitude': self['/attr/NOMCenterLon'], 'satellite_nominal_altitude': self['/attr/NOMSatHeight']}}) data.attrs.update(ds_info) # remove attributes that could be confusing later data.attrs.pop('FillValue', None) data.attrs.pop('Intercept', None) data.attrs.pop('Slope', None) data = data.where((data >= min(data.attrs['valid_range'])) & (data <= max(data.attrs['valid_range']))) return data def get_area_def(self, key): # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf res = key.resolution pdict = {} pdict['coff'] = _COFF_list[_resolution_list.index(res)] pdict['loff'] = _LOFF_list[_resolution_list.index(res)] pdict['cfac'] = _CFAC_list[_resolution_list.index(res)] pdict['lfac'] = _LFAC_list[_resolution_list.index(res)] pdict['a'] = self.file_content['/attr/dEA'] * 1E3 # equator radius (m) pdict['b'] = pdict['a'] * (1 - 1 / self.file_content['/attr/dObRecFlat']) # polar radius (m) pdict['h'] = self.file_content['/attr/NOMSatHeight'] # the altitude of satellite (m) pdict['ssp_lon'] = self.file_content['/attr/NOMCenterLon'] pdict['nlines'] = self.file_content['/attr/RegLength'] pdict['ncols'] = self.file_content['/attr/RegWidth'] pdict['scandir'] = 'S2N' b500 = ['C02'] b1000 = ['C01', 'C03'] b2000 = ['C04', 'C05', 'C06', 'C07'] pdict['a_desc'] = "AGRI {} area".format(self.filename_info['observation_type']) if (key.name in b500): pdict['a_name'] = self.filename_info['observation_type']+'_500m' pdict['p_id'] = 'FY-4A, 500m' elif (key.name in b1000): pdict['a_name'] = self.filename_info['observation_type']+'_1000m' pdict['p_id'] = 'FY-4A, 1000m' elif (key.name in b2000): pdict['a_name'] = self.filename_info['observation_type']+'_2000m' pdict['p_id'] = 'FY-4A, 2000m' else: pdict['a_name'] = self.filename_info['observation_type']+'_2000m' pdict['p_id'] = 'FY-4A, 4000m' pdict['coff'] = pdict['coff'] + 0.5 pdict['nlines'] = pdict['nlines'] - 1 pdict['ncols'] = pdict['ncols'] - 1 pdict['loff'] = (pdict['loff'] - self.file_content['/attr/End Line Number'] + 0.5) area_extent = get_area_extent(pdict) area_extent = (area_extent[0] + 2000, area_extent[1], area_extent[2] + 2000, area_extent[3]) pdict['nlines'] = pdict['nlines'] + 1 pdict['ncols'] = pdict['ncols'] + 1 area = get_area_definition(pdict, area_extent) return area def dn2(self, dn, calibration, slope, offset): """Convert digital number (DN) to reflectance or radiance Args: dn: Raw detector digital number slope: Slope offset: Offset Returns: Reflectance [%] or 
Radiance [mW/ (m2 cm-1 sr)] """ ref = dn * slope + offset if calibration == 'reflectance': ref *= 100 # set unit to % ref = ref.clip(min=0) ref.attrs = dn.attrs return ref @staticmethod def _getitem(block, lut): return lut[block] def calibrate(self, data, lut): """Calibrate digital number (DN) to brightness_temperature Args: dn: Raw detector digital number lut: the look up table Returns: brightness_temperature [K] """ # append nan to the end of lut for fillvalue lut = np.append(lut, np.nan) data.data = da.where(data.data > lut.shape[0], lut.shape[0] - 1, data.data) res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) return res @property def start_time(self): start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z' return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') satpy-0.20.0/satpy/readers/ahi_hsd.py000066400000000000000000000615031362525524100175010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Himawari Imager (AHI) standard format data reader. References: - Himawari-8/9 Himawari Standard Data User's Guide - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/spsg_ahi.html Time Information **************** AHI observations use the idea of a "scheduled" time and an "observation time. The "scheduled" time is when the instrument was told to record the data, usually at a specific and consistent interval. The "observation" time is when the data was actually observed. Scheduled time can be accessed from the `scheduled_time` metadata key and observation time from the `start_time` key. 
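For example, after loading a band both times are available in the metadata (a sketch; the filename is an illustrative placeholder)::

    from satpy import Scene
    scn = Scene(filenames=['HS_H08_20181003_0300_B03_FLDK_R05_S0101.DAT'],
                reader='ahi_hsd')
    scn.load(['B03'])
    print(scn['B03'].attrs['scheduled_time'], scn['B03'].attrs['start_time'])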
""" import logging from datetime import datetime, timedelta import numpy as np import dask.array as da import xarray as xr import warnings import os from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import unzip_file, get_geostationary_mask, \ np2str, get_earth_radius from satpy.readers._geos_area import get_area_extent, get_area_definition AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16") logger = logging.getLogger('ahi_hsd') # Basic information block: _BASIC_INFO_TYPE = np.dtype([("hblock_number", "u1"), ("blocklength", " no temperature data = da.where(data == 0, np.float32(np.nan), data) cwl = self._header['block5']["central_wave_length"][0] * 1e-6 c__ = self._header['calibration']["speed_of_light"][0] h__ = self._header['calibration']["planck_constant"][0] k__ = self._header['calibration']["boltzmann_constant"][0] a__ = (h__ * c__) / (k__ * cwl) b__ = ((2 * h__ * c__ ** 2) / (data * 1.0e6 * cwl ** 5)) + 1 Te_ = a__ / da.log(b__) c0_ = self._header['calibration']["c0_rad2tb_conversion"][0] c1_ = self._header['calibration']["c1_rad2tb_conversion"][0] c2_ = self._header['calibration']["c2_rad2tb_conversion"][0] return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0) satpy-0.20.0/satpy/readers/ami_l1b.py000066400000000000000000000217361362525524100174120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Meteorological Imager reader for the Level 1b NetCDF4 format.""" import logging from datetime import datetime, timedelta import numpy as np import xarray as xr import dask.array as da import pyproj from satpy.readers._geos_area import get_area_definition, get_area_extent from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE logger = logging.getLogger(__name__) PLATFORM_NAMES = { 'GK-2A': 'GEO-KOMPSAT-2A', 'GK-2B': 'GEO-KOMPSAT-2B', } class AMIL1bNetCDF(BaseFileHandler): """Base reader for AMI L1B NetCDF4 files.""" def __init__(self, filename, filename_info, filetype_info, calib_mode='PYSPECTRAL', allow_conditional_pixels=False): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(AMIL1bNetCDF, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'dim_image_x': CHUNK_SIZE, 'dim_image_y': CHUNK_SIZE}) self.nc = self.nc.rename({'dim_image_x': 'x', 'dim_image_y': 'y'}) platform_shortname = self.nc.attrs['satellite_name'] self.platform_name = PLATFORM_NAMES.get(platform_shortname) self.sensor = 'ami' self.allow_conditional_pixels = allow_conditional_pixels calib_mode_choices = ('FILE', 'PYSPECTRAL') if calib_mode.upper() not in calib_mode_choices: raise ValueError('Invalid calibration mode: {}. 
Choose one of {}'.format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() @property def start_time(self): """Get observation start time.""" base = datetime(2000, 1, 1, 12, 0, 0) return base + timedelta(seconds=self.nc.attrs['observation_start_time']) @property def end_time(self): """Get observation end time.""" base = datetime(2000, 1, 1, 12, 0, 0) return base + timedelta(seconds=self.nc.attrs['observation_end_time']) def get_area_def(self, dsid): """Get area definition for this file.""" pdict = {} pdict['a'] = self.nc.attrs['earth_equatorial_radius'] pdict['b'] = self.nc.attrs['earth_polar_radius'] pdict['h'] = self.nc.attrs['nominal_satellite_height'] - pdict['a'] pdict['ssp_lon'] = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? pdict['ncols'] = self.nc.attrs['number_of_columns'] pdict['nlines'] = self.nc.attrs['number_of_lines'] obs_mode = self.nc.attrs['observation_mode'] resolution = self.nc.attrs['channel_spatial_resolution'] # Example offset: 11000.5 # the 'get_area_extent' will handle this half pixel for us pdict['cfac'] = self.nc.attrs['cfac'] pdict['coff'] = self.nc.attrs['coff'] pdict['lfac'] = -self.nc.attrs['lfac'] pdict['loff'] = self.nc.attrs['loff'] pdict['scandir'] = 'N2S' pdict['a_name'] = 'ami_geos_{}'.format(obs_mode.lower()) pdict['a_desc'] = 'AMI {} Area at {} resolution'.format(obs_mode, resolution) pdict['p_id'] = 'ami_fixed_grid' area_extent = get_area_extent(pdict) fg_area_def = get_area_definition(pdict, area_extent) return fg_area_def def get_orbital_parameters(self): """Collect orbital parameters for this file.""" a = float(self.nc.attrs['earth_equatorial_radius']) b = float(self.nc.attrs['earth_polar_radius']) # nominal_satellite_height seems to be from the center of the earth h = float(self.nc.attrs['nominal_satellite_height']) - a lon_0 = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? 
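# sc_position_center_pixel is an ECEF (x, y, z) triple (assumed metres,
# matching the altitude note below); it is converted to geodetic
# lon/lat/alt by the pyproj transform that follows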
sc_position = self.nc['sc_position'].attrs['sc_position_center_pixel'] # convert ECEF coordinates to lon, lat, alt ecef = pyproj.Proj(proj='geocent', a=a, b=b) lla = pyproj.Proj(proj='latlong', a=a, b=b) sc_position = pyproj.transform( ecef, lla, sc_position[0], sc_position[1], sc_position[2]) orbital_parameters = { 'projection_longitude': float(lon_0), 'projection_latitude': 0.0, 'projection_altitude': h, 'satellite_actual_longitude': sc_position[0], 'satellite_actual_latitude': sc_position[1], 'satellite_actual_altitude': sc_position[2], # meters } return orbital_parameters def get_dataset(self, dataset_id, ds_info): """Load a dataset as a xarray DataArray.""" file_key = ds_info.get('file_key', dataset_id.name) data = self.nc[file_key] # hold on to attributes for later attrs = data.attrs # highest 2 bits are data quality flags # 00=no error # 01=available under conditions # 10=outside the viewing area # 11=Error exists if self.allow_conditional_pixels: qf = data & 0b1000000000000000 else: qf = data & 0b1100000000000000 # mask DQF bits bits = attrs['number_of_valid_bits_per_pixel'] data &= 2**bits - 1 # only take "no error" pixels as valid data = data.where(qf == 0) # Calibration values from file, fall back to built-in if unavailable gain = self.nc.attrs['DN_to_Radiance_Gain'] offset = self.nc.attrs['DN_to_Radiance_Offset'] if dataset_id.calibration in ('radiance', 'reflectance', 'brightness_temperature'): data = gain * data + offset if dataset_id.calibration == 'reflectance': # depends on the radiance calibration above rad_to_alb = self.nc.attrs['Radiance_to_Albedo_c'] if ds_info.get('units') == '%': rad_to_alb *= 100 data = data * rad_to_alb elif dataset_id.calibration == 'brightness_temperature': data = self._calibrate_ir(dataset_id, data) elif dataset_id.calibration not in ('counts', 'radiance'): raise ValueError("Unknown calibration: '{}'".format(dataset_id.calibration)) for attr_name in ('standard_name', 'units'): attrs[attr_name] = ds_info[attr_name] attrs.update(dataset_id.to_dict()) attrs['orbital_parameters'] = self.get_orbital_parameters() attrs['platform_name'] = self.platform_name attrs['sensor'] = self.sensor data.attrs = attrs return data def _calibrate_ir(self, dataset_id, data): """Calibrate radiance data to BTs using either pyspectral or in-file coefficients.""" if self.calib_mode == 'PYSPECTRAL': # depends on the radiance calibration above # Convert um to m^-1 (SI units for pyspectral) wn = 1 / (dataset_id.wavelength[1] / 1e6) # Convert cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. 
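# the 1e-5 factor below combines mW -> W (1e-3) with
# per-cm^-1 -> per-m^-1 (1e-2)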
bt_data = rad2temp(wn, data.data * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays data.data = da.from_array(bt_data, chunks=data.data.chunks) else: # new versions of pyspectral can do dask arrays data.data = bt_data else: # IR coefficients from the file # Channel specific c0 = self.nc.attrs['Teff_to_Tbb_c0'] c1 = self.nc.attrs['Teff_to_Tbb_c1'] c2 = self.nc.attrs['Teff_to_Tbb_c2'] # These should be fixed, but load anyway cval = self.nc.attrs['light_speed'] kval = self.nc.attrs['Boltzmann_constant_k'] hval = self.nc.attrs['Plank_constant_h'] # Compute wavenumber as cm-1 wn = (10000 / dataset_id.wavelength[1]) * 100 # Convert radiance to effective brightness temperature e1 = (2 * hval * cval * cval) * np.power(wn, 3) e2 = (data.data * 1e-5) t_eff = ((hval * cval / kval) * wn) / np.log((e1 / e2) + 1) # Now convert to actual brightness temperature bt_data = c0 + c1 * t_eff + c2 * t_eff * t_eff data.data = bt_data return data satpy-0.20.0/satpy/readers/amsr2_l1b.py000066400000000000000000000051531362525524100176630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for AMSR2 L1B files in HDF5 format. 
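A minimal usage sketch (the filename and dataset name below are illustrative)::

    from satpy import Scene
    scn = Scene(filenames=['GW1AM2_201607191620_137A_L1DLBTBR_1110110.h5'],
                reader='amsr2_l1b')
    scn.load(['btemp_36.5v'])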
""" from satpy.readers.hdf5_utils import HDF5FileHandler class AMSR2L1BFileHandler(HDF5FileHandler): def get_metadata(self, ds_id, ds_info): var_path = ds_info['file_key'] info = getattr(self[var_path], 'attrs', {}) info.update(ds_info) info.update({ "shape": self.get_shape(ds_id, ds_info), "units": self[var_path + "/attr/UNIT"], "platform_name": self["/attr/PlatformShortName"], "sensor": self["/attr/SensorShortName"], "start_orbit": int(self["/attr/StartOrbitNumber"]), "end_orbit": int(self["/attr/StopOrbitNumber"]), }) info.update(ds_id.to_dict()) return info def get_shape(self, ds_id, ds_info): """Get output shape of specified dataset.""" var_path = ds_info['file_key'] shape = self[var_path + '/shape'] if ((ds_info.get('standard_name') == "longitude" or ds_info.get('standard_name') == "latitude") and ds_id.resolution == 10000): return shape[0], int(shape[1] / 2) return shape def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" var_path = ds_info['file_key'] fill_value = ds_info.get('fill_value', 65535) metadata = self.get_metadata(ds_id, ds_info) data = self[var_path] if ((ds_info.get('standard_name') == "longitude" or ds_info.get('standard_name') == "latitude") and ds_id.resolution == 10000): # FIXME: Lower frequency channels need CoRegistration parameters applied data = data[:, ::2] * self[var_path + "/attr/SCALE FACTOR"] else: data = data * self[var_path + "/attr/SCALE FACTOR"] data = data.where(data != fill_value) data.attrs.update(metadata) return data satpy-0.20.0/satpy/readers/avhrr_l1b_gaclac.py000066400000000000000000000276431362525524100212630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reading and calibrating GAC and LAC AVHRR data. .. 
todo:: Fine grained calibration """ import logging from datetime import datetime, timedelta import dask.array as da import numpy as np import pygac.utils import xarray as xr from pygac.gac_klm import GACKLMReader from pygac.gac_pod import GACPODReader from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) spacecrafts = {7: "NOAA 15", 3: "NOAA 16", 13: "NOAA 18", 15: "NOAA 19"} AVHRR3_CHANNEL_NAMES = {"1": 0, "2": 1, "3A": 2, "3B": 3, "4": 4, "5": 5} AVHRR2_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4} AVHRR_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3} ANGLES = ('sensor_zenith_angle', 'sensor_azimuth_angle', 'solar_zenith_angle', 'solar_azimuth_angle', 'sun_sensor_azimuth_difference_angle') class GACLACFile(BaseFileHandler): """Reader for GAC and LAC data.""" def __init__(self, filename, filename_info, filetype_info, start_line=None, end_line=None, strip_invalid_coords=True, interpolate_coords=True, adjust_clock_drift=True, tle_dir=None, tle_name=None, tle_thresh=7): """Init the file handler. Args: start_line: User defined start scanline end_line: User defined end scanline strip_invalid_coords: Strip scanlines with invalid coordinates in the beginning/end of the orbit interpolate_coords: Interpolate coordinates from every eighth pixel to all pixels. adjust_clock_drift: Adjust the geolocation to compensate for the clock error (POD satellites only). tle_dir: Directory holding Two-Line-Element (TLE) files tle_name: Filename pattern of TLE files. tle_thresh: Maximum number of days between observation and nearest TLE """ super(GACLACFile, self).__init__( filename, filename_info, filetype_info) self.start_line = start_line self.end_line = end_line self.strip_invalid_coords = strip_invalid_coords self.interpolate_coords = interpolate_coords self.adjust_clock_drift = adjust_clock_drift self.tle_dir = tle_dir self.tle_name = tle_name self.tle_thresh = tle_thresh self.creation_site = filename_info.get('creation_site') self.reader = None self.channels = None self.angles = None self.qual_flags = None self.midnight_scanline = None self.missing_scanlines = None self.first_valid_lat = None self.last_valid_lat = None self._start_time = filename_info['start_time'] self._end_time = datetime.combine(filename_info['start_time'].date(), filename_info['end_time'].time()) if self._end_time < self._start_time: self._end_time += timedelta(days=1) self.platform_id = filename_info['platform_id'] if self.platform_id in ['NK', 'NL', 'NM', 'NN', 'NP', 'M1', 'M2', 'M3']: if filename_info.get('transfer_mode') == 'GHRR': self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES self.sensor = 'avhrr-3' elif self.platform_id in ['NC', 'ND', 'NF', 'NH', 'NJ']: if filename_info.get('transfer_mode') == 'GHRR': self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR2_CHANNEL_NAMES self.sensor = 'avhrr-2' else: if filename_info.get('transfer_mode') == 'GHRR': self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR_CHANNEL_NAMES self.sensor = 'avhrr' self.filename_info = filename_info def get_dataset(self, key, info): """Get the dataset.""" if self.reader is None: self.reader = self.reader_class( interpolate_coords=self.interpolate_coords, adjust_clock_drift=self.adjust_clock_drift, tle_dir=self.tle_dir, tle_name=self.tle_name, 
tle_thresh=self.tle_thresh, creation_site=self.creation_site) self.reader.read(self.filename) if np.all(self.reader.mask): raise ValueError('All data is masked out') if key.name in ['latitude', 'longitude']: # Lats/lons are buffered by the reader if key.name == 'latitude': _, data = self.reader.get_lonlat() else: data, _ = self.reader.get_lonlat() # If coordinate interpolation is disabled, only every eighth # pixel has a lat/lon coordinate xdim = 'x' if self.interpolate_coords else 'x_every_eighth' xcoords = None elif key.name in ANGLES: data = self._get_angle(key.name) xdim = 'x' if self.interpolate_coords else 'x_every_eighth' xcoords = None elif key.name == 'qual_flags': data = self.reader.get_qual_flags() xdim = 'num_flags' xcoords = ['Scan line number', 'Fatal error flag', 'Insufficient data for calibration', 'Insufficient data for calibration', 'Solar contamination of blackbody in channels 3', 'Solar contamination of blackbody in channels 4', 'Solar contamination of blackbody in channels 5'] else: data = self._get_channel(key.name) xdim = 'x' xcoords = None # Update start/end time using the actual scanline timestamps times = self.reader.get_times() self._start_time = times[0].astype(datetime) self._end_time = times[-1].astype(datetime) # Select user-defined scanlines and/or strip invalid coordinates self.midnight_scanline = self.reader.meta_data['midnight_scanline'] self.missing_scanlines = self.reader.meta_data['missing_scanlines'] if (self.start_line is not None or self.end_line is not None or self.strip_invalid_coords): data, times = self.slice(data=data, times=times) chunk_cols = data.shape[1] chunk_lines = int((CHUNK_SIZE ** 2) / chunk_cols) res = xr.DataArray(da.from_array(data, chunks=(chunk_lines, chunk_cols)), dims=['y', xdim], attrs=info) if xcoords: res[xdim] = xcoords for attr in self.reader.meta_data: res.attrs[attr] = self.reader.meta_data[attr] res.attrs['platform_name'] = self.reader.spacecraft_name res.attrs['orbit_number'] = self.filename_info['orbit_number'] res.attrs['sensor'] = self.sensor try: res.attrs['orbital_parameters'] = {'tle': self.reader.get_tle_lines()} except IndexError: pass res['acq_time'] = ('y', times) res['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' return res def slice(self, data, times): """Select user-defined scanlines and/or strip invalid coordinates. Furthermore, update scanline timestamps and auxiliary information. Args: data: Data to be sliced times: Scanline timestamps Returns: Sliced data and timestamps """ # Slice data, update midnight scanline & list of missing scanlines sliced, self.midnight_scanline, miss_lines = self._slice(data) self.missing_scanlines = miss_lines.astype(int) # Slice timestamps, update start/end time times, _, _ = self._slice(times) self._start_time = times[0].astype(datetime) self._end_time = times[-1].astype(datetime) return sliced, times def _slice(self, data): """Select user-defined scanlines and/or strip invalid coordinates. 
Returns: Sliced data, updated midnight scanline & list of missing scanlines """ start_line = self.start_line if self.start_line is not None else 0 end_line = self.end_line if self.end_line is not None else 0 # Strip scanlines with invalid coordinates if self.strip_invalid_coords: first_valid_lat, last_valid_lat = self._strip_invalid_lat() else: first_valid_lat = last_valid_lat = None # Check and correct user-defined scanlines, if possible start_line, end_line = pygac.utils.check_user_scanlines( start_line=start_line, end_line=end_line, first_valid_lat=first_valid_lat, last_valid_lat=last_valid_lat, along_track=data.shape[0] ) # Slice data, update missing lines and midnight scanline to new # scanline range sliced, miss_lines, midnight_scanline = pygac.utils.slice_channel( data, start_line=start_line, end_line=end_line, first_valid_lat=first_valid_lat, last_valid_lat=last_valid_lat, midnight_scanline=self.midnight_scanline, miss_lines=self.missing_scanlines, qual_flags=self._get_qual_flags() ) return sliced, midnight_scanline, miss_lines def _get_channel(self, name): """Get channel by name and buffer results.""" if self.channels is None: self.channels = self.reader.get_calibrated_channels() return self.channels[:, :, self.chn_dict[name.upper()]] def _get_qual_flags(self): """Get quality flags and buffer results.""" if self.qual_flags is None: self.qual_flags = self.reader.get_qual_flags() return self.qual_flags def _get_angle(self, name): """Get angle by name and buffer results.""" if self.angles is None: sat_azi, sat_zenith, sun_azi, sun_zenith, rel_azi = self.reader.get_angles() self.angles = {'sensor_zenith_angle': sat_zenith, 'sensor_azimuth_angle': sat_azi, 'solar_zenith_angle': sun_zenith, 'solar_azimuth_angle': sun_azi, 'sun_sensor_azimuth_difference_angle': rel_azi} return self.angles[name] def _strip_invalid_lat(self): """Strip scanlines with invalid coordinates in the beginning/end of the orbit. Returns: First and last scanline with valid latitudes. """ if self.first_valid_lat is None: _, lats = self.reader.get_lonlat() start, end = pygac.utils.strip_invalid_lat(lats) self.first_valid_lat, self.last_valid_lat = start, end return self.first_valid_lat, self.last_valid_lat @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time satpy-0.20.0/satpy/readers/caliop_l2_cloud.py000066400000000000000000000074261362525524100211400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Interface to CALIOP L2 HDF4 cloud products.""" import logging import os.path import re from datetime import datetime from pyhdf.SD import SD, SDC from satpy.dataset import Dataset from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class HDF4BandReader(BaseFileHandler): """CALIOP v3 HDF4 reader.""" def __init__(self, filename, filename_info, filetype_info): """Initialze file handler.""" super(HDF4BandReader, self).__init__(filename, filename_info, filetype_info) self.lons = None self.lats = None self._start_time = None self._end_time = None self.get_filehandle() self._start_time = filename_info['start_time'] logger.debug('Retrieving end time from metadata array') self.get_end_time() def get_end_time(self): """Get observation end time from file metadata.""" mda_dict = self.filehandle.attributes() core_mda = mda_dict['coremetadata'] end_time_str = self.parse_metadata_string(core_mda) self._end_time = datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") @staticmethod def parse_metadata_string(metadata_string): """Grab end time with regular expression.""" regex = r"STOP_DATE.+?VALUE\s*=\s*\"(.+?)\"" match = re.search(regex, metadata_string, re.DOTALL) end_time_str = match.group(1) return end_time_str def get_filehandle(self): """Get HDF4 filehandle.""" if os.path.exists(self.filename): self.filehandle = SD(self.filename, SDC.READ) logger.debug("Loading dataset {}".format(self.filename)) else: raise IOError("Path {} does not exist.".format(self.filename)) def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" if key.name in ['longitude', 'latitude']: logger.debug('Reading coordinate arrays.') if self.lons is None or self.lats is None: self.lons, self.lats = self.get_lonlats() if key.name == 'latitude': proj = Dataset(self.lats, id=key, **info) else: proj = Dataset(self.lons, id=key, **info) else: data = self.get_sds_variable(key.name) proj = Dataset(data, id=key, **info) return proj def get_sds_variable(self, name): """Read variable from the HDF4 file.""" sds_obj = self.filehandle.select(name) data = sds_obj.get() return data def get_lonlats(self): """Get longitude and latitude arrays from the file.""" longitudes = self.get_sds_variable('Longitude') latitudes = self.get_sds_variable('Latitude') return longitudes, latitudes @property def start_time(self): """Get start time.""" return self._start_time @property def end_time(self): """Get end time.""" return self._end_time satpy-0.20.0/satpy/readers/clavrx.py000066400000000000000000000257501362525524100174050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to CLAVR-X HDF4 products. 
""" import os import logging import numpy as np import netCDF4 from glob import glob from satpy.readers.hdf4_utils import HDF4FileHandler, SDS from pyresample import geometry LOG = logging.getLogger(__name__) CF_UNITS = { 'none': '1', } class CLAVRXFileHandler(HDF4FileHandler): sensors = { 'MODIS': 'modis', 'VIIRS': 'viirs', 'AVHRR': 'avhrr', 'AHI': 'ahi', # 'ABI': 'abi', } platforms = { 'SNPP': 'npp', 'HIM8': 'himawari8', 'HIM9': 'himawari9', 'H08': 'himawari8', 'H09': 'himawari9', # 'G16': 'GOES-16', # 'G17': 'GOES-17' } rows_per_scan = { 'viirs': 16, 'modis': 10, } nadir_resolution = { 'viirs': 742, 'modis': 1000, 'avhrr': 1050, 'ahi': 2000, # 'abi': 2004, } def get_sensor(self, sensor): for k, v in self.sensors.items(): if k in sensor: return v raise ValueError("Unknown sensor '{}'".format(sensor)) def get_platform(self, platform): for k, v in self.platforms.items(): if k in platform: return v return platform def get_rows_per_scan(self, sensor): for k, v in self.rows_per_scan.items(): if sensor.startswith(k): return v def get_nadir_resolution(self, sensor): for k, v in self.nadir_resolution.items(): if sensor.startswith(k): return v res = self.filename_info.get('resolution') if res.endswith('m'): return int(res[:-1]) elif res is not None: return int(res) @property def start_time(self): return self.filename_info['start_time'] @property def end_time(self): return self.filename_info.get('end_time', self.start_time) def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file""" sensor = self.get_sensor(self['/attr/sensor']) nadir_resolution = self.get_nadir_resolution(sensor) coordinates = ('longitude', 'latitude') handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): this_res = ds_info.get('resolution') this_coords = ds_info.get('coordinates') # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get('file_key', ds_info['name']) matches = self.file_type_matches(ds_info['file_type']) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != nadir_resolution: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded new_info['resolution'] = nadir_resolution if self._is_polar() and this_coords is None: new_info['coordinates'] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # add new datasets for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { 'file_type': self.filetype_info['file_type'], 'resolution': nadir_resolution, 'name': var_name, } if self._is_polar(): ds_info['coordinates'] = ['longitude', 'latitude'] yield True, ds_info def get_shape(self, dataset_id, ds_info): var_name = ds_info.get('file_key', dataset_id.name) return self[var_name + '/shape'] def get_metadata(self, data_arr, ds_info): i = {} i.update(data_arr.attrs) i.update(ds_info) flag_meanings = i.get('flag_meanings') if not i.get('SCALED', 1) and not flag_meanings: i['flag_meanings'] = '' i.setdefault('flag_values', [None]) u = i.get('units') if u in CF_UNITS: # CF compliance i['units'] = CF_UNITS[u] i['sensor'] = sensor = self.get_sensor(self['/attr/sensor']) platform = self.get_platform(self['/attr/platform']) i['platform'] = i['platform_name'] = platform 
i['resolution'] = i.get('resolution') or self.get_nadir_resolution(i['sensor']) rps = self.get_rows_per_scan(sensor) if rps: i['rows_per_scan'] = rps i['reader'] = 'clavrx' return i def get_dataset(self, dataset_id, ds_info): var_name = ds_info.get('file_key', dataset_id.name) data = self[var_name] if dataset_id.resolution: data.attrs['resolution'] = dataset_id.resolution data.attrs = self.get_metadata(data, ds_info) fill = data.attrs.pop('_FillValue', None) factor = data.attrs.pop('scale_factor', None) offset = data.attrs.pop('add_offset', None) valid_range = data.attrs.pop('valid_range', None) if factor is not None and offset is not None: def scale_inplace(data): data *= factor data += offset return data else: def scale_inplace(data): return data data = data.where(data != fill) scale_inplace(data) if valid_range is not None: valid_min, valid_max = scale_inplace(valid_range[0]), scale_inplace(valid_range[1]) data = data.where((data >= valid_min) & (data <= valid_max)) data.attrs['valid_min'], data.attrs['valid_max'] = valid_min, valid_max return data @staticmethod def _area_extent(x, y, h): x_l = h * x[0] x_r = h * x[-1] y_l = h * y[-1] y_u = h * y[0] ncols = x.shape[0] nlines = y.shape[0] x_half = (x_r - x_l) / (ncols - 1) / 2. y_half = (y_u - y_l) / (nlines - 1) / 2. area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) return area_extent, ncols, nlines @staticmethod def _read_pug_fixed_grid(projection, distance_multiplier=1.0): """Read from recent PUG format, where axes are in meters """ a = projection.semi_major_axis h = projection.perspective_point_height b = projection.semi_minor_axis lon_0 = projection.longitude_of_projection_origin sweep_axis = projection.sweep_angle_axis[0] proj_dict = {'a': float(a) * distance_multiplier, 'b': float(b) * distance_multiplier, 'lon_0': float(lon_0), 'h': float(h) * distance_multiplier, 'proj': 'geos', 'units': 'm', 'sweep': sweep_axis} return proj_dict def _find_input_nc(self, l1b_base): dirname = os.path.split(self.filename)[0] glob_pat = os.path.join(dirname, l1b_base + '*R20*.nc') LOG.debug("searching for {0}".format(glob_pat)) l1b_filenames = list(glob(glob_pat)) if not l1b_filenames: raise IOError("Could not find navigation donor for {0}" " in same directory as CLAVR-x data".format(l1b_base)) LOG.debug('Candidate nav donors: {0}'.format(repr(l1b_filenames))) return l1b_filenames[0] def _read_axi_fixed_grid(self, l1b_attr): """CLAVR-x does not transcribe fixed grid parameters to its output We have to recover that information from the original input file, which is partially named as L1B attribute example attributes found in L2 CLAVR-x files: sensor = "AHI" ; platform = "HIM8" ; FILENAME = "clavrx_H08_20180719_1300.level2.hdf" ; L1B = "clavrx_H08_20180719_1300" ; """ LOG.debug("looking for corresponding input file for {0}" " to act as fixed grid navigation donor".format(l1b_attr)) l1b_path = self._find_input_nc(l1b_attr) LOG.info("Since CLAVR-x does not include fixed-grid parameters," " using input file {0} as donor".format(l1b_path)) l1b = netCDF4.Dataset(l1b_path) proj = None proj_var = l1b.variables.get("Projection", None) if proj_var is not None: # hsd2nc input typically used by CLAVR-x uses old-form km for axes/height LOG.debug("found hsd2nc-style draft PUG fixed grid specification") proj = self._read_pug_fixed_grid(proj_var, 1000.0) if proj is None: # most likely to come into play for ABI cases proj_var = l1b.variables.get("goes_imager_projection", None) if proj_var is not None: LOG.debug("found cmip-style final PUG fixed 
grid specification") proj = self._read_pug_fixed_grid(proj_var) if not proj: raise ValueError("Unable to recover projection information" " for {0}".format(self.filename)) h = float(proj['h']) x, y = l1b['x'], l1b['y'] area_extent, ncols, nlines = self._area_extent(x, y, h) # LOG.debug(repr(proj)) # LOG.debug(repr(area_extent)) area = geometry.AreaDefinition( 'ahi_geos', "AHI L2 file area", 'ahi_geos', proj, ncols, nlines, np.asarray(area_extent)) return area def _is_polar(self): l1b_att, inst_att = (str(self.file_content.get('/attr/L1B', None)), str(self.file_content.get('/attr/sensor', None))) return (inst_att != 'AHI') or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXFileHandler, self).get_area_def(key) l1b_att = str(self.file_content.get('/attr/L1B', None)) return self._read_axi_fixed_grid(l1b_att) satpy-0.20.0/satpy/readers/electrol_hrit.py000066400000000000000000000350201362525524100207340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HRIT format reader. 
References: ELECTRO-L GROUND SEGMENT MSU-GS INSTRUMENT, LRIT/HRIT Mission Specific Implementation, February 2012 """ import logging from datetime import datetime import numpy as np import xarray as xr from satpy.readers._geos_area import get_area_extent, get_area_definition from satpy.readers.hrit_base import (HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function, time_cds_short) logger = logging.getLogger('hrit_electrol') # goms implementation: key_header = np.dtype([('key_number', 'u1'), ('seed', '>f8')]) segment_identification = np.dtype([('GP_SC_ID', '>i2'), ('spectral_channel_id', '>i1'), ('segment_sequence_number', '>u2'), ('planned_start_segment_number', '>u2'), ('planned_end_segment_number', '>u2'), ('data_field_representation', '>i1')]) image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), ('line_mean_acquisition', [('days', '>u2'), ('milliseconds', '>u4')]), ('line_validity', 'u1'), ('line_radiometric_quality', 'u1'), ('line_geometric_quality', 'u1')]) goms_variable_length_headers = { image_segment_line_quality: 'image_segment_line_quality'} goms_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text'} goms_hdr_map = base_hdr_map.copy() goms_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('X', '>f8', (8, )), ('Y', '>f8', (8, )), ('Z', '>f8', (8, )), ('VX', '>f8', (8, )), ('VY', '>f8', (8, )), ('VZ', '>f8', (8, ))]) attitude_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('XofSpinAxis', '>f8', (8, )), ('YofSpinAxis', '>f8', (8, )), ('ZofSpinAxis', '>f8', (8, ))]) cuc_time = np.dtype([('coarse', 'u1', (4, )), ('fine', 'u1', (3, ))]) time_cds_expanded = np.dtype([('days', '>u2'), ('milliseconds', '>u4'), ('microseconds', '>u2'), ('nanoseconds', '>u2')]) satellite_status = np.dtype([("TagType", " 16777216: lut = lut.astype(np.float64) else: lut = lut.astype(np.float32) lut /= 1000 lut[0] = np.nan # Dask/XArray don't support indexing in 2D (yet). res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.where(data > 0) return res def get_area_def(self, dsid): """Get the area definition of the band.""" pdict = {} pdict['cfac'] = np.int32(self.mda['cfac']) pdict['lfac'] = np.int32(self.mda['lfac']) pdict['coff'] = np.float32(self.mda['coff']) pdict['loff'] = np.float32(self.mda['loff']) pdict['a'] = 6378169.00 pdict['b'] = 6356583.80 pdict['h'] = 35785831.00 pdict['scandir'] = 'N2S' pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] pdict['nlines'] = int(self.mda['number_of_lines']) pdict['ncols'] = int(self.mda['number_of_columns']) pdict['loff'] = pdict['nlines'] - pdict['loff'] pdict['a_name'] = 'geosgoms' pdict['a_desc'] = 'Electro-L/GOMS channel area' pdict['p_id'] = 'goms' area_extent = get_area_extent(pdict) area = get_area_definition(pdict, area_extent) self.area = area return area satpy-0.20.0/satpy/readers/eps_l1b.py000066400000000000000000000435601362525524100174320ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2020 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for eps level 1b data. Uses xml files as a format description.""" import logging import os import numpy as np import xarray as xr import dask.array as da from dask.delayed import delayed from pyresample.geometry import SwathDefinition from satpy.config import CONFIG_PATH from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.xmlformat import XMLFormat from satpy import CHUNK_SIZE logger = logging.getLogger(__name__) C1 = 1.191062e-05 # mW/(m2*sr*cm-4) C2 = 1.4387863 # K/cm-1 def radiance_to_bt(arr, wc_, a__, b__): """Convert to BT.""" return a__ + b__ * (C2 * wc_ / (da.log(1 + (C1 * (wc_ ** 3) / arr)))) def radiance_to_refl(arr, solar_flux): """Convert to reflectances.""" return arr * np.pi * 100.0 / solar_flux record_class = ["Reserved", "mphr", "sphr", "ipr", "geadr", "giadr", "veadr", "viadr", "mdr"] def read_records(filename): """Read *filename* without scaling it afterwards.""" form = XMLFormat(os.path.join(CONFIG_PATH, "eps_avhrrl1b_6.5.xml")) grh_dtype = np.dtype([("record_class", "|i1"), ("INSTRUMENT_GROUP", "|i1"), ("RECORD_SUBCLASS", "|i1"), ("RECORD_SUBCLASS_VERSION", "|i1"), ("RECORD_SIZE", ">u4"), ("RECORD_START_TIME", "S6"), ("RECORD_STOP_TIME", "S6")]) max_lines = np.floor((CHUNK_SIZE ** 2) / 2048) dtypes = [] cnt = 0 counts = [] classes = [] prev = None with open(filename, "rb") as fdes: while True: grh = np.fromfile(fdes, grh_dtype, 1) if grh.size == 0: break rec_class = record_class[int(grh["record_class"])] sub_class = grh["RECORD_SUBCLASS"][0] expected_size = int(grh["RECORD_SIZE"]) bare_size = expected_size - grh_dtype.itemsize try: the_type = form.dtype((rec_class, sub_class)) # the_descr = grh_dtype.descr + the_type.descr except KeyError: the_type = np.dtype([('unknown', 'V%d' % bare_size)]) the_descr = grh_dtype.descr + the_type.descr the_type = np.dtype(the_descr) if the_type.itemsize < expected_size: padding = [('unknown%d' % cnt, 'V%d' % (expected_size - the_type.itemsize))] cnt += 1 the_descr += padding new_dtype = np.dtype(the_descr) key = (rec_class, sub_class) if key == prev: counts[-1] += 1 else: dtypes.append(new_dtype) counts.append(1) classes.append(key) prev = key fdes.seek(expected_size - grh_dtype.itemsize, 1) sections = {} offset = 0 for dtype, count, rec_class in zip(dtypes, counts, classes): fdes.seek(offset) if rec_class == ('mdr', 2): record = da.from_array(np.memmap(fdes, mode='r', dtype=dtype, shape=count, offset=offset), chunks=(max_lines,)) else: record = np.fromfile(fdes, dtype=dtype, count=count) offset += dtype.itemsize * count if rec_class in sections: logger.debug('Multiple records for ', str(rec_class)) sections[rec_class] = np.hstack((sections[rec_class], record)) else: sections[rec_class] = record return sections, form def create_xarray(arr): """Create xarray with correct dimensions.""" res = arr res = xr.DataArray(res, dims=['y', 'x']) return res class EPSAVHRRFile(BaseFileHandler): """Eps level 1b reader for AVHRR data.""" spacecrafts 
= {"M01": "Metop-B", "M02": "Metop-A", "M03": "Metop-C", } sensors = {"AVHR": "avhrr-3"} def __init__(self, filename, filename_info, filetype_info): """Initialize FileHandler.""" super(EPSAVHRRFile, self).__init__( filename, filename_info, filetype_info) self.lons, self.lats = None, None self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen = None, None, None, None self.area = None self.three_a_mask, self.three_b_mask = None, None self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] self.form = None self.scanlines = None self.pixels = None self.sections = None def _read_all(self): logger.debug("Reading %s", self.filename) self.sections, self.form = read_records(self.filename) self.scanlines = self['TOTAL_MDR'] if self.scanlines != len(self.sections[('mdr', 2)]): logger.warning("Number of declared records doesn't match number of scanlines in the file.") self.pixels = self["EARTH_VIEWS_PER_SCANLINE"] def __getitem__(self, key): """Get value for given key.""" for altkey in self.form.scales.keys(): try: try: return self.sections[altkey][key] * self.form.scales[altkey][key] except TypeError: val = self.sections[altkey][key].item().decode().split("=")[1] try: return float(val) * self.form.scales[altkey][key].item() except ValueError: return val.strip() except (KeyError, ValueError): continue raise KeyError("No matching value for " + str(key)) def keys(self): """List of reader's keys.""" keys = [] for val in self.form.scales.values(): keys += val.dtype.fields.keys() return keys @delayed(nout=2, pure=True) def _get_full_lonlats(self, lons, lats): nav_sample_rate = self["NAV_SAMPLE_RATE"] if nav_sample_rate == 20 and self.pixels == 2048: from geotiepoints import metop20kmto1km return metop20kmto1km(lons, lats) else: raise NotImplementedError("Lon/lat expansion not implemented for " + "sample rate = " + str(nav_sample_rate) + " and earth views = " + str(self.pixels)) def get_full_lonlats(self): """Get the interpolated lons/lats.""" if self.lons is not None and self.lats is not None: return self.lons, self.lats raw_lats = np.hstack((self["EARTH_LOCATION_FIRST"][:, [0]], self["EARTH_LOCATIONS"][:, :, 0], self["EARTH_LOCATION_LAST"][:, [0]])) raw_lons = np.hstack((self["EARTH_LOCATION_FIRST"][:, [1]], self["EARTH_LOCATIONS"][:, :, 1], self["EARTH_LOCATION_LAST"][:, [1]])) self.lons, self.lats = self._get_full_lonlats(raw_lons, raw_lats) self.lons = da.from_delayed(self.lons, dtype=self["EARTH_LOCATIONS"].dtype, shape=(self.scanlines, self.pixels)) self.lats = da.from_delayed(self.lats, dtype=self["EARTH_LOCATIONS"].dtype, shape=(self.scanlines, self.pixels)) return self.lons, self.lats @delayed(nout=4, pure=True) def _get_full_angles(self, solar_zenith, sat_zenith, solar_azimuth, sat_azimuth): nav_sample_rate = self["NAV_SAMPLE_RATE"] if nav_sample_rate == 20 and self.pixels == 2048: from geotiepoints import metop20kmto1km # Note: interpolation asumes lat values values between -90 and 90 # Solar and satellite zenith is between 0 and 180. 
solar_zenith -= 90 sun_azi, sun_zen = metop20kmto1km( solar_azimuth, solar_zenith) sun_zen += 90 sat_zenith -= 90 sat_azi, sat_zen = metop20kmto1km( sat_azimuth, sat_zenith) sat_zen += 90 return sun_azi, sun_zen, sat_azi, sat_zen else: raise NotImplementedError("Angles expansion not implemented for " + "sample rate = " + str(nav_sample_rate) + " and earth views = " + str(self.pixels)) def get_full_angles(self): """Get the interpolated lons/lats.""" if (self.sun_azi is not None and self.sun_zen is not None and self.sat_azi is not None and self.sat_zen is not None): return self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen solar_zenith = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [0]], self["ANGULAR_RELATIONS"][:, :, 0], self["ANGULAR_RELATIONS_LAST"][:, [0]])) sat_zenith = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [1]], self["ANGULAR_RELATIONS"][:, :, 1], self["ANGULAR_RELATIONS_LAST"][:, [1]])) solar_azimuth = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [2]], self["ANGULAR_RELATIONS"][:, :, 2], self["ANGULAR_RELATIONS_LAST"][:, [2]])) sat_azimuth = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [3]], self["ANGULAR_RELATIONS"][:, :, 3], self["ANGULAR_RELATIONS_LAST"][:, [3]])) self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen = self._get_full_angles(solar_zenith, sat_zenith, solar_azimuth, sat_azimuth) self.sun_azi = da.from_delayed(self.sun_azi, dtype=self["ANGULAR_RELATIONS"].dtype, shape=(self.scanlines, self.pixels)) self.sun_zen = da.from_delayed(self.sun_zen, dtype=self["ANGULAR_RELATIONS"].dtype, shape=(self.scanlines, self.pixels)) self.sat_azi = da.from_delayed(self.sat_azi, dtype=self["ANGULAR_RELATIONS"].dtype, shape=(self.scanlines, self.pixels)) self.sat_zen = da.from_delayed(self.sat_zen, dtype=self["ANGULAR_RELATIONS"].dtype, shape=(self.scanlines, self.pixels)) return self.sun_azi, self.sun_zen, self.sat_azi, self.sat_zen def get_bounding_box(self): """Get bounding box.""" if self.sections is None: self._read_all() lats = np.hstack([self["EARTH_LOCATION_FIRST"][0, [0]], self["EARTH_LOCATION_LAST"][0, [0]], self["EARTH_LOCATION_LAST"][-1, [0]], self["EARTH_LOCATION_FIRST"][-1, [0]]]) lons = np.hstack([self["EARTH_LOCATION_FIRST"][0, [1]], self["EARTH_LOCATION_LAST"][0, [1]], self["EARTH_LOCATION_LAST"][-1, [1]], self["EARTH_LOCATION_FIRST"][-1, [1]]]) return lons.ravel(), lats.ravel() def get_dataset(self, key, info): """Get calibrated channel data.""" if self.sections is None: self._read_all() if key.name in ['longitude', 'latitude']: lons, lats = self.get_full_lonlats() if key.name == 'longitude': dataset = create_xarray(lons) else: dataset = create_xarray(lats) elif key.name in ['solar_zenith_angle', 'solar_azimuth_angle', 'satellite_zenith_angle', 'satellite_azimuth_angle']: sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles() if key.name == 'solar_zenith_angle': dataset = create_xarray(sun_zen) elif key.name == 'solar_azimuth_angle': dataset = create_xarray(sun_azi) if key.name == 'satellite_zenith_angle': dataset = create_xarray(sat_zen) elif key.name == 'satellite_azimuth_angle': dataset = create_xarray(sat_azi) else: mask = None if key.calibration == 'counts': raise ValueError('calibration=counts is not supported! 
' + 'This reader cannot return counts') elif key.calibration not in ['reflectance', 'brightness_temperature', 'radiance']: raise ValueError('calibration type ' + str(key.calibration) + ' is not supported!') if key.name in ['3A', '3a'] and self.three_a_mask is None: self.three_a_mask = ((self["FRAME_INDICATOR"] & 2 ** 16) != 2 ** 16) if key.name in ['3B', '3b'] and self.three_b_mask is None: self.three_b_mask = ((self["FRAME_INDICATOR"] & 2 ** 16) != 0) if key.name not in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: logger.info("Can't load channel in eps_l1b: " + str(key.name)) return if key.name == "1": if key.calibration == 'reflectance': array = radiance_to_refl(self["SCENE_RADIANCES"][:, 0, :], self["CH1_SOLAR_FILTERED_IRRADIANCE"]) else: array = self["SCENE_RADIANCES"][:, 0, :] if key.name == "2": if key.calibration == 'reflectance': array = radiance_to_refl(self["SCENE_RADIANCES"][:, 1, :], self["CH2_SOLAR_FILTERED_IRRADIANCE"]) else: array = self["SCENE_RADIANCES"][:, 1, :] if key.name.lower() == "3a": if key.calibration == 'reflectance': array = radiance_to_refl(self["SCENE_RADIANCES"][:, 2, :], self["CH3A_SOLAR_FILTERED_IRRADIANCE"]) else: array = self["SCENE_RADIANCES"][:, 2, :] mask = np.empty(array.shape, dtype=bool) mask[:, :] = self.three_a_mask[:, np.newaxis] if key.name.lower() == "3b": if key.calibration == 'brightness_temperature': array = radiance_to_bt(self["SCENE_RADIANCES"][:, 2, :], self["CH3B_CENTRAL_WAVENUMBER"], self["CH3B_CONSTANT1"], self["CH3B_CONSTANT2_SLOPE"]) else: array = self["SCENE_RADIANCES"][:, 2, :] mask = np.empty(array.shape, dtype=bool) mask[:, :] = self.three_b_mask[:, np.newaxis] if key.name == "4": if key.calibration == 'brightness_temperature': array = radiance_to_bt(self["SCENE_RADIANCES"][:, 3, :], self["CH4_CENTRAL_WAVENUMBER"], self["CH4_CONSTANT1"], self["CH4_CONSTANT2_SLOPE"]) else: array = self["SCENE_RADIANCES"][:, 3, :] if key.name == "5": if key.calibration == 'brightness_temperature': array = radiance_to_bt(self["SCENE_RADIANCES"][:, 4, :], self["CH5_CENTRAL_WAVENUMBER"], self["CH5_CONSTANT1"], self["CH5_CONSTANT2_SLOPE"]) else: array = self["SCENE_RADIANCES"][:, 4, :] dataset = create_xarray(array) if mask is not None: dataset = dataset.where(~mask) dataset.attrs['platform_name'] = self.platform_name dataset.attrs['sensor'] = self.sensor_name dataset.attrs.update(info) dataset.attrs.update(key.to_dict()) return dataset def get_lonlats(self): """Get lonlats.""" if self.area is None: if self.lons is None or self.lats is None: self.lons, self.lats = self.get_full_lonlats() self.area = SwathDefinition(self.lons, self.lats) self.area.name = '_'.join([self.platform_name, str(self.start_time), str(self.end_time)]) return self.area @property def platform_name(self): """Get platform name.""" return self.spacecrafts[self["SPACECRAFT_ID"]] @property def sensor_name(self): """Get sensor name.""" return self.sensors[self["INSTRUMENT_ID"]] @property def start_time(self): """Get start time.""" # return datetime.strptime(self["SENSING_START"], "%Y%m%d%H%M%SZ") return self._start_time @property def end_time(self): """Get end time.""" # return datetime.strptime(self["SENSING_END"], "%Y%m%d%H%M%SZ") return self._end_time satpy-0.20.0/satpy/readers/eum_base.py000066400000000000000000000054341362525524100176630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for EUMETSAT satellite data.""" from datetime import datetime, timedelta import numpy as np # 6 bytes, 8 bytes, 10 bytes time_cds_short = [('Days', '>u2'), ('Milliseconds', '>u4')] time_cds = time_cds_short + [('Microseconds', '>u2')] time_cds_expanded = time_cds + [('Nanoseconds', '>u2')] issue_revision = [('Issue', np.uint16), ('Revision', np.uint16)] def timecds2datetime(tcds): """Convert time_cds-variables to datetime-object. Works both with a dictionary and a numpy record_array. """ days = int(tcds['Days']) milliseconds = int(tcds['Milliseconds']) try: microseconds = int(tcds['Microseconds']) except (KeyError, ValueError): microseconds = 0 try: microseconds += int(tcds['Nanoseconds']) / 1000. except (KeyError, ValueError): pass reference = datetime(1958, 1, 1) delta = timedelta(days=days, milliseconds=milliseconds, microseconds=microseconds) return reference + delta def recarray2dict(arr): """Convert numpy record array to a dictionary.""" res = {} tcds_types = [time_cds_short, time_cds, time_cds_expanded] for dtuple in arr.dtype.descr: key = dtuple[0] ntype = dtuple[1] data = arr[key] if ntype in tcds_types: if data.size > 1: res[key] = np.array([timecds2datetime(item) for item in data.ravel()]).reshape(data.shape) else: res[key] = timecds2datetime(data) elif isinstance(ntype, list): res[key] = recarray2dict(data) else: if data.size == 1: data = data[0] if ntype[:2] == '|S': # Python2 and Python3 handle strings differently try: data = data.decode() except ValueError: pass data = data.split(':')[0].strip() res[key] = data else: res[key] = data.squeeze() return res satpy-0.20.0/satpy/readers/fci_l1c_fdhsi.py000066400000000000000000000264361362525524100205650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to MTG-FCI-FDHSI L1C NetCDF files. This module defines the :class:`FCIFDHSIFileHandler` file handler, to be used for reading Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) Full Disk High Spectral Imagery (FDHSI) data. FCI will fly on the MTG Imager (MTG-I) series of satellites, scheduled to be launched in 2021 by the earliest. For more information about FCI, see `EUMETSAT`_. .. 
_EUMETSAT: https://www.eumetsat.int/website/home/Satellites/FutureSatellites/MeteosatThirdGeneration/MTGDesign/index.html#fci  # noqa: E501
"""

from __future__ import (division, absolute_import, print_function,
                        unicode_literals)

import logging
import numpy as np
import dask.array as da
import xarray as xr
from pyresample import geometry
from netCDF4 import default_fillvals

from .netcdf_utils import NetCDF4FileHandler

logger = logging.getLogger(__name__)


class FCIFDHSIFileHandler(NetCDF4FileHandler):
    """Class implementing the MTG FCI FDHSI file handler.

    This class implements the Meteosat Third Generation (MTG) Flexible
    Combined Imager (FCI) Full Disk High Spectral Imagery (FDHSI) reader.
    It is designed to be used through the :class:`~satpy.Scene`
    class using the :mod:`~satpy.Scene.load` method with the reader
    ``"fci_l1c_fdhsi"``.
    """

    def __init__(self, filename, filename_info, filetype_info):
        """Initialize file handler."""
        super(FCIFDHSIFileHandler, self).__init__(filename, filename_info,
                                                  filetype_info,
                                                  cache_var_size=10000,
                                                  cache_handle=True)
        logger.debug('Reading: {}'.format(self.filename))
        logger.debug('Start: {}'.format(self.start_time))
        logger.debug('End: {}'.format(self.end_time))

        self.cache = {}

    @property
    def start_time(self):
        """Get start time."""
        return self.filename_info['start_time']

    @property
    def end_time(self):
        """Get end time."""
        return self.filename_info['end_time']

    def get_dataset(self, key, info=None):
        """Load a dataset."""
        logger.debug('Reading {} from {}'.format(key.name, self.filename))
        # Get the dataset
        # Get metadata for given dataset
        measured, root = self.get_channel_dataset(key.name)
        radlab = measured + "/effective_radiance"
        data = self[radlab]

        attrs = data.attrs.copy()
        info = info.copy()
        fv = attrs.pop(
            "FillValue",
            default_fillvals.get(data.dtype.str[1:], np.nan))
        vr = attrs.pop("valid_range", [-np.inf, np.inf])
        if key.calibration == "counts":
            attrs["_FillValue"] = fv
            nfv = fv
        else:
            nfv = np.nan
        data = data.where(data >= vr[0], nfv)
        data = data.where(data <= vr[1], nfv)
        if key.calibration == "counts":
            # from package description, this just means not applying add_offset
            # and scale_factor
            attrs.pop("scale_factor")
            attrs.pop("add_offset")
            data.attrs["units"] = "1"
            res = data
        else:
            data = (data * attrs.pop("scale_factor", 1) +
                    attrs.pop("add_offset", 0))

            if key.calibration in ("brightness_temperature", "reflectance"):
                res = self.calibrate(data, key, measured, root)
            else:
                res = data
                data.attrs["units"] = attrs["units"]
        # pre-calibration units no longer apply
        info.pop("units")
        attrs.pop("units")

        self.nlines, self.ncols = res.shape
        res.attrs.update(key.to_dict())
        res.attrs.update(info)
        res.attrs.update(attrs)
        return res

    def get_channel_dataset(self, channel):
        """Get channel dataset."""
        root_group = 'data/{}'.format(channel)
        group = 'data/{}/measured'.format(channel)
        return group, root_group

    def calc_area_extent(self, key):
        """Calculate area extent for a dataset."""
        # Calculate the area extent of the swath based on start line and column
        # information, total number of segments and channel resolution
        # numbers from Package Description, Table 8
        xyres = {500: 22272, 1000: 11136, 2000: 5568}
        chkres = xyres[key.resolution]

        # Get metadata for given dataset
        measured, root = self.get_channel_dataset(key.name)
        # Get start/end line and column of loaded swath.
        self.startline = int(self[measured + "/start_position_row"])
        self.endline = int(self[measured + "/end_position_row"])
        self.startcol = int(self[measured + "/start_position_column"])
        self.endcol = int(self[measured + "/end_position_column"])
        self.nlines, self.ncols = self[measured + "/effective_radiance/shape"]

        logger.debug('Channel {} resolution: {}'.format(key.name, chkres))
        logger.debug('Row/Cols: {} / {}'.format(self.nlines, self.ncols))
        logger.debug('Start/End row: {} / {}'.format(self.startline, self.endline))
        logger.debug('Start/End col: {} / {}'.format(self.startcol, self.endcol))
        # total_segments = 70

        # Calculate full globe line extent
        max_y = 5432229.9317116784
        min_y = -5429229.5285458621
        full_y = max_y + abs(min_y)
        # Single swath line extent
        res_y = full_y / chkres  # Extent per pixel resolution
        startl = min_y + res_y * self.startline - 0.5 * (res_y)
        endl = min_y + res_y * self.endline + 0.5 * (res_y)
        logger.debug('Start / end extent: {} / {}'.format(startl, endl))

        chk_extent = (-5432229.9317116784, endl,
                      5429229.5285458621, startl)
        return chk_extent

    _fallback_area_def = {
        "reference_altitude": 35786400,  # metre
    }

    def get_area_def(self, key, info=None):
        """Calculate on-the-fly area definition for 0 degree geos-projection for a dataset."""
        # TODO: projection information is hard coded for the 0 degree geos
        # projection. The test dataset doesn't provide the values in the file
        # container; only fill values are inserted.
        a = float(self["state/processor/earth_equatorial_radius"])
        b = float(self["state/processor/earth_polar_radius"])
        h = float(self["state/processor/reference_altitude"])
        lon_0 = float(self["state/processor/projection_origin_longitude"])
        if h == default_fillvals[
                self["state/processor/reference_altitude"].dtype.str[1:]]:
            logger.warning(
                "Reference altitude in {:s} set to "
                "fill value, using {:d}".format(
                    self.filename,
                    self._fallback_area_def["reference_altitude"]))
            h = self._fallback_area_def["reference_altitude"]

        # Channel dependent swath resolution
        area_extent = self.calc_area_extent(key)
        logger.debug('Calculated area extent: {}'
                     .format(''.join(str(area_extent))))

        proj_dict = {'a': float(a),
                     'b': float(b),
                     'lon_0': float(lon_0),
                     'h': float(h),
                     'proj': 'geos',
                     'units': 'm'}

        area = geometry.AreaDefinition(
            'some_area_name',
            "On-the-fly area",
            'geosfci',
            proj_dict,
            self.ncols,
            self.nlines,
            area_extent)

        self.area = area
        return area

    def calibrate(self, data, key, measured, root):
        """Calibrate data."""
        if key.calibration == 'brightness_temperature':
            data = self._ir_calibrate(data, measured, root)
        elif key.calibration == 'reflectance':
            data = self._vis_calibrate(data, measured)
        else:
            raise RuntimeError(
                "Received unknown calibration key.
Expected " "'brightness_temperature' or 'reflectance', got " + key.calibration) return data def _ir_calibrate(self, radiance, measured, root): """IR channel calibration.""" coef = self[measured + "/radiance_unit_conversion_coefficient"] wl_c = self[root + "/central_wavelength_actual"] a = self[measured + "/radiance_to_bt_conversion_coefficient_a"] b = self[measured + "/radiance_to_bt_conversion_coefficient_b"] c1 = self[measured + "/radiance_to_bt_conversion_constant_c1"] c2 = self[measured + "/radiance_to_bt_conversion_constant_c2"] for v in (coef, wl_c, a, b, c1, c2): if v == v.attrs.get("FillValue", default_fillvals.get(v.dtype.str[1:])): logger.error( "{:s} set to fill value, cannot produce " "brightness temperatures for {:s}.".format( v.attrs.get("long_name", "at least one necessary coefficient"), root)) return xr.DataArray( da.full(shape=radiance.shape, chunks=radiance.chunks, fill_value=np.nan), dims=radiance.dims, coords=radiance.coords, attrs=radiance.attrs) Lv = radiance * coef vc = 1e6/wl_c # from wl in um to wn in m^-1 nom = c2 * vc denom = a * np.log(1 + (c1 * vc**3) / Lv) res = nom / denom - b / a res.attrs["units"] = "K" return res def _vis_calibrate(self, radiance, measured): """VIS channel calibration.""" # radiance to reflectance taken as in mipp/xrit/MSG.py # again FCI User Guide is not clear on how to do this cesilab = measured + "/channel_effective_solar_irradiance" cesi = self[cesilab] if cesi == cesi.attrs.get( "FillValue", default_fillvals.get(cesi.dtype.str[1:])): logger.error( "channel effective solar irradiance set to fill value, " "cannot produce reflectance for {:s}.".format(measured)) return xr.DataArray( da.full(shape=radiance.shape, chunks=radiance.chunks, fill_value=np.nan), dims=radiance.dims, coords=radiance.coords, attrs=radiance.attrs) sirr = float(cesi) res = radiance / sirr * 100 res.attrs["units"] = "%" return res satpy-0.20.0/satpy/readers/file_handlers.py000066400000000000000000000252011362525524100206740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Interface for BaseFileHandlers.""" from abc import ABCMeta import numpy as np import six from pathlib import PurePath from pyresample.geometry import SwathDefinition from satpy.dataset import combine_metadata class BaseFileHandler(six.with_metaclass(ABCMeta, object)): """Base file handler.""" def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" if isinstance(filename, PurePath): self.filename = str(filename) else: self.filename = filename self.navigation_reader = None self.filename_info = filename_info self.filetype_info = filetype_info self.metadata = filename_info.copy() def __str__(self): """Customize __str__.""" return "<{}: '{}'>".format(self.__class__.__name__, self.filename) def __repr__(self): """Customize __repr__.""" return str(self) def get_dataset(self, dataset_id, ds_info): """Get dataset.""" raise NotImplementedError def get_area_def(self, dsid): """Get area definition.""" raise NotImplementedError def get_bounding_box(self): """Get the bounding box of the files, as a (lons, lats) tuple. The tuple return should a lons and lats list of coordinates traveling clockwise around the points available in the file. """ raise NotImplementedError @staticmethod def _combine(infos, func, *keys): res = {} for key in keys: if key in infos[0]: res[key] = func([i[key] for i in infos]) return res def combine_info(self, all_infos): """Combine metadata for multiple datasets. When loading data from multiple files it can be non-trivial to combine things like start_time, end_time, start_orbit, end_orbit, etc. By default this method will produce a dictionary containing all values that were equal across **all** provided info dictionaries. Additionally it performs the logical comparisons to produce the following if they exist: - start_time - end_time - start_orbit - end_orbit - satellite_altitude - satellite_latitude - satellite_longitude - orbital_parameters Also, concatenate the areas. 
""" combined_info = combine_metadata(*all_infos) new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit') new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit')) new_dict.update(self._combine(all_infos, np.mean, 'satellite_longitude', 'satellite_latitude', 'satellite_altitude')) # Average orbital parameters orb_params = [info.get('orbital_parameters', {}) for info in all_infos] if all(orb_params): # Collect all available keys orb_params_comb = {} for d in orb_params: orb_params_comb.update(d) # Average known keys keys = ['projection_longitude', 'projection_latitude', 'projection_altitude', 'satellite_nominal_longitude', 'satellite_nominal_latitude', 'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude', 'nadir_longitude', 'nadir_latitude'] orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) new_dict['orbital_parameters'] = orb_params_comb try: area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]), lats=np.ma.vstack([info['area'].lats for info in all_infos])) area.name = '_'.join([info['area'].name for info in all_infos]) combined_info['area'] = area except KeyError: pass new_dict.update(combined_info) return new_dict @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def sensor_names(self): """List of sensors represented in this file.""" raise NotImplementedError def file_type_matches(self, ds_ftype): """Match file handler's type to this dataset's file type. Args: ds_ftype (str or list): File type or list of file types that a dataset is configured to be loaded from. Returns: ``True`` if this file handler object's type matches the dataset's file type(s), ``False`` otherwise. """ if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']: return True elif self.filetype_info['file_type'] in ds_ftype: return True return None def available_datasets(self, configured_datasets=None): """Get information of available datasets in this file. This is used for dynamically specifying what datasets are available from a file in addition to what's configured in a YAML configuration file. Note that this method is called for each file handler for each file type; care should be taken when possible to reduce the amount of redundant datasets produced. This method should **not** update values of the dataset information dictionary **unless** this file handler has a matching file type (the data could be loaded from this object in the future) and at least **one** :class:`satpy.dataset.DatasetID` key is also modified. Otherwise, this file type may override the information provided by a more preferred file type (as specified in the YAML file). It is recommended that any non-ID metadata be updated during the :meth:`BaseFileHandler.get_dataset` part of loading. This method is not guaranteed that it will be called before any other file type's handler. The availability "boolean" not being ``None`` does not mean that a file handler called later can't provide an additional dataset, but it must provide more identifying (DatasetID) information to do so and should yield its new dataset in addition to the previous one. Args: configured_datasets (list): Series of (bool or None, dict) in the same way as is returned by this method (see below). The bool is whether or not the dataset is available from at least one of the current file handlers. 
It can also be ``None`` if no file handler before us knows how to handle it. The dictionary is existing dataset metadata. The dictionaries are typically provided from a YAML configuration file and may be modified, updated, or used as a "template" for additional available datasets. This argument could be the result of a previous file handler's implementation of this method. Returns: Iterator of (bool or None, dict) pairs where dict is the dataset's metadata. If the dataset is available in the current file type then the boolean value should be ``True``, ``False`` if we **know** about the dataset but it is unavailable, or ``None`` if this file object is not responsible for it. Example 1 - Supplement existing configured information:: def available_datasets(self, configured_datasets=None): "Add information to configured datasets." # we know the actual resolution res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: # we are meant to handle this dataset (file type matches) # and the information we can provide isn't available yet new_info = ds_info.copy() new_info['resolution'] = res yield True, new_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info Example 2 - Add dynamic datasets from the file:: def available_datasets(self, configured_datasets=None): "Add information to configured datasets." # pass along existing datasets for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info # get dynamic variables known to this file (that we created) for var_name, val in self.dynamic_variables.items(): ds_info = { 'file_type': self.filetype_info['file_type'], 'resolution': 1000, 'name': var_name, } yield True, ds_info """ for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue yield self.file_type_matches(ds_info['file_type']), ds_info satpy-0.20.0/satpy/readers/generic_image.py000066400000000000000000000074331362525524100206620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """ Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...). Returns a dataset without calibration. Includes coordinates if available in the file (e.g. geotiff).
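A minimal usage sketch (``example.tif`` is a placeholder file name)::

    from satpy import Scene

    scn = Scene(reader='generic_image', filenames=['example.tif'])
    scn.load(['image'])
    img = scn['image']  # 'bands' dim is L/LA/RGB/RGBA depending on the file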
""" import logging import rasterio import xarray as xr import dask.array as da import numpy as np from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE from pyresample import utils BANDS = {1: ['L'], 2: ['L', 'A'], 3: ['R', 'G', 'B'], 4: ['R', 'G', 'B', 'A']} logger = logging.getLogger(__name__) class GenericImageFileHandler(BaseFileHandler): """Handle reading of generic image files.""" def __init__(self, filename, filename_info, filetype_info): """Initialize filehandler.""" super(GenericImageFileHandler, self).__init__( filename, filename_info, filetype_info) self.finfo = filename_info try: self.finfo['end_time'] = self.finfo['start_time'] except KeyError: pass self.finfo['filename'] = self.filename self.file_content = {} self.area = None self.read() def read(self): """Read the image.""" dataset = rasterio.open(self.finfo['filename']) # Create area definition if hasattr(dataset, 'crs') and dataset.crs is not None: self.area = utils.get_area_def_from_raster(dataset) data = xr.open_rasterio(dataset, chunks=(1, CHUNK_SIZE, CHUNK_SIZE)) attrs = data.attrs.copy() # Rename to Satpy convention data = data.rename({'band': 'bands'}) # Rename bands to [R, G, B, A], or a subset of those data['bands'] = BANDS[data.bands.size] # Mask data if alpha channel is present try: data = mask_image_data(data) except ValueError as err: logger.warning(err) data.attrs = attrs self.file_content['image'] = data def get_area_def(self, dsid): """Get area definition of the image.""" if self.area is None: raise NotImplementedError("No CRS information available from image") return self.area @property def start_time(self): """Return start time.""" return self.finfo['start_time'] @property def end_time(self): """Return end time.""" return self.finfo['end_time'] def get_dataset(self, key, info): """Get a dataset from the file.""" logger.debug("Reading %s.", key) return self.file_content[key.name] def mask_image_data(data): """Mask image data if alpha channel is present.""" if data.bands.size in (2, 4): if not np.issubdtype(data.dtype, np.integer): raise ValueError("Only integer datatypes can be used as a mask.") mask = data.data[-1, :, :] == np.iinfo(data.dtype).min data = data.astype(np.float64) masked_data = da.stack([da.where(mask, np.nan, data.data[i, :, :]) for i in range(data.shape[0])]) data.data = masked_data data = data.sel(bands=BANDS[data.bands.size - 1]) return data satpy-0.20.0/satpy/readers/geocat.py000066400000000000000000000255471362525524100173540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to GEOCAT HDF4 or NetCDF4 products. Note: GEOCAT files do not currently have projection information or precise pixel resolution information. Additionally the longitude and latitude arrays are stored as 16-bit integers which causes loss of precision. 
For this reason the lon/lats can't be used as a reliable coordinate system to calculate the projection X/Y coordinates. Until GEOCAT adds projection information and X/Y coordinate arrays, this reader will estimate the geostationary area the best it can. It currently takes a single lon/lat point as reference and uses hardcoded resolution and projection information to calculate the area extents. """ import logging import numpy as np from pyproj import Proj from pyresample import geometry from pyresample.utils import proj4_str_to_dict from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 LOG = logging.getLogger(__name__) CF_UNITS = { 'none': '1', } # GEOCAT currently doesn't include projection information in it's files GEO_PROJS = { 'GOES-16': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs', 'GOES-17': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs', 'HIMAWARI-8': '+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs', } class GEOCATFileHandler(NetCDF4FileHandler): """GEOCAT netCDF4 file handler.""" sensors = { 'goes': 'goes_imager', 'himawari8': 'ahi', 'goes16': 'abi', # untested 'goesr': 'abi', # untested } platforms = { } resolutions = { 'abi': { 1: 1002.0086577437705, 2: 2004.0173154875411, }, 'ahi': { 1: 999.9999820317674, # assumption 2: 1999.999964063535, 4: 3999.99992812707, } } def get_sensor(self, sensor): """Get sensor.""" last_resort = None for k, v in self.sensors.items(): if k == sensor: return v elif k in sensor: last_resort = v if last_resort: return last_resort raise ValueError("Unknown sensor '{}'".format(sensor)) def get_platform(self, platform): """Get platform.""" for k, v in self.platforms.items(): if k in platform: return v return platform def _get_proj(self, platform, ref_lon): if platform == 'GOES-16' and -76. < ref_lon < -74.: # geocat file holds the *actual* subsatellite point, not the # projection (-75.2 actual versus -75 projection) ref_lon = -75. return GEO_PROJS[platform].format(lon_0=ref_lon) @property def sensor_names(self): """Get sensor names.""" return [self.get_sensor(self['/attr/Sensor_Name'])] @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def is_geo(self): """Check platform.""" platform = self.get_platform(self['/attr/Platform_Name']) return platform in GEO_PROJS @property def resolution(self): """Get resolution.""" elem_res = self['/attr/Element_Resolution'] return int(elem_res * 1000) def _calc_area_resolution(self, ds_res): elem_res = round(ds_res / 1000.) # mimic 'Element_Resolution' attribute from above sensor = self.get_sensor(self['/attr/Sensor_Name']) return self.resolutions.get(sensor, {}).get(int(elem_res), elem_res * 1000.) def available_datasets(self, configured_datasets=None): """Update information for or add datasets provided by this file. If this file handler can load a dataset then it will supplement the dataset info with the resolution and possibly coordinate datasets needed to load it. Otherwise it will continue passing the dataset information down the chain. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. 
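As a sketch, for a polar-orbiter file reporting a 4 km element resolution, a configured entry could be supplemented roughly like this (dataset and file type names are illustrative only)::

            # from YAML: {'name': 'cloud_phase', 'file_type': 'level2_file'}
            # yielded:   {'name': 'cloud_phase', 'file_type': 'level2_file',
            #             'resolution': 4000,
            #             'coordinates': ('pixel_longitude', 'pixel_latitude')}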
""" res = self.resolution coordinates = ('pixel_longitude', 'pixel_latitude') handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): this_res = ds_info.get('resolution') this_coords = ds_info.get('coordinates') # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get('file_key', ds_info['name']) matches = self.file_type_matches(ds_info['file_type']) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != res: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded new_info['resolution'] = res if not self.is_geo and this_coords is None: new_info['coordinates'] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # Provide new datasets for var_name, val in self.file_content.items(): if var_name in handled_variables: continue if isinstance(val, netCDF4.Variable): ds_info = { 'file_type': self.filetype_info['file_type'], 'resolution': res, 'name': var_name, } if not self.is_geo: ds_info['coordinates'] = coordinates yield True, ds_info def get_shape(self, dataset_id, ds_info): """Get shape.""" var_name = ds_info.get('file_key', dataset_id.name) return self[var_name + '/shape'] def _first_good_nav(self, lon_arr, lat_arr): if hasattr(lon_arr, 'mask'): good_indexes = np.nonzero(~lon_arr.mask) else: # no masked values found in auto maskandscale good_indexes = ([0], [0]) # nonzero returns (, ) return tuple(x[0] for x in good_indexes) def _get_extents(self, proj, res, lon_arr, lat_arr): p = Proj(proj) res = float(res) first_good = self._first_good_nav(lon_arr, lat_arr) one_x, one_y = p(lon_arr[first_good], lat_arr[first_good]) left_x = one_x - res * first_good[1] right_x = left_x + res * lon_arr.shape[1] top_y = one_y + res * first_good[0] bot_y = top_y - res * lon_arr.shape[0] half_x = res / 2. half_y = res / 2. 
return (left_x - half_x, bot_y - half_y, right_x + half_x, top_y + half_y) def _load_nav(self, name): nav = self[name] factor = self[name + '/attr/scale_factor'] offset = self[name + '/attr/add_offset'] fill = self[name + '/attr/_FillValue'] nav = nav[:] mask = nav == fill nav = np.ma.masked_array(nav * factor + offset, mask=mask) return nav[:] def get_area_def(self, dsid): """Get area definition.""" if not self.is_geo: raise NotImplementedError("Don't know how to get the Area Definition for this file") platform = self.get_platform(self['/attr/Platform_Name']) res = self._calc_area_resolution(dsid.resolution) proj = self._get_proj(platform, float(self['/attr/Subsatellite_Longitude'])) area_name = '{} {} Area at {}m'.format( platform, self.metadata.get('sector_id', ''), int(res)) lon = self._load_nav('pixel_longitude') lat = self._load_nav('pixel_latitude') extents = self._get_extents(proj, res, lon, lat) area_def = geometry.AreaDefinition( area_name, area_name, area_name, proj4_str_to_dict(proj), lon.shape[1], lon.shape[0], area_extent=extents, ) return area_def def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_name = ds_info.get('file_key', dataset_id.name) shape = self.get_shape(dataset_id, ds_info) info = getattr(self[var_name], 'attrs', {}) info['shape'] = shape info.update(ds_info) u = info.get('units') if u in CF_UNITS: # CF compliance info['units'] = CF_UNITS[u] info['sensor'] = self.get_sensor(self['/attr/Sensor_Name']) info['platform_name'] = self.get_platform(self['/attr/Platform_Name']) info['resolution'] = dataset_id.resolution if var_name == 'pixel_longitude': info['standard_name'] = 'longitude' elif var_name == 'pixel_latitude': info['standard_name'] = 'latitude' return info def get_dataset(self, dataset_id, ds_info): """Get dataset.""" var_name = ds_info.get('file_key', dataset_id.name) # FUTURE: Metadata retrieval may be separate info = self.get_metadata(dataset_id, ds_info) data = self[var_name] fill = self[var_name + '/attr/_FillValue'] factor = self.get(var_name + '/attr/scale_factor') offset = self.get(var_name + '/attr/add_offset') valid_range = self.get(var_name + '/attr/valid_range') data = data.where(data != fill) if valid_range is not None: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) if factor is not None and offset is not None: data = data * factor + offset data.attrs.update(info) data = data.rename({'lines': 'y', 'elements': 'x'}) return data satpy-0.20.0/satpy/readers/ghrsst_l3c_sst.py000066400000000000000000000106171362525524100210460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""An OSISAF SST reader for the netCDF GHRSST format.""" import logging from datetime import datetime import numpy as np from satpy.dataset import Dataset from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) PLATFORM_NAME = {'NPP': 'Suomi-NPP', } SENSOR_NAME = {'VIIRS': 'viirs', 'AVHRR': 'avhrr/3'} class GHRSST_OSISAFL2(NetCDF4FileHandler): """Reader for the OSISAF SST GHRSST format.""" def _parse_datetime(self, datestr): return datetime.strptime(datestr, '%Y%m%dT%H%M%SZ') def get_area_def(self, area_id, area_info): """Override abstract baseclass method.""" raise NotImplementedError def get_dataset(self, dataset_id, ds_info, out=None): """Load a dataset.""" var_path = ds_info.get('file_key', '{}'.format(dataset_id.name)) dtype = ds_info.get('dtype', np.float32) if var_path + '/shape' not in self: # loading a scalar value shape = 1 else: shape = self[var_path + '/shape'] if shape[0] == 1: # Remove the time dimenstion from dataset shape = shape[1], shape[2] file_units = ds_info.get('file_units') if file_units is None: try: file_units = self[var_path + '/attr/units'] # they were almost completely CF compliant... if file_units == "none": file_units = "1" except KeyError: # no file units specified file_units = None if out is None: out = np.ma.empty(shape, dtype=dtype) out.mask = np.zeros(shape, dtype=np.bool) out.data[:] = np.require(self[var_path][0][::-1], dtype=dtype) valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] try: scale_factor = self[var_path + '/attr/scale_factor'] scale_offset = self[var_path + '/attr/add_offset'] except KeyError: scale_factor = scale_offset = None if valid_min is not None and valid_max is not None: out.mask[:] |= (out.data < valid_min) | (out.data > valid_max) factors = (scale_factor, scale_offset) if factors[0] != 1 or factors[1] != 0: out.data[:] *= factors[0] out.data[:] += factors[1] ds_info.update({ "units": ds_info.get("units", file_units), "platform_name": PLATFORM_NAME.get(self['/attr/platform'], self['/attr/platform']), "sensor": SENSOR_NAME.get(self['/attr/sensor'], self['/attr/sensor']), }) ds_info.update(dataset_id.to_dict()) cls = ds_info.pop("container", Dataset) return cls(out, **ds_info) def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None): """Load an area.""" lon_key = 'lon' valid_min = self[lon_key + '/attr/valid_min'] valid_max = self[lon_key + '/attr/valid_max'] lon_out.data[:] = self[lon_key][::-1] lon_out.mask[:] = (lon_out < valid_min) | (lon_out > valid_max) lat_key = 'lat' valid_min = self[lat_key + '/attr/valid_min'] valid_max = self[lat_key + '/attr/valid_max'] lat_out.data[:] = self[lat_key][::-1] lat_out.mask[:] = (lat_out < valid_min) | (lat_out > valid_max) return {} @property def start_time(self): """Get start time.""" # return self.filename_info['start_time'] return self._parse_datetime(self['/attr/start_time']) @property def end_time(self): """Get end time.""" return self._parse_datetime(self['/attr/stop_time']) satpy-0.20.0/satpy/readers/glm_l2.py000066400000000000000000000113561362525524100172570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Geostationary Lightning Mapper reader for the Level 2 format from glmtools. More information about `glmtools` and the files it produces can be found on the project's GitHub repository: https://github.com/deeplycloudy/glmtools """ import logging from datetime import datetime from satpy.readers.abi_base import NC_ABI_BASE logger = logging.getLogger(__name__) PLATFORM_NAMES = { 'G16': 'GOES-16', 'G17': 'GOES-17', } # class NC_GLM_L2_LCFA(BaseFileHandler): add this with glmtools class NCGriddedGLML2(NC_ABI_BASE): """File reader for individual GLM L2 NetCDF4 files.""" @property def sensor(self): """Get sensor name for current file handler.""" return 'glm' @property def start_time(self): """Start time of the current file's observations.""" return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%SZ') @property def end_time(self): """End time of the current file's observations.""" return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%SZ') def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading in get_dataset %s.', key.name) res = self[key.name] res.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) res.attrs.update(self.filename_info) # Add orbital parameters projection = self.nc["goes_imager_projection"] res.attrs['orbital_parameters'] = { 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), 'projection_altitude': float(projection.attrs['perspective_point_height']), 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), # 'satellite_nominal_altitude': float(self['nominal_satellite_height']), } res.attrs.update(key.to_dict()) # remove attributes that could be confusing later res.attrs.pop('_FillValue', None) res.attrs.pop('scale_factor', None) res.attrs.pop('add_offset', None) res.attrs.pop('_Unsigned', None) res.attrs.pop('ancillary_variables', None) # Can't currently load DQF # add in information from the filename that may be useful to the user # for key in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname'): for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): res.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): res.attrs[attr] = self.nc.attrs.get(attr) return res def available_datasets(self, configured_datasets=None): """Check data availability and add information to configured datasets.""" # we know the actual resolution res = self.spatial_resolution_to_number() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: # we are meant to handle this dataset (file type matches) # and the information we can provide isn't available yet new_info = ds_info.copy()
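                # work on a copy so the shared, YAML-provided metadata dict
                # stays untouched for any later file handlers; only the copy
                # receives the file-derived resolution below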
new_info['resolution'] = res exists = ds_info['name'] in self.nc yield exists, new_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info satpy-0.20.0/satpy/readers/goes_imager_hrit.py000066400000000000000000000432661362525524100214170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GOES HRIT format reader. References: LRIT/HRIT Mission Specific Implementation, February 2012 GVARRDL98.pdf 05057_SPE_MSG_LRIT_HRI """ import logging from datetime import datetime, timedelta import numpy as np import xarray as xr import dask.array as da from pyresample import geometry from satpy.readers.eum_base import (time_cds_short, recarray2dict) from satpy.readers.hrit_base import (HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function) class CalibrationError(Exception): """Dummy error-class.""" pass logger = logging.getLogger('hrit_goes') # Geometric constants [meters] EQUATOR_RADIUS = 6378169.00 POLE_RADIUS = 6356583.80 ALTITUDE = 35785831.00 # goes implementation: key_header = np.dtype([('key_number', 'u1'), ('seed', '>f8')]) segment_identification = np.dtype([('GP_SC_ID', '>i2'), ('spectral_channel_id', '>i1'), ('segment_sequence_number', '>u2'), ('planned_start_segment_number', '>u2'), ('planned_end_segment_number', '>u2'), ('data_field_representation', '>i1')]) image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), ('line_mean_acquisition', [('days', '>u2'), ('milliseconds', '>u4')]), ('line_validity', 'u1'), ('line_radiometric_quality', 'u1'), ('line_geometric_quality', 'u1')]) goms_variable_length_headers = { image_segment_line_quality: 'image_segment_line_quality'} goms_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text'} goes_hdr_map = base_hdr_map.copy() goes_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('X', '>f8', (8, )), ('Y', '>f8', (8, )), ('Z', '>f8', (8, )), ('VX', '>f8', (8, )), ('VY', '>f8', (8, )), ('VZ', '>f8', (8, ))]) attitude_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('XofSpinAxis', '>f8', (8, )), ('YofSpinAxis', '>f8', (8, )), ('ZofSpinAxis', '>f8', (8, ))]) cuc_time = np.dtype([('coarse', 'u1', (4, )), ('fine', 'u1', (3, ))]) sgs_time = np.dtype([('century', 'u1'), ('year', 'u1'), ('doy1', 'u1'), ('doy_hours', 'u1'), ('hours_mins', 'u1'), ('mins_secs', 'u1'), ('secs_msecs', 'u1'), ('msecs', 'u1')]) def make_sgs_time(sgs_time_array): """Make sgs time.""" year = ((sgs_time_array['century'] >> 4) * 1000 + (sgs_time_array['century'] & 15) * 100 + (sgs_time_array['year'] >> 4) * 10 + (sgs_time_array['year'] & 15)) doy = ((sgs_time_array['doy1'] >> 
4) * 100 + (sgs_time_array['doy1'] & 15) * 10 + (sgs_time_array['doy_hours'] >> 4)) hours = ((sgs_time_array['doy_hours'] & 15) * 10 + (sgs_time_array['hours_mins'] >> 4)) mins = ((sgs_time_array['hours_mins'] & 15) * 10 + (sgs_time_array['mins_secs'] >> 4)) secs = ((sgs_time_array['mins_secs'] & 15) * 10 + (sgs_time_array['secs_msecs'] >> 4)) msecs = ((sgs_time_array['secs_msecs'] & 15) * 100 + (sgs_time_array['msecs'] >> 4) * 10 + (sgs_time_array['msecs'] & 15)) return (datetime(int(year), 1, 1) + timedelta(days=int(doy - 1), hours=int(hours), minutes=int(mins), seconds=int(secs), milliseconds=int(msecs))) satellite_status = np.dtype([("TagType", "> 24) - 64 mant = float_val & ((1 << 24) - 1) if mant == 0: return 0. res = sign * mant * 2.0**(-24 + exp * 4) return res prologue = np.dtype([ # common generic header ("CommonHeaderVersion", "u1"), ("Junk1", "u1", 3), ("NominalSGSProductTime", time_cds_short), ("SGSProductQuality", "u1"), ("SGSProductCompleteness", "u1"), ("SGSProductTimeliness", "u1"), ("SGSProcessingInstanceId", "u1"), ("BaseAlgorithmVersion", "S1", 16), ("ProductAlgorithmVersion", "S1", 16), # product header ("ImageProductHeaderVersion", "u1"), ("Junk2", "u1", 3), ("ImageProductHeaderLength", ">u4"), ("ImageProductVersion", "u1"), # first block-0 ("SatelliteID", "u1"), ("SPSID", "u1"), ("IScan", "u1", 4), ("IDSub", "u1", 16), ("TCurr", sgs_time), ("TCHED", sgs_time), ("TCTRL", sgs_time), ("TLHED", sgs_time), ("TLTRL", sgs_time), ("TIPFS", sgs_time), ("TINFS", sgs_time), ("TISPC", sgs_time), ("TIECL", sgs_time), ("TIBBC", sgs_time), ("TISTR", sgs_time), ("TLRAN", sgs_time), ("TIIRT", sgs_time), ("TIVIT", sgs_time), ("TCLMT", sgs_time), ("TIONA", sgs_time), ("RelativeScanCount", '>u2'), ("AbsoluteScanCount", '>u2'), ("NorthernmostScanLine", '>u2'), ("WesternmostPixel", '>u2'), ("EasternmostPixel", '>u2'), ("NorthernmostFrameLine", '>u2'), ("SouthernmostFrameLine", '>u2'), ("0Pixel", '>u2'), ("0ScanLine", '>u2'), ("0Scan", '>u2'), ("SubSatScan", '>u2'), ("SubSatPixel", '>u2'), ("SubSatLatitude", gvar_float), ("SubSatLongitude", gvar_float), ("Junk4", "u1", 96), # move to "word" 295 ("IMCIdentifier", "S4"), ("Zeros", "u1", 12), ("ReferenceLongitude", gvar_float), ("ReferenceDistance", gvar_float), ("ReferenceLatitude", gvar_float) ]) class HRITGOESPrologueFileHandler(HRITFileHandler): """GOES HRIT format reader.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(HRITGOESPrologueFileHandler, self).__init__(filename, filename_info, filetype_info, (goes_hdr_map, goms_variable_length_headers, goms_text_headers)) self.prologue = {} self.read_prologue() def read_prologue(self): """Read the prologue metadata.""" with open(self.filename, "rb") as fp_: fp_.seek(self.mda['total_header_length']) data = np.fromfile(fp_, dtype=prologue, count=1) self.prologue.update(recarray2dict(data)) self.process_prologue() def process_prologue(self): """Reprocess prologue to correct types.""" for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: try: self.prologue[key] = make_sgs_time(self.prologue[key]) except ValueError: self.prologue.pop(key, None) logger.debug("Invalid data for %s", key) for key in ['SubSatLatitude', "SubSatLongitude", "ReferenceLongitude", "ReferenceDistance", "ReferenceLatitude"]: self.prologue[key] = make_gvar_float(self.prologue[key]) radiometric_processing = np.dtype([("TagType", ". 
"""Reader for GOES 8-15 imager data in netCDF format from NOAA CLASS Also handles GOES 15 data in netCDF format reformated by Eumetsat GOES Imager netCDF files contain geolocated detector counts. If ordering via NOAA CLASS, select 16 bits/pixel. The instrument oversamples the viewed scene in E-W direction by a factor of 1.75: IR/VIS pixels are 112/28 urad on a side, but the instrument samples every 64/16 urad in E-W direction (see [BOOK-I] and [BOOK-N]). Important note: Some essential information are missing in the netCDF files, which might render them inappropriate for certain applications. The unknowns are: 1. Subsatellite point 2. Calibration coefficients 3. Detector-scanline assignment, i.e. information about which scanline was recorded by which detector Items 1. and 2. are not critical because the images are geo-located and NOAA provides static calibration coefficients ([VIS], [IR]). The detector-scanline assignment however cannot be reconstructed properly. This is where an approximation has to be applied (see below). Calibration ============ Calibration is performed according to [VIS] and [IR], but with an average calibration coefficient applied to all detectors in a certain channel. The reason for and impact of this approximation is described below. The GOES imager simultaneously records multiple scanlines per sweep using multiple detectors per channel. The VIS channel has 8 detectors, the IR channels have 1-2 detectors (see e.g. Figures 3-5a/b, 3-6a/b and 3-7/a-b in [BOOK-N]). Each detector has its own calibration coefficients, so in order to perform an accurate calibration, the detector-scanline assignment is needed. In theory it is known which scanline was recorded by which detector (VIS: 5,6,7,8,1,2,3,4; IR: 1,2). However, the plate on which the detectors are mounted flexes due to thermal gradients in the instrument which leads to a N-S shift of +/- 8 visible or +/- 2 IR pixels. This shift is compensated in the GVAR scan formation process, but in a way which is hard to reconstruct properly afterwards. See [GVAR], section 3.2.1. for details. Since the calibration coefficients of the detectors in a certain channel only differ slightly, a workaround is to calibrate each scanline with the average calibration coefficients. A worst case estimate of the introduced error can be obtained by calibrating all possible counts with both the minimum and the maximum calibration coefficients and computing the difference. 
The maximum differences are: ======= ===== ==== GOES-8 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.0 % # Counts are normalized 03_9 0.187 K 06_8 0.0 K # only one detector 10_7 0.106 K 12_0 0.036 K ======= ===== ==== ======= ===== ==== GOES-9 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.0 % # Counts are normalized 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.021 K 12_0 0.006 K ======= ===== ==== ======= ===== ==== GOES-10 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.05 % 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.013 K 12_0 0.004 K ======= ===== ==== ======= ===== ==== GOES-11 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.25 % 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.0 K # coefs identical 12_0 0.065 K ======= ===== ==== ======= ===== ==== GOES-12 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.8 % 03_9 0.0 K # coefs identical 06_5 0.044 K 10_7 0.0 K # coefs identical 13_3 0.0 K # only one detector ======= ===== ==== ======= ===== ==== GOES-13 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.31 % 03_9 0.0 K # coefs identical 06_5 0.085 K 10_7 0.008 K 13_3 0.0 K # only one detector ======= ===== ==== ======= ===== ==== GOES-14 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.66 % 03_9 0.0 K # coefs identical 06_5 0.043 K 10_7 0.006 K 13_3 0.003 K ======= ===== ==== ======= ===== ==== GOES-15 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.86 % 03_9 0.0 K # coefs identical 06_5 0.02 K 10_7 0.009 K 13_3 0.008 K ======= ===== ==== References: - [GVAR] https://goes.gsfc.nasa.gov/text/GVARRDL98.pdf - [BOOK-N] https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf - [BOOK-I] https://goes.gsfc.nasa.gov/text/databook/databook.pdf - [IR] https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html - [VIS] https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html - [FAQ] https://www.ncdc.noaa.gov/sites/default/files/attachments/Satellite-Frequently-Asked-Questions_2.pdf - [SCHED-W] http://www.ospo.noaa.gov/Operations/GOES/west/imager-routine.html - [SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html Eumetsat formated netCDF data: The main differences are: 1. The geolocation is in a separate file, used for all bands 2. VIS data is calibrated to Albedo (or reflectance) 3. IR data is calibrated to radiance. 4. VIS data is downsampled to IR resolution (4km) 5. File name differs also slightly 6. Data is received via EumetCast """ from abc import abstractmethod from collections import namedtuple from datetime import datetime, timedelta import logging import re import numpy as np import xarray as xr import pyresample.geometry from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.goes_imager_hrit import (SPACECRAFTS, EQUATOR_RADIUS, POLE_RADIUS, ALTITUDE) from satpy.readers.utils import bbox, get_geostationary_angle_extent logger = logging.getLogger(__name__) # Radiation constants. Source: [VIS] C1 = 1.191066E-5 # [mW/(m2-sr-cm-4)] C2 = 1.438833 # [K/cm-1] # Calibration Coefficients # # VIS Channel # ============ # slope, offset: Pre-Launch slope & offset for converting counts to radiance # (one per detector) [W m-2 um-1 sr-1]. 
# x0: Space count # k: pi / (solar spectral irradiance averaged over the spectral response # function of the detector) [m2 sr um W-1] # # # IR Channels # ============ # scale, offset: Scale & offset for converting counts to radiance. Units: # [mW m-2 cm-1 sr-1], [1]. They are identical for all platforms. # n: The channel's central wavenumber (one for each detector) [cm-1] # a, b: Offset and slope for converting effective BT to actual BT (one per # detector). Units: [K], [1] # btmin, btmax: Valid BT range [K]. Values outside this range will be masked. # Extracted from lookup tables provided in [IR]. SCALE_03_9 = 227.3889 OFFSET_03_9 = 68.2167 SCALE_06_8 = 38.8383 OFFSET_06_8 = 29.1287 SCALE_06_5 = 38.8383 OFFSET_06_5 = 29.1287 SCALE_10_7 = 5.2285 OFFSET_10_7 = 15.6854 SCALE_12_0 = 5.0273 OFFSET_12_0 = 15.3332 SCALE_13_3 = 5.5297 OFFSET_13_3 = 16.5892 CALIB_COEFS = { 'GOES-15': {'00_7': {'slope': [5.851966E-1, 5.879772E-1, 5.856793E-1, 5.854250E-1, 5.866992E-1, 5.836241E-1, 5.846555E-1, 5.843753E-1], 'offset': [-16.9707, -17.0513, -16.9847, -16.9773, -17.0143, -16.9251, -16.9550, -16.9469], 'x0': 29, 'k': 1.88852E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2562.7905, 2562.7905], 'a': [-1.5693377, -1.5693377], 'b': [1.0025034, 1.0025034], 'btmin': 205.0, 'btmax': 340.0}, '06_5': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1521.1988, 1521.5277], 'a': [-3.4706545, -3.4755568], 'b': [1.0093296, 1.0092838], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [935.89417, 935.78158], 'a': [-0.36151367, -0.35316361], 'b': [1.0012715, 1.0012570], 'btmin': 180.0, 'btmax': 340.0}, '13_3': {'scale': SCALE_13_3, 'offset': OFFSET_13_3, 'n': [753.72229, 753.93403], 'a': [-0.21475817, -0.24630068], 'b': [1.0006485, 1.0007178], 'btmin': 180.0, 'btmax': 340.0} }, # ITT RevH + STAR Correction 'GOES-14': {'00_7': {'slope': [5.874693E-1, 5.865367E-1, 5.862807E-1, 5.864086E-1, 5.857146E-1, 5.852004E-1, 5.860814E-1, 5.841697E-1], 'offset': [-17.037, -17.010, -17.002, -17.006, -16.986, -16.971, -16.996, -16.941], 'x0': 29, 'k': 1.88772E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2577.3518, 2577.3518], 'a': [-1.5297091, -1.5297091], 'b': [1.0025608, 1.0025608], 'btmin': 205.0, 'btmax': 340.0}, '06_5': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1519.3488, 1518.5610], 'a': [-3.4647892, -3.4390527], 'b': [1.0093656, 1.0094427], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [933.98541, 934.19579], 'a': [-0.29201763, -0.31824779], 'b': [1.0012018, 1.0012303], 'btmin': 180.0, 'btmax': 340.0}, '13_3': {'scale': SCALE_13_3, 'offset': OFFSET_13_3, 'n': [752.88143, 752.82392], 'a': [-0.22508805, -0.21700982], 'b': [1.0006686, 1.0006503], 'btmin': 180.0, 'btmax': 340.0} }, # ITT RevH + STAR Correction 'GOES-13': {'00_7': {'slope': [6.120196E-1, 6.118504E-1, 6.096360E-1, 6.087055E-1, 6.132860E-1, 6.118208E-1, 6.122307E-1, 6.066968E-1], 'offset': [-17.749, -17.744, -17.769, -17.653, -17.785, -17.743, -17.755, -17.594], 'x0': 29, 'k': 1.89544E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2561.74, 2561.74], 'a': [-1.437204, -1.437204], 'b': [1.002562, 1.002562], 'btmin': 205.0, 'btmax': 340.0}, '06_5': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1522.52, 1521.66], 'a': [-3.625663, -3.607841], 'b': [1.010018, 1.010010], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [937.23, 937.27], 'a': [-0.386043, -0.380113], 'b': [1.001298, 
1.001285], 'btmin': 180.0, 'btmax': 340.0}, '13_3': {'scale': SCALE_13_3, 'offset': OFFSET_13_3, 'n': [749.83], 'a': [-0.134801], 'b': [1.000482], 'btmin': 180.0, 'btmax': 340.0} # Has only one detector on GOES-13 }, 'GOES-12': {'00_7': {'slope': [5.771030E-1, 5.761764E-1, 5.775825E-1, 5.790699E-1, 5.787051E-1, 5.755969E-1, 5.753973E-1, 5.752099E-1], 'offset': [-16.736, -16.709, -16.750, -16.793, -16.782, -16.692, -16.687, -16.681], 'x0': 29, 'k': 1.97658E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2562.45, 2562.45], 'a': [-0.650731, -0.650731], 'b': [1.001520, 1.001520], 'btmin': 205.0, 'btmax': 340.0}, '06_5': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1536.43, 1536.94], 'a': [-4.764728, -4.775517], 'b': [1.012420, 1.012403], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [933.21, 933.21], 'a': [-0.360331, -0.360331], 'b': [1.001306, 1.001306], 'btmin': 180.0, 'btmax': 340.0}, '13_3': {'scale': SCALE_13_3, 'offset': OFFSET_13_3, 'n': [751.91], 'a': [-0.253449], 'b': [1.000743], 'btmin': 180.0, 'btmax': 340.0} # Has only one detector on GOES-12 }, 'GOES-11': {'00_7': {'slope': [5.561568E-1, 5.552979E-1, 5.558981E-1, 5.577627E-1, 5.557238E-1, 5.587978E-1, 5.586530E-1, 5.528971E-1], 'offset': [-16.129, -16.104, -16.121, -16.175, -16.116, -16.205, -16.201, -16.034], 'x0': 29, 'k': 2.01524E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2562.07, 2562.07], 'a': [-0.644790, -0.644790], 'b': [1.000775, 1.000775], 'btmin': 205.0, 'btmax': 340.0}, '06_8': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1481.53], 'a': [-0.543401], 'b': [1.001495], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [931.76, 931.76], 'a': [-0.306809, -0.306809], 'b': [1.001274, 1.001274], 'btmin': 180.0, 'btmax': 340.0}, '12_0': {'scale': SCALE_12_0, 'offset': OFFSET_12_0, 'n': [833.67, 833.04], 'a': [-0.333216, -0.315110], 'b': [1.001000, 1.000967], 'btmin': 180.0, 'btmax': 340.0} }, 'GOES-10': {'00_7': {'slope': [5.605602E-1, 5.563529E-1, 5.566574E-1, 5.582154E-1, 5.583361E-1, 5.571736E-1, 5.563135E-1, 5.613536E-1], 'offset': [-16.256, -16.134, -16.143, -16.188, -16.192, -16.158, -16.133, -16.279], 'x0': 29, 'k': 1.98808E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2552.9845, 2552.9845], 'a': [-0.60584483, -0.60584483], 'b': [1.0011017, 1.0011017], 'btmin': 205.0, 'btmax': 340.0}, '06_8': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1486.2212], 'a': [-0.61653805], 'b': [1.0014011], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [936.10260, 935.98981], 'a': [-0.27128884, -0.27064036], 'b': [1.0009674, 1.0009687], 'btmin': 180.0, 'btmax': 340.0}, '12_0': {'scale': SCALE_12_0, 'offset': OFFSET_12_0, 'n': [830.88473, 830.89691], 'a': [-0.26505411, -0.26056452], 'b': [1.0009087, 1.0008962], 'btmin': 180.0, 'btmax': 340.0} }, 'GOES-9': {'00_7': {'slope': [0.5492361], 'offset': [-15.928], 'x0': 29, 'k': 1.94180E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2555.18, 2555.18], 'a': [-0.579908, -0.579908], 'b': [1.000942, 1.000942], 'btmin': 205.0, 'btmax': 340.0}, '06_8': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1481.82], 'a': [-0.493016], 'b': [1.001076], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [934.59, 934.28], 'a': [-0.384798, -0.363703], 'b': [1.001293, 1.001272], 'btmin': 180.0, 'btmax': 340.0}, '12_0': {'scale': SCALE_12_0, 'offset': 
OFFSET_12_0, 'n': [834.02, 834.09], 'a': [-0.302995, -0.306838], 'b': [1.000941, 1.000948], 'btmin': 180.0, 'btmax': 340.0} }, 'GOES-8': {'00_7': {'slope': [0.5501873], 'offset': [-15.955], 'x0': 29, 'k': 1.92979E-3}, '03_9': {'scale': SCALE_03_9, 'offset': OFFSET_03_9, 'n': [2556.71, 2558.62], 'a': [-0.578526, -0.581853], 'b': [1.001512, 1.001532], 'btmin': 205.0, 'btmax': 340.0}, '06_8': {'scale': SCALE_06_8, 'offset': OFFSET_06_8, 'n': [1481.91], 'a': [-0.593903], 'b': [1.001418], 'btmin': 180.0, 'btmax': 340.0}, '10_7': {'scale': SCALE_10_7, 'offset': OFFSET_10_7, 'n': [934.30, 935.38], 'a': [-0.322585, -0.351889], 'b': [1.001271, 1.001293], 'btmin': 180.0, 'btmax': 340.0}, '12_0': {'scale': SCALE_12_0, 'offset': OFFSET_12_0, 'n': [837.06, 837.00], 'a': [-0.422571, -0.466954], 'b': [1.001170, 1.001257], 'btmin': 180.0, 'btmax': 340.0} } } # Angular sampling rates in radians. Source: [BOOK-I], [BOOK-N] SAMPLING_EW_VIS = 16E-6 SAMPLING_NS_VIS = 28E-6 SAMPLING_EW_IR = 64E-6 SAMPLING_NS_IR = 112E-6 # Sector definitions. TODO: Add remaining sectors (PACUS, CONUS, ...) FULL_DISC = 'Full Disc' NORTH_HEMIS_EAST = 'Northern Hemisphere (GOES-East)' SOUTH_HEMIS_EAST = 'Southern Hemisphere (GOES-East)' NORTH_HEMIS_WEST = 'Northern Hemisphere (GOES-West)' SOUTH_HEMIS_WEST = 'Southern Hemisphere (GOES-West)' UNKNOWN_SECTOR = 'Unknown' IR_SECTORS = { (2704, 5208): FULL_DISC, (1826, 3464): NORTH_HEMIS_EAST, (566, 3464): SOUTH_HEMIS_EAST, (1354, 3312): NORTH_HEMIS_WEST, (1062, 2760): SOUTH_HEMIS_WEST } # (nlines, ncols) VIS_SECTORS = { (10819, 20800): FULL_DISC, (7307, 13852): NORTH_HEMIS_EAST, (2267, 13852): SOUTH_HEMIS_EAST, (5419, 13244): NORTH_HEMIS_WEST, (4251, 11044): SOUTH_HEMIS_WEST } # (nlines, ncols) SCAN_DURATION = { FULL_DISC: timedelta(minutes=26), NORTH_HEMIS_WEST: timedelta(minutes=10, seconds=5), SOUTH_HEMIS_WEST: timedelta(minutes=6, seconds=54), NORTH_HEMIS_EAST: timedelta(minutes=14, seconds=15), SOUTH_HEMIS_EAST: timedelta(minutes=4, seconds=49) } # Source: [SCHED-W], [SCHED-E] class GOESNCBaseFileHandler(BaseFileHandler): """File handler for GOES Imager data in netCDF format""" def __init__(self, filename, filename_info, filetype_info, geo_data=None): """Initialize the reader.""" super(GOESNCBaseFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) self.sensor = 'goes_imager' self.nlines = self.nc.dims['yc'] self.ncols = self.nc.dims['xc'] self.platform_name = self._get_platform_name( self.nc.attrs['Satellite Sensor']) self.platform_shortname = self.platform_name.replace('-', '').lower() self.gvar_channel = int(self.nc['bands'].values) self.sector = self._get_sector(channel=self.gvar_channel, nlines=self.nlines, ncols=self.ncols) self._meta = None self.geo_data = geo_data if geo_data is not None else self.nc @abstractmethod def get_dataset(self, key, info): """Load dataset designated by the given key from file""" raise NotImplementedError @abstractmethod def calibrate(self, data, calibration, channel): """Perform calibration""" raise NotImplementedError @property @abstractmethod def vis_sectors(self): raise NotImplementedError @property @abstractmethod def ir_sectors(self): raise NotImplementedError @staticmethod def _get_platform_name(ncattr): """Determine name of the platform""" match = re.match(r'G-(\d+)', ncattr) if match: return SPACECRAFTS.get(int(match.groups()[0])) return None def _get_sector(self, channel, nlines, ncols): """Determine 
which sector was scanned""" if self._is_vis(channel): margin = 100 sectors_ref = self.vis_sectors else: margin = 50 sectors_ref = self.ir_sectors for (nlines_ref, ncols_ref), sector in sectors_ref.items(): if np.fabs(ncols - ncols_ref) < margin and \ np.fabs(nlines - nlines_ref) < margin: return sector return UNKNOWN_SECTOR @staticmethod def _is_vis(channel): """Determine whether the given channel is a visible channel""" if isinstance(channel, str): return channel == '00_7' elif isinstance(channel, int): return channel == 1 else: raise ValueError('Invalid channel') @staticmethod def _get_earth_mask(lat): """Identify earth/space pixels Returns: Mask (1=earth, 0=space) """ logger.debug('Computing earth mask') return np.fabs(lat) <= 90 @staticmethod def _get_nadir_pixel(earth_mask, sector): """Find the nadir pixel Args: earth_mask: Mask identifying earth and space pixels sector: Specifies the scanned sector Returns: nadir row, nadir column """ if sector == FULL_DISC: logger.debug('Computing nadir pixel') # The earth is not centered in the image, compute bounding box # of the earth disc first rmin, rmax, cmin, cmax = bbox(earth_mask) # The nadir pixel is approximately at the centre of the earth disk nadir_row = rmin + (rmax - rmin) // 2 nadir_col = cmin + (cmax - cmin) // 2 return nadir_row, nadir_col return None, None @staticmethod def _is_yaw_flip(lat, delta=10): """Determine whether the satellite is yaw-flipped ('upside down')""" logger.debug('Computing yaw flip flag') # In case of yaw-flip the data and coordinates in the netCDF files are # also flipped. Just check whether the latitude increases or decrases # with the line number. crow, ccol = np.array(lat.shape) // 2 return (lat[crow+delta, ccol] - lat[crow, ccol]).values > 0 def _get_area_def_uniform_sampling(self, lon0, channel): """Get area definition with uniform sampling""" logger.debug('Computing area definition') if lon0 is not None: # Define proj4 projection parameters proj_dict = {'a': EQUATOR_RADIUS, 'b': POLE_RADIUS, 'lon_0': lon0, 'h': ALTITUDE, 'proj': 'geos', 'units': 'm'} # Calculate maximum scanning angles xmax, ymax = get_geostationary_angle_extent( namedtuple('area', ['proj_dict'])(proj_dict)) # Derive area extent using small angle approximation (maximum # scanning angle is ~8.6 degrees) llx, lly, urx, ury = ALTITUDE * np.array([-xmax, -ymax, xmax, ymax]) area_extent = [llx, lly, urx, ury] # Original image is oversampled. Create pyresample area definition # with uniform sampling in N-S and E-W direction if self._is_vis(channel): sampling = SAMPLING_NS_VIS else: sampling = SAMPLING_NS_IR pix_size = ALTITUDE * sampling area_def = pyresample.geometry.AreaDefinition( 'goes_geos_uniform', '{} geostationary projection (uniform sampling)'.format(self.platform_name), 'goes_geos_uniform', proj_dict, np.rint((urx - llx) / pix_size).astype(int), np.rint((ury - lly) / pix_size).astype(int), area_extent) return area_def else: return None @property def start_time(self): """Start timestamp of the dataset""" dt = self.nc['time'].dt return datetime(year=dt.year, month=dt.month, day=dt.day, hour=dt.hour, minute=dt.minute, second=dt.second, microsecond=dt.microsecond) @property def end_time(self): """End timestamp of the dataset""" try: return self.start_time + SCAN_DURATION[self.sector] except KeyError: return self.start_time @property def resolution(self): """Specify the spatial resolution of the dataset. Channel 13_3's spatial resolution changes from one platform to another while the wavelength and file format remain the same. 
In order to avoid multiple YAML reader definitions for the same file format, read the channel's resolution from the file instead of defining it in the YAML dataset. This information will then be used by the YAML reader to complement the YAML definition of the dataset. Returns: Spatial resolution in kilometers """ return 1000. * self.nc['lineRes'].values def get_shape(self, key, info): """Get the shape of the data Returns: Number of lines, number of columns """ return self.nlines, self.ncols @property def meta(self): """Derive metadata from the coordinates""" # Use buffered data if available if self._meta is None: lat = self.geo_data['lat'] earth_mask = self._get_earth_mask(lat) crow, ccol = self._get_nadir_pixel(earth_mask=earth_mask, sector=self.sector) lat0 = lat.values[crow, ccol] if crow is not None else None yaw_flip = self._is_yaw_flip(lat) del lat lon = self.geo_data['lon'] lon0 = lon.values[crow, ccol] if crow is not None else None area_def_uni = self._get_area_def_uniform_sampling( lon0=lon0, channel=self.gvar_channel) del lon self._meta = {'earth_mask': earth_mask, 'yaw_flip': yaw_flip, 'lat0': lat0, 'lon0': lon0, 'nadir_row': crow, 'nadir_col': ccol, 'area_def_uni': area_def_uni} return self._meta def _counts2radiance(self, counts, coefs, channel): """Convert raw detector counts to radiance""" logger.debug('Converting counts to radiance') if self._is_vis(channel): # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. slope = np.array(coefs['slope']).mean() offset = np.array(coefs['offset']).mean() return self._viscounts2radiance(counts=counts, slope=slope, offset=offset) return self._ircounts2radiance(counts=counts, scale=coefs['scale'], offset=coefs['offset']) def _calibrate(self, radiance, coefs, channel, calibration): """Convert radiance to reflectance or brightness temperature""" if self._is_vis(channel): if not calibration == 'reflectance': raise ValueError('Cannot calibrate VIS channel to ' '{}'.format(calibration)) return self._calibrate_vis(radiance=radiance, k=coefs['k']) else: if not calibration == 'brightness_temperature': raise ValueError('Cannot calibrate IR channel to ' '{}'.format(calibration)) # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. mean_coefs = {'a': np.array(coefs['a']).mean(), 'b': np.array(coefs['b']).mean(), 'n': np.array(coefs['n']).mean(), 'btmin': coefs['btmin'], 'btmax': coefs['btmax']} return self._calibrate_ir(radiance=radiance, coefs=mean_coefs) @staticmethod def _ircounts2radiance(counts, scale, offset): """Convert IR counts to radiance Reference: [IR]. Args: counts: Raw detector counts scale: Scale [mW-1 m2 cm sr] offset: Offset [1] Returns: Radiance [mW m-2 cm-1 sr-1] """ rad = (counts - offset) / scale return rad.clip(min=0) @staticmethod def _calibrate_ir(radiance, coefs): """Convert IR radiance to brightness temperature Reference: [IR] Args: radiance: Radiance [mW m-2 cm-1 sr-1] coefs: Dictionary of calibration coefficients. 
Keys: n: The channel's central wavenumber [cm-1] a: Offset [K] b: Slope [1] btmin: Minimum brightness temperature threshold [K] btmax: Maximum brightness temperature threshold [K] Returns: Brightness temperature [K] """ logger.debug('Calibrating to brightness temperature') # Compute brightness temperature using inverse Planck formula n = coefs['n'] bteff = C2 * n / np.log(1 + C1 * n ** 3 / radiance.where(radiance > 0)) bt = xr.DataArray(bteff * coefs['b'] + coefs['a']) # Apply BT threshold return bt.where(np.logical_and(bt >= coefs['btmin'], bt <= coefs['btmax'])) @staticmethod def _viscounts2radiance(counts, slope, offset): """Convert VIS counts to radiance References: [VIS] Args: counts: Raw detector counts slope: Slope [W m-2 um-1 sr-1] offset: Offset [W m-2 um-1 sr-1] Returns: Radiance [W m-2 um-1 sr-1] """ rad = counts * slope + offset return rad.clip(min=0) @staticmethod def _calibrate_vis(radiance, k): """Convert VIS radiance to reflectance Note: Angle of incident radiation and annual variation of the earth-sun distance is not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. TODO: Take angle of incident radiation (cos sza) and annual variation of the earth-sun distance into account. Reference: [VIS] Args: radiance: Radiance [mW m-2 cm-1 sr-1] k: pi / H, where H is the solar spectral irradiance at annual-average sun-earth distance, averaged over the spectral response function of the detector). Units of k: [m2 um sr W-1] Returns: Reflectance [%] """ logger.debug('Calibrating to reflectance') refl = 100 * k * radiance return refl.clip(min=0) def _update_metadata(self, data, ds_info): """Update metadata of the given DataArray""" # Metadata from the dataset definition data.attrs.update(ds_info) # If the file_type attribute is a list and the data is xarray # the concat of the dataset will not work. As the file_type is # not needed this will be popped here. if 'file_type' in data.attrs: data.attrs.pop('file_type') # Metadata discovered from the file. data.attrs.update( {'platform_name': self.platform_name, 'sensor': self.sensor, 'sector': self.sector, 'orbital_parameters': {'yaw_flip': self.meta['yaw_flip']}} ) if self.meta['lon0'] is not None: # Attributes only available for full disc images. YAML reader # doesn't like it if satellite_* is present but None data.attrs.update( {'satellite_longitude': self.meta['lon0'], 'satellite_latitude': self.meta['lat0'], 'satellite_altitude': ALTITUDE, 'nadir_row': self.meta['nadir_row'], 'nadir_col': self.meta['nadir_col'], 'area_def_uniform_sampling': self.meta['area_def_uni']} ) data.attrs['orbital_parameters'].update( {'projection_longitude': self.meta['lon0'], 'projection_latitude': self.meta['lat0'], 'projection_altitude': ALTITUDE} ) def __del__(self): try: self.nc.close() except (AttributeError, IOError, OSError): pass def available_datasets(self, configured_datasets=None): """Update information for or add datasets provided by this file. If this file handler can load a dataset then it will supplement the dataset info with the resolution and possibly coordinate datasets needed to load it. Otherwise it will continue passing the dataset information down the chain. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. 
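As a sketch, a configured ``13_3`` entry that lacks a resolution could be supplemented like this (the file type name is hypothetical)::

            # from YAML: {'name': '13_3', 'file_type': 'goes13_nc'}
            # yielded:   {'name': '13_3', 'file_type': 'goes13_nc',
            #             'resolution': 4000.0}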
""" res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: new_info = ds_info.copy() new_info['resolution'] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info class GOESNCFileHandler(GOESNCBaseFileHandler): """File handler for GOES Imager data in netCDF format""" vis_sectors = VIS_SECTORS ir_sectors = IR_SECTORS def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(GOESNCFileHandler, self).__init__(filename, filename_info, filetype_info) def get_dataset(self, key, info): """Load dataset designated by the given key from file""" logger.debug('Reading dataset {}'.format(key.name)) # Read data from file and calibrate if necessary if 'longitude' in key.name: data = self.geo_data['lon'] elif 'latitude' in key.name: data = self.geo_data['lat'] else: tic = datetime.now() data = self.calibrate(self.nc['data'].isel(time=0), calibration=key.calibration, channel=key.name) logger.debug('Calibration time: {}'.format(datetime.now() - tic)) # Mask space pixels data = data.where(self.meta['earth_mask']) # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) # Update metadata self._update_metadata(data, ds_info=info) return data def calibrate(self, counts, calibration, channel): """Perform calibration""" # Convert 16bit counts from netCDF4 file to the original 10bit # GVAR counts by dividing by 32. See [FAQ]. counts = counts / 32. coefs = CALIB_COEFS[self.platform_name][channel] if calibration == 'counts': return counts elif calibration in ['radiance', 'reflectance', 'brightness_temperature']: radiance = self._counts2radiance(counts=counts, coefs=coefs, channel=channel) if calibration == 'radiance': return radiance return self._calibrate(radiance=radiance, coefs=coefs, channel=channel, calibration=calibration) else: raise ValueError('Unsupported calibration for channel {}: {}' .format(channel, calibration)) class GOESEUMNCFileHandler(GOESNCBaseFileHandler): """File handler for GOES Imager data in EUM netCDF format TODO: Remove datasets which are not available in the file (counts, VIS radiance) via available_datasets() -> See #434 """ vis_sectors = IR_SECTORS # VIS channel is downsampled to IR resolution ir_sectors = IR_SECTORS def __init__(self, filename, filename_info, filetype_info, geo_data): """Initialize the reader.""" super(GOESEUMNCFileHandler, self).__init__(filename, filename_info, filetype_info, geo_data) def get_dataset(self, key, info): """Load dataset designated by the given key from file""" logger.debug('Reading dataset {}'.format(key.name)) tic = datetime.now() data = self.calibrate(self.nc['data'].isel(time=0), calibration=key.calibration, channel=key.name) logger.debug('Calibration time: {}'.format(datetime.now() - tic)) # Mask space pixels data = data.where(self.meta['earth_mask']) # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) data = data.drop('time') # Update metadata self._update_metadata(data, ds_info=info) return data def calibrate(self, data, calibration, channel): """Perform calibration""" coefs = CALIB_COEFS[self.platform_name][channel] is_vis = self._is_vis(channel) # IR files provide radiances, VIS file provides reflectances if is_vis and calibration == 'reflectance': return data elif not is_vis and calibration == 'radiance': return data elif not is_vis and 
calibration == 'brightness_temperature': return self._calibrate(radiance=data, calibration=calibration, coefs=coefs, channel=channel) else: raise ValueError('Unsupported calibration for channel {}: {}' .format(channel, calibration)) class GOESEUMGEONCFileHandler(BaseFileHandler): """File handler for GOES Geolocation data in EUM netCDF format""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(GOESEUMGEONCFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) self.sensor = 'goes_imager' self.nlines = self.nc.dims['yc'] self.ncols = self.nc.dims['xc'] self.platform_name = GOESNCBaseFileHandler._get_platform_name( self.nc.attrs['Satellite Sensor']) self.platform_shortname = self.platform_name.replace('-', '').lower() self._meta = None def __getitem__(self, item): return getattr(self.nc, item) def get_dataset(self, key, info): """Load dataset designated by the given key from file""" logger.debug('Reading dataset {}'.format(key.name)) # Read data from file and calibrate if necessary if 'longitude' in key.name: data = self.nc['lon'] elif 'latitude' in key.name: data = self.nc['lat'] else: raise KeyError("Unknown dataset: {}".format(key.name)) # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) # Update metadata data.attrs.update(info) return data @property def resolution(self): """Specify the spatial resolution of the dataset. In the EUMETSAT format VIS data is downsampled to IR resolution (4km). """ return 4000.0 class GOESCoefficientReader(object): """Read GOES Imager calibration coefficients from NOAA reference HTMLs""" gvar_channels = { 'GOES-8': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, 'GOES-9': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, 'GOES-10': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, 'GOES-11': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, 'GOES-12': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, 'GOES-13': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, 'GOES-14': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, 'GOES-15': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, } ir_tables = { 'GOES-8': '2-1', 'GOES-9': '2-2', 'GOES-10': '2-3', 'GOES-11': '2-4', 'GOES-12': '2-5a', 'GOES-13': '2-6', 'GOES-14': '2-7c', 'GOES-15': '2-8b' } vis_tables = { 'GOES-8': 'Table 1.', 'GOES-9': 'Table 1.', 'GOES-10': 'Table 2.', 'GOES-11': 'Table 3.', 'GOES-12': 'Table 4.', 'GOES-13': 'Table 5.', 'GOES-14': 'Table 6.', 'GOES-15': 'Table 7.' } def __init__(self, ir_url, vis_url): from bs4 import BeautifulSoup self.ir_html = BeautifulSoup(self._load_url_or_file(ir_url), features="html5lib") self.vis_html = BeautifulSoup(self._load_url_or_file(vis_url), features="html5lib") def _load_url_or_file(self, url): import requests from requests.exceptions import MissingSchema try: response = requests.get(url) if response.ok: return response.text else: raise requests.HTTPError except (MissingSchema, requests.HTTPError): # Not a valid URL, is it a file? 
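            # Fall back to treating ``url`` as a local file path. The open
            # file handle is returned as-is; BeautifulSoup accepts file
            # objects as well as strings.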
try: return open(url, mode='r') except IOError: raise ValueError('Invalid URL or file: {}'.format(url)) def get_coefs(self, platform, channel): if channel == '00_7': return self._get_vis_coefs(platform=platform) return self._get_ir_coefs(platform=platform, channel=channel) def _get_ir_coefs(self, platform, channel): from collections import defaultdict coefs = defaultdict(list) # Extract scale and offset for conversion counts->radiance from # Table 1-1 (same for all platforms, only depends on the channel) gvar_channel = self.gvar_channels[platform][channel] table11 = self._get_table(root=self.ir_html, heading='Table 1-1', heading_type='h3') for row in table11: if int(row[0]) == gvar_channel: coefs['scale'] = self._float(row[1]) coefs['offset'] = self._float(row[2]) # Extract n,a,b (radiance -> BT) from the coefficient table for the # given platform table = self._get_table(root=self.ir_html, heading=self.ir_tables[platform], heading_type='h3') channel_regex = re.compile('^{}(?:/[a,b])?$'.format(gvar_channel)) for row in table: if channel_regex.match(row[0]): # Extract coefficients. Detector (a) always comes before (b) # in the table so that simply appending preserves the order. coefs['n'].append(self._float(row[1])) coefs['a'].append(self._float(row[2])) coefs['b'].append(self._float(row[3])) return coefs def _get_vis_coefs(self, platform): from collections import defaultdict # Find calibration table table = self._get_table(root=self.vis_html, heading=self.vis_tables[platform], heading_type='p') # Extract values coefs = defaultdict(list) if platform in ('GOES-8', 'GOES-9'): # GOES 8&9 coefficients are in the same table col = 1 if platform == 'GOES-8' else 2 coefs['slope'].append(self._float(table[1][col])) coefs['x0'] = self._float(table[2][col]) coefs['offset'].append(self._float(table[3][col])) coefs['k'] = self._float(table[4][col]) else: # k and x0 appear in the first row only coefs['slope'].append(self._float(table[0][1])) coefs['x0'] = self._float(table[0][2]) coefs['k'] = self._float(table[0][4]) coefs['offset'].append(self._float(table[0][3])) # Remaining rows for row in table[1:]: coefs['slope'].append(self._float(row[1])) coefs['offset'].append(self._float(row[2])) return coefs def _get_table(self, root, heading, heading_type, ): # Find table by its heading headings = [h for h in root.find_all(heading_type) if heading in h.text] if not headings: raise ValueError('Cannot find a coefficient table matching text ' '"{}"'.format(heading)) elif len(headings) > 1: raise ValueError('Found multiple headings matching text "{}"' .format(heading)) table = headings[0].next_sibling.next_sibling # Copy items to a list of lists tab = list() for row in table.find_all('tr'): cols = row.find_all('td') if cols: tab.append([c.text for c in cols]) return tab def _denoise(self, string): return string.replace('\n', '').replace(' ', '') def _float(self, string): """Convert string to float Take care of numbers in exponential format """ string = self._denoise(string) exp_match = re.match(r'^[-.\d]+x10-(\d)$', string) if exp_match: exp = int(exp_match.groups()[0]) fac = 10 ** -exp string = string.replace('x10-{}'.format(exp), '') else: fac = 1 return fac * float(string) def test_coefs(ir_url, vis_url): """Test calibration coefficients against NOAA reference pages Currently the reference pages are: ir_url = https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html vis_url = https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html Args: ir_url: Path or URL to HTML page 
with IR coefficients vis_url: Path or URL to HTML page with VIS coefficients Raises: ValueError if coefficients don't match the reference """ reader = GOESCoefficientReader(ir_url=ir_url, vis_url=vis_url) for platform in CALIB_COEFS.keys(): for channel, coefs in CALIB_COEFS[platform].items(): coefs_expected = reader.get_coefs(platform=platform, channel=channel) for cname in coefs_expected.keys(): if not np.allclose(coefs[cname], coefs_expected[cname]): raise ValueError( 'Coefficient {} for {} channel {} does not match the ' 'reference'.format(cname, platform, channel)) logger.info('Coefficients OK') return True satpy-0.20.0/satpy/readers/grib.py000066400000000000000000000236031362525524100170240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Generic Reader for GRIB2 files. Currently this reader depends on the `pygrib` python package. The `eccodes` package from ECMWF is preferred, but does not support python 3 at the time of writing. """ import logging import numpy as np import xarray as xr import dask.array as da from pyproj import Proj from pyresample import geometry from datetime import datetime from satpy import DatasetID, CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler import pygrib LOG = logging.getLogger(__name__) CF_UNITS = { 'none': '1', } class GRIBFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(GRIBFileHandler, self).__init__(filename, filename_info, filetype_info) self._msg_datasets = {} self._start_time = None self._end_time = None try: with pygrib.open(self.filename) as grib_file: first_msg = grib_file.message(1) last_msg = grib_file.message(grib_file.messages) start_time = self._convert_datetime( first_msg, 'validityDate', 'validityTime') end_time = self._convert_datetime( last_msg, 'validityDate', 'validityTime') self._start_time = start_time self._end_time = end_time if 'keys' not in filetype_info: self._analyze_messages(grib_file) self._idx = None else: self._create_dataset_ids(filetype_info['keys']) self._idx = pygrib.index(self.filename, *filetype_info['keys'].keys()) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) def _analyze_messages(self, grib_file): grib_file.seek(0) for idx, msg in enumerate(grib_file): msg_id = DatasetID(name=msg['shortName'], level=msg['level']) ds_info = { 'message': idx + 1, 'name': msg['shortName'], 'level': msg['level'], 'file_type': self.filetype_info['file_type'], } self._msg_datasets[msg_id] = ds_info def _create_dataset_ids(self, keys): from itertools import product ordered_keys = [k for k in keys.keys() if 'id_key' in keys[k]] for id_vals in product(*[keys[k]['values'] for k in ordered_keys]): id_keys = [keys[k]['id_key'] for k in ordered_keys] msg_info = dict(zip(ordered_keys, id_vals)) ds_info = dict(zip(id_keys, id_vals)) msg_id = 
DatasetID(**ds_info) ds_info = msg_id.to_dict() ds_info.update(msg_info) ds_info['file_type'] = self.filetype_info['file_type'] self._msg_datasets[msg_id] = ds_info @staticmethod def _convert_datetime(msg, date_key, time_key, format="%Y%m%d%H%M"): date_str = "{:d}{:04d}".format(msg[date_key], msg[time_key]) return datetime.strptime(date_str, format) @property def start_time(self): """Get start time of this entire file. Assumes the first message is the earliest message. """ return self._start_time @property def end_time(self): """Get end time of this entire file. Assumes the last message is the latest message. """ return self._end_time def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file""" # previously configured or provided datasets # we can't provide any additional information for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info # new datasets for ds_info in self._msg_datasets.values(): yield True, ds_info def _get_message(self, ds_info): with pygrib.open(self.filename) as grib_file: if 'message' in ds_info: msg_num = ds_info['message'] msg = grib_file.message(msg_num) else: msg_keys = self.filetype_info['keys'].keys() msg = self._idx(**{k: ds_info[k] for k in msg_keys})[0] return msg def _area_def_from_msg(self, msg): proj_params = msg.projparams.copy() # correct for longitudes over 180 for lon_param in ['lon_0', 'lon_1', 'lon_2']: if proj_params.get(lon_param, 0) > 180: proj_params[lon_param] -= 360 if proj_params['proj'] == 'cyl': proj_params['proj'] = 'eqc' proj = Proj(**proj_params) lons = msg['distinctLongitudes'] lats = msg['distinctLatitudes'] min_lon = lons[0] max_lon = lons[-1] min_lat = lats[0] max_lat = lats[-1] if min_lat > max_lat: # lats aren't in the order we thought they were, flip them # we also need to flip the data in the data loading section min_lat, max_lat = max_lat, min_lat shape = (lats.shape[0], lons.shape[0]) min_x, min_y = proj(min_lon, min_lat) max_x, max_y = proj(max_lon, max_lat) if max_x < min_x and 'over' not in proj_params: # wrap around proj_params['over'] = True proj = Proj(**proj_params) max_x, max_y = proj(max_lon, max_lat) pixel_size_x = (max_x - min_x) / (shape[1] - 1) pixel_size_y = (max_y - min_y) / (shape[0] - 1) extents = ( min_x - pixel_size_x / 2., min_y - pixel_size_y / 2., max_x + pixel_size_x / 2., max_y + pixel_size_y / 2., ) else: lats, lons = msg.latlons() shape = lats.shape # take the corner points only lons = lons[([0, 0, -1, -1], [0, -1, 0, -1])] lats = lats[([0, 0, -1, -1], [0, -1, 0, -1])] # correct for longitudes over 180 lons[lons > 180] -= 360 proj = Proj(**proj_params) x, y = proj(lons, lats) if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: min_x, min_y = x[0], y[0] max_x, max_y = x[3], y[3] else: min_x, min_y = x[2], y[2] max_x, max_y = x[1], y[1] half_x = abs((max_x - min_x) / (shape[1] - 1)) / 2. half_y = abs((max_y - min_y) / (shape[0] - 1)) / 2. extents = (min_x - half_x, min_y - half_y, max_x + half_x, max_y + half_y) return geometry.AreaDefinition( 'on-the-fly grib area', 'on-the-fly grib area', 'on-the-fly grib area', proj_params, shape[1], shape[0], extents, ) def get_area_def(self, dsid): """Get area definition for message. If latlong grid then convert to valid eqc grid. 
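
        Lat/lon ("cyl") grids carry no metric projection coordinates, so
        ``_area_def_from_msg`` swaps the projection to equirectangular
        ("eqc") before computing the extents in metres.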
""" msg = self._get_message(self._msg_datasets[dsid]) try: return self._area_def_from_msg(msg) except (RuntimeError, KeyError): raise RuntimeError("Unknown GRIB projection information") def get_metadata(self, msg, ds_info): model_time = self._convert_datetime(msg, 'dataDate', 'dataTime') start_time = self._convert_datetime(msg, 'validityDate', 'validityTime') end_time = start_time try: center_description = msg['centreDescription'] except (RuntimeError, KeyError): center_description = None ds_info.update({ 'filename': self.filename, 'shortName': msg['shortName'], 'long_name': msg['name'], 'pressureUnits': msg['pressureUnits'], 'typeOfLevel': msg['typeOfLevel'], 'standard_name': msg['cfName'], 'units': msg['units'], 'modelName': msg['modelName'], 'model_time': model_time, 'centreDescription': center_description, 'valid_min': msg['minimum'], 'valid_max': msg['maximum'], 'start_time': start_time, 'end_time': end_time, 'sensor': msg['modelName'], # National Weather Prediction 'platform_name': 'unknown', }) return ds_info def get_dataset(self, dataset_id, ds_info): """Read a GRIB message into an xarray DataArray.""" msg = self._get_message(ds_info) ds_info = self.get_metadata(msg, ds_info) fill = msg['missingValue'] data = msg.values.astype(np.float32) if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): data = data.filled(np.nan) data = da.from_array(data, chunks=CHUNK_SIZE) else: data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) satpy-0.20.0/satpy/readers/hdf4_utils.py000066400000000000000000000103671362525524100201510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Helpers for reading hdf4-based files.""" import logging from pyhdf.SD import SD, SDC, SDS import dask.array as da import xarray as xr import numpy as np import six from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler LOG = logging.getLogger(__name__) HTYPE_TO_DTYPE = { SDC.INT8: np.int8, SDC.UCHAR: np.uint8, SDC.CHAR: np.int8, SDC.INT32: np.int32, SDC.INT16: np.int16, SDC.UINT8: np.uint8, SDC.UINT16: np.uint16, SDC.UINT32: np.uint32, SDC.FLOAT32: np.float32, SDC.FLOAT64: np.float64, } def from_sds(var, *args, **kwargs): """Create a dask array from a SD dataset.""" var.__dict__['dtype'] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) shape = var.info()[2] var.__dict__['shape'] = shape if isinstance(shape, (tuple, list)) else tuple(shape) return da.from_array(var, *args, **kwargs) class HDF4FileHandler(BaseFileHandler): """Base class for common HDF4 operations.""" def __init__(self, filename, filename_info, filetype_info): """Open file and collect information.""" super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = {} file_handle = SD(self.filename, SDC.READ) self._collect_attrs('', file_handle.attributes()) for k in file_handle.datasets().keys(): self.collect_metadata(k, file_handle.select(k)) del file_handle def _collect_attrs(self, name, attrs): for key, value in six.iteritems(attrs): value = np.squeeze(value) if issubclass(value.dtype.type, (np.string_, np.unicode_)) and not value.shape: value = value.item() # convert to scalar if not isinstance(value, str): # python 3 - was scalar numpy array of bytes # otherwise python 2 - scalar numpy array of 'str' value = value.decode() self.file_content["{}/attr/{}".format(name, key)] = value elif not value.shape: # convert to a scalar self.file_content["{}/attr/{}".format(name, key)] = value.item() else: self.file_content["{}/attr/{}".format(name, key)] = value def collect_metadata(self, name, obj): """Collect all metadata about file content.""" if isinstance(obj, SDS): self.file_content[name] = obj info = obj.info() self.file_content[name + "/dtype"] = np.dtype(HTYPE_TO_DTYPE.get(info[3])) self.file_content[name + "/shape"] = info[2] if isinstance(info[2], (int, float)) else tuple(info[2]) def _open_xarray_dataset(self, val, chunks=CHUNK_SIZE): """Read the band in blocks.""" dask_arr = from_sds(val, chunks=chunks) attrs = val.attributes() return xr.DataArray(dask_arr, dims=('y', 'x'), attrs=attrs) def __getitem__(self, key): """Get file content as xarray compatible objects.""" val = self.file_content[key] if isinstance(val, SDS): # these datasets are closed and inaccessible when the file is closed, need to reopen return self._open_xarray_dataset(val) return val def __contains__(self, item): """Check if item is in file content.""" return item in self.file_content def get(self, item, default=None): """Get variable as DataArray or return the default.""" if item in self: return self[item] else: return default satpy-0.20.0/satpy/readers/hdf5_utils.py000066400000000000000000000074701362525524100201530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2017, 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helpers for reading hdf5-based files.""" import logging import h5py import numpy as np import six import xarray as xr import dask.array as da from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy import CHUNK_SIZE LOG = logging.getLogger(__name__) class HDF5FileHandler(BaseFileHandler): """Small class for inspecting a HDF5 file and retrieve its metadata/header data.""" def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" super(HDF5FileHandler, self).__init__( filename, filename_info, filetype_info) self.file_content = {} try: file_handle = h5py.File(self.filename, 'r') except IOError: LOG.exception( 'Failed reading file %s. Possibly corrupted file', self.filename) raise file_handle.visititems(self.collect_metadata) self._collect_attrs('', file_handle.attrs) file_handle.close() def _collect_attrs(self, name, attrs): for key, value in six.iteritems(attrs): value = np.squeeze(value) fc_key = "{}/attr/{}".format(name, key) try: self.file_content[fc_key] = np2str(value) except ValueError: self.file_content[fc_key] = value except AttributeError: # A HDF5 reference ? value = self.get_reference(name, key) if value is None: LOG.warning("Value cannot be converted - skip setting attribute %s", fc_key) else: self.file_content[fc_key] = value def get_reference(self, name, key): """Get reference.""" with h5py.File(self.filename, 'r') as hf: if isinstance(hf[name].attrs[key], h5py.h5r.Reference): ref_name = h5py.h5r.get_name(hf[name].attrs[key], hf.id) return hf[ref_name][()] def collect_metadata(self, name, obj): """Collect metadata.""" if isinstance(obj, h5py.Dataset): self.file_content[name] = obj self.file_content[name + "/dtype"] = obj.dtype self.file_content[name + "/shape"] = obj.shape self._collect_attrs(name, obj.attrs) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, h5py.Dataset): # these datasets are closed and inaccessible when the file is closed, need to reopen dset = h5py.File(self.filename, 'r')[key] dset_data = da.from_array(dset, chunks=CHUNK_SIZE) if dset.ndim == 2: return xr.DataArray(dset_data, dims=['y', 'x'], attrs=dset.attrs) return xr.DataArray(dset_data, attrs=dset.attrs) return val def __contains__(self, item): """Get item from file content.""" return item in self.file_content def get(self, item, default=None): """Get item.""" if item in self: return self[item] else: return default satpy-0.20.0/satpy/readers/hdfeos_base.py000066400000000000000000000266161362525524100203520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Base HDF-EOS reader.""" import re import logging from datetime import datetime import xarray as xr import numpy as np from pyhdf.error import HDF4Error from pyhdf.SD import SD from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) def interpolate(clons, clats, csatz, src_resolution, dst_resolution): """Interpolate two parallel datasets jointly.""" from geotiepoints.modisinterpolator import modis_1km_to_250m, modis_1km_to_500m, modis_5km_to_1km interpolation_functions = { (5000, 1000): modis_5km_to_1km, (1000, 500): modis_1km_to_500m, (1000, 250): modis_1km_to_250m } try: interpolation_function = interpolation_functions[(src_resolution, dst_resolution)] except KeyError: error_message = "Interpolation from {}m to {}m not implemented".format( src_resolution, dst_resolution) raise NotImplementedError(error_message) logger.debug("Interpolating from {} to {}".format(src_resolution, dst_resolution)) return interpolation_function(clons, clats, csatz) class HDFEOSBaseFileReader(BaseFileHandler): """Base file handler for HDF EOS data for both L1b and L2 products.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the base reader.""" BaseFileHandler.__init__(self, filename, filename_info, filetype_info) try: self.sd = SD(self.filename) except HDF4Error as err: error_message = "Could not load data from file {}: {}".format(self.filename, err) raise ValueError(error_message) # Read metadata self.metadata = self.read_mda(self.sd.attributes()['CoreMetadata.0']) self.metadata.update(self.read_mda( self.sd.attributes()['StructMetadata.0']) ) self.metadata.update(self.read_mda( self.sd.attributes()['ArchiveMetadata.0']) ) @staticmethod def read_mda(attribute): """Read the EOS metadata.""" lines = attribute.split('\n') mda = {} current_dict = mda path = [] prev_line = None for line in lines: if not line: continue if line == 'END': break if prev_line: line = prev_line + line key, val = line.split('=') key = key.strip() val = val.strip() try: val = eval(val) except NameError: pass except SyntaxError: prev_line = line continue prev_line = None if key in ['GROUP', 'OBJECT']: new_dict = {} path.append(val) current_dict[val] = new_dict current_dict = new_dict elif key in ['END_GROUP', 'END_OBJECT']: if val != path[-1]: raise SyntaxError path = path[:-1] current_dict = mda for item in path: current_dict = current_dict[item] elif key in ['CLASS', 'NUM_VAL']: pass else: current_dict[key] = val return mda @property def start_time(self): """Get the start time of the dataset.""" date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' + self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE']) return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') @property def end_time(self): """Get the end time of the dataset.""" date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGDATE']['VALUE'] + ' ' + self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGTIME']['VALUE']) return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') def _read_dataset_in_file(self, dataset_name): if dataset_name not in self.sd.datasets(): error_message = "Dataset name {} not included in available datasets {}".format( dataset_name, self.sd.datasets() ) raise KeyError(error_message) dataset = self.sd.select(dataset_name) return dataset def load_dataset(self, 
dataset_name):
        """Load the dataset from HDF EOS file."""
        from satpy.readers.hdf4_utils import from_sds
        dataset = self._read_dataset_in_file(dataset_name)
        fill_value = dataset._FillValue
        dask_arr = from_sds(dataset, chunks=CHUNK_SIZE)
        dims = ('y', 'x') if dask_arr.ndim == 2 else None
        data = xr.DataArray(dask_arr, dims=dims,
                            attrs=dataset.attributes())

        # preserve integer data types if possible
        if np.issubdtype(data.dtype, np.integer):
            new_fill = fill_value
        else:
            new_fill = np.nan
            data.attrs.pop('_FillValue', None)
        good_mask = data != fill_value

        scale_factor = data.attrs.get('scale_factor')
        if scale_factor is not None:
            data = data * scale_factor

        data = data.where(good_mask, new_fill)
        return data


class HDFEOSGeoReader(HDFEOSBaseFileReader):
    """Handler for the geographical datasets."""

    # list of geographical datasets handled by the georeader
    # mapping to the default variable name if not specified in YAML
    DATASET_NAMES = {
        'longitude': 'Longitude',
        'latitude': 'Latitude',
        'satellite_azimuth_angle': ('SensorAzimuth', 'Sensor_Azimuth'),
        'satellite_zenith_angle': ('SensorZenith', 'Sensor_Zenith'),
        'solar_azimuth_angle': ('SolarAzimuth', 'Solar_Azimuth'),
        'solar_zenith_angle': ('SolarZenith', 'Solar_Zenith'),
    }

    def __init__(self, filename, filename_info, filetype_info):
        """Initialize the geographical reader."""
        HDFEOSBaseFileReader.__init__(self, filename, filename_info,
                                      filetype_info)
        self.cache = {}

    @staticmethod
    def read_geo_resolution(metadata):
        """Parse metadata to find the geolocation resolution.

        It is implemented as a staticmethod to match the read_mda pattern.
        """
        # level 1 files
        try:
            ds = metadata['INVENTORYMETADATA']['COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE']
            if ds.endswith('D03'):
                return 1000
            else:
                # 1km files have 5km geolocation usually
                return 5000
        except KeyError:
            pass

        # level 2 files: data files probably have this
        # this does not work for L1B 1KM data files because they are listed
        # as 1KM data but the geo data inside is at 5km
        try:
            latitude_dim = metadata['SwathStructure']['SWATH_1']['DimensionMap']['DimensionMap_2']['GeoDimension']
            resolution_regex = re.compile(r'(?P<resolution>\d+)(km|KM)')
            resolution_match = resolution_regex.search(latitude_dim)
            return int(resolution_match.group('resolution')) * 1000
        except (AttributeError, KeyError):
            pass

        raise RuntimeError("Could not determine resolution from file metadata")

    @property
    def geo_resolution(self):
        """Resolution of the geographical data retrieved in the metadata."""
        return self.read_geo_resolution(self.metadata)

    def _load_ds_by_name(self, ds_name):
        """Attempt loading using multiple common names."""
        var_names = self.DATASET_NAMES[ds_name]
        if isinstance(var_names, (list, tuple)):
            try:
                return self.load_dataset(var_names[0])
            except KeyError:
                return self.load_dataset(var_names[1])
        return self.load_dataset(var_names)

    def get_interpolated_dataset(self, name1, name2, resolution,
                                 sensor_zenith, offset=0):
        """Load and interpolate datasets."""
        try:
            result1 = self.cache[(name1, resolution)]
            result2 = self.cache[(name2, resolution)]
        except KeyError:
            result1 = self._load_ds_by_name(name1)
            result2 = self._load_ds_by_name(name2) - offset
            result1, result2 = interpolate(
                result1, result2, sensor_zenith,
                self.geo_resolution, resolution
            )
            self.cache[(name1, resolution)] = result1
            self.cache[(name2, resolution)] = result2 + offset

    def get_dataset(self, dataset_keys, dataset_info):
        """Get the geolocation dataset."""
        # Name of the dataset as it appears in the HDF EOS file
        in_file_dataset_name = dataset_info.get('file_key')
        # Name of the dataset in the YAML file
dataset_name = dataset_keys.name # Resolution asked resolution = dataset_keys.resolution if in_file_dataset_name is not None: # if the YAML was configured with a specific name use that data = self.load_dataset(in_file_dataset_name) else: # otherwise use the default name for this variable data = self._load_ds_by_name(dataset_name) if resolution != self.geo_resolution: if in_file_dataset_name is not None: # they specified a custom variable name but # we don't know how to interpolate this yet raise NotImplementedError( "Interpolation for variable '{}' is not " "configured".format(dataset_name)) # The data must be interpolated sensor_zenith = self._load_ds_by_name('satellite_zenith_angle') logger.debug("Loading %s", dataset_name) if dataset_name in ['longitude', 'latitude']: self.get_interpolated_dataset('longitude', 'latitude', resolution, sensor_zenith) elif dataset_name in ['satellite_azimuth_angle', 'satellite_zenith_angle']: # Sensor dataset names differs between L1b and L2 products self.get_interpolated_dataset('satellite_azimuth_angle', 'satellite_zenith_angle', resolution, sensor_zenith, offset=90) elif dataset_name in ['solar_azimuth_angle', 'solar_zenith_angle']: # Sensor dataset names differs between L1b and L2 products self.get_interpolated_dataset('solar_azimuth_angle', 'solar_zenith_angle', resolution, sensor_zenith, offset=90) data = self.cache[dataset_name, resolution] for key in ('standard_name', 'units'): if key in dataset_info: data.attrs[key] = dataset_info[key] return data satpy-0.20.0/satpy/readers/hrit_base.py000066400000000000000000000274751362525524100200540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HRIT/LRIT format reader. This module is the base module for all HRIT-based formats. Here, you will find the common building blocks for hrit reading. One of the features here is the on-the-fly decompression of hrit files. It needs a path to the xRITDecompress binary to be provided through the environment variable called XRIT_DECOMPRESS_PATH. When compressed hrit files are then encountered (files finishing with `.C_`), they are decompressed to the system's temporary directory for reading. 
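
For example (the install location here is hypothetical)::

    export XRIT_DECOMPRESS_PATH=/usr/local/bin/xRITDecompress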
""" import logging from datetime import timedelta from tempfile import gettempdir import os from six import BytesIO from subprocess import Popen, PIPE import numpy as np import xarray as xr import dask.array as da from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.eum_base import time_cds_short from satpy.readers.seviri_base import dec10216 logger = logging.getLogger('hrit_base') common_hdr = np.dtype([('hdr_id', 'u1'), ('record_length', '>u2')]) primary_header = np.dtype([('file_type', 'u1'), ('total_header_length', '>u4'), ('data_field_length', '>u8')]) image_structure = np.dtype([('number_of_bits_per_pixel', 'u1'), ('number_of_columns', '>u2'), ('number_of_lines', '>u2'), ('compression_flag_for_data', 'u1')]) image_navigation = np.dtype([('projection_name', 'S32'), ('cfac', '>i4'), ('lfac', '>i4'), ('coff', '>i4'), ('loff', '>i4')]) image_data_function = np.dtype([('function', '|S1')]) annotation_header = np.dtype([('annotation', '|S1')]) timestamp_record = np.dtype([('cds_p_field', 'u1'), ('timestamp', time_cds_short)]) ancillary_text = np.dtype([('ancillary', '|S1')]) key_header = np.dtype([('key', '|S1')]) base_variable_length_headers = {} base_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text', key_header: 'key_header'} base_hdr_map = {0: primary_header, 1: image_structure, 2: image_navigation, 3: image_data_function, 4: annotation_header, 5: timestamp_record, 6: ancillary_text, 7: key_header, } def get_xritdecompress_cmd(): """Find a valid binary for the xRITDecompress command.""" cmd = os.environ.get('XRIT_DECOMPRESS_PATH', None) if not cmd: raise IOError("XRIT_DECOMPRESS_PATH is not defined (complete path to xRITDecompress)") question = ("Did you set the environment variable XRIT_DECOMPRESS_PATH correctly?") if not os.path.exists(cmd): raise IOError(str(cmd) + " does not exist!\n" + question) elif os.path.isdir(cmd): raise IOError(str(cmd) + " is a directory!\n" + question) return cmd def get_xritdecompress_outfile(stdout): """Analyse the output of the xRITDecompress command call and return the file.""" outfile = b'' for line in stdout: try: k, v = [x.strip() for x in line.split(b':', 1)] except ValueError: break if k == b'Decompressed file': outfile = v break return outfile def decompress(infile, outdir='.'): """Decompress an XRIT data file and return the path to the decompressed file. It expect to find Eumetsat's xRITDecompress through the environment variable XRIT_DECOMPRESS_PATH. 
""" cmd = get_xritdecompress_cmd() infile = os.path.abspath(infile) cwd = os.getcwd() os.chdir(outdir) p = Popen([cmd, infile], stdout=PIPE) stdout = BytesIO(p.communicate()[0]) status = p.returncode os.chdir(cwd) if status != 0: raise IOError("xrit_decompress '%s', failed, status=%d" % (infile, status)) outfile = get_xritdecompress_outfile(stdout) if not outfile: raise IOError("xrit_decompress '%s', failed, no output file is generated" % infile) return os.path.join(outdir, outfile.decode('utf-8')) class HRITFileHandler(BaseFileHandler): """HRIT standard format reader.""" def __init__(self, filename, filename_info, filetype_info, hdr_info): """Initialize the reader.""" super(HRITFileHandler, self).__init__(filename, filename_info, filetype_info) self.mda = {} self._get_hd(hdr_info) if self.mda.get('compression_flag_for_data'): logger.debug('Unpacking %s', filename) try: self.filename = decompress(filename, gettempdir()) except IOError as err: logger.warning("Unpacking failed: %s", str(err)) self.mda = {} self._get_hd(hdr_info) self._start_time = filename_info['start_time'] self._end_time = self._start_time + timedelta(minutes=15) def _get_hd(self, hdr_info): """Open the file, read and get the basic file header info and set the mda dictionary.""" hdr_map, variable_length_headers, text_headers = hdr_info with open(self.filename) as fp: total_header_length = 16 while fp.tell() < total_header_length: hdr_id = np.fromfile(fp, dtype=common_hdr, count=1)[0] the_type = hdr_map[hdr_id['hdr_id']] if the_type in variable_length_headers: field_length = int((hdr_id['record_length'] - 3) / the_type.itemsize) current_hdr = np.fromfile(fp, dtype=the_type, count=field_length) key = variable_length_headers[the_type] if key in self.mda: if not isinstance(self.mda[key], list): self.mda[key] = [self.mda[key]] self.mda[key].append(current_hdr) else: self.mda[key] = current_hdr elif the_type in text_headers: field_length = int((hdr_id['record_length'] - 3) / the_type.itemsize) char = list(the_type.fields.values())[0][0].char new_type = np.dtype(char + str(field_length)) current_hdr = np.fromfile(fp, dtype=new_type, count=1)[0] self.mda[text_headers[the_type]] = current_hdr else: current_hdr = np.fromfile(fp, dtype=the_type, count=1)[0] self.mda.update( dict(zip(current_hdr.dtype.names, current_hdr))) total_header_length = self.mda['total_header_length'] self.mda.setdefault('number_of_bits_per_pixel', 10) self.mda['projection_parameters'] = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, # FIXME: find a reasonable SSP 'SSP_longitude': 0.0} self.mda['orbital_parameters'] = {} def get_shape(self, dsid, ds_info): """Get shape.""" return int(self.mda['number_of_lines']), int(self.mda['number_of_columns']) @property def start_time(self): """Get start time.""" return self._start_time @property def end_time(self): """Get end time.""" return self._end_time def get_dataset(self, key, info): """Load a dataset.""" # Read bands data = self.read_band(key, info) # Convert to xarray xdata = xr.DataArray(data, dims=['y', 'x']) return xdata def get_xy_from_linecol(self, line, col, offsets, factors): """Get the intermediate coordinates from line & col. Intermediate coordinates are actually the instruments scanning angles. 
""" loff, coff = offsets lfac, cfac = factors x__ = (col - coff) / cfac * 2**16 y__ = (line - loff) / lfac * 2**16 return x__, y__ def get_area_extent(self, size, offsets, factors, platform_height): """Get the area extent of the file.""" nlines, ncols = size h = platform_height # count starts at 1 cols = 1 - 0.5 lines = 1 - 0.5 ll_x, ll_y = self.get_xy_from_linecol(lines, cols, offsets, factors) cols += ncols lines += nlines ur_x, ur_y = self.get_xy_from_linecol(lines, cols, offsets, factors) return (np.deg2rad(ll_x) * h, np.deg2rad(ll_y) * h, np.deg2rad(ur_x) * h, np.deg2rad(ur_y) * h) def get_area_def(self, dsid): """Get the area definition of the band.""" cfac = np.int32(self.mda['cfac']) lfac = np.int32(self.mda['lfac']) coff = np.float32(self.mda['coff']) loff = np.float32(self.mda['loff']) a = self.mda['projection_parameters']['a'] b = self.mda['projection_parameters']['b'] h = self.mda['projection_parameters']['h'] lon_0 = self.mda['projection_parameters']['SSP_longitude'] nlines = int(self.mda['number_of_lines']) ncols = int(self.mda['number_of_columns']) area_extent = self.get_area_extent((nlines, ncols), (loff, coff), (lfac, cfac), h) proj_dict = {'a': float(a), 'b': float(b), 'lon_0': float(lon_0), 'h': float(h), 'proj': 'geos', 'units': 'm'} area = geometry.AreaDefinition( 'some_area_name', "On-the-fly area", 'geosmsg', proj_dict, ncols, nlines, area_extent) self.area = area return area def read_band(self, key, info): """Read the data.""" shape = int(np.ceil(self.mda['data_field_length'] / 8.)) if self.mda['number_of_bits_per_pixel'] == 16: dtype = '>u2' shape //= 2 elif self.mda['number_of_bits_per_pixel'] in [8, 10]: dtype = np.uint8 shape = (shape, ) data = np.memmap(self.filename, mode='r', offset=self.mda['total_header_length'], dtype=dtype, shape=shape) data = da.from_array(data, chunks=shape[0]) if self.mda['number_of_bits_per_pixel'] == 10: data = dec10216(data) data = data.reshape((self.mda['number_of_lines'], self.mda['number_of_columns'])) return data satpy-0.20.0/satpy/readers/hrit_jma.py000066400000000000000000000270421362525524100176770ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HRIT format reader for JMA data. 
References: JMA HRIT - Mission Specific Implementation http://www.jma.go.jp/jma/jma-eng/satellite/introduction/4_2HRIT.pdf """ import logging from datetime import datetime import numpy as np import xarray as xr from satpy.readers.hrit_base import (HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function) from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.utils import get_geostationary_mask logger = logging.getLogger('hrit_jma') # JMA implementation: key_header = np.dtype([('key_number', 'u4')]) segment_identification = np.dtype([('image_segm_seq_no', '>u1'), ('total_no_image_segm', '>u1'), ('line_no_image_segm', '>u2')]) encryption_key_message = np.dtype([('station_number', '>u2')]) image_compensation_information = np.dtype([('compensation', '|S1')]) image_observation_time = np.dtype([('times', '|S1')]) image_quality_information = np.dtype([('quality', '|S1')]) jma_variable_length_headers = {} jma_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text', image_compensation_information: 'image_compensation_information', image_observation_time: 'image_observation_time', image_quality_information: 'image_quality_information'} jma_hdr_map = base_hdr_map.copy() jma_hdr_map.update({7: key_header, 128: segment_identification, 129: encryption_key_message, 130: image_compensation_information, 131: image_observation_time, 132: image_quality_information }) cuc_time = np.dtype([('coarse', 'u1', (4, )), ('fine', 'u1', (3, ))]) time_cds_expanded = np.dtype([('days', '>u2'), ('milliseconds', '>u4'), ('microseconds', '>u2'), ('nanoseconds', '>u2')]) FULL_DISK = 1 NORTH_HEMIS = 2 SOUTH_HEMIS = 3 UNKNOWN_AREA = -1 AREA_NAMES = {FULL_DISK: {'short': 'FLDK', 'long': 'Full Disk'}, NORTH_HEMIS: {'short': 'NH', 'long': 'Northern Hemisphere'}, SOUTH_HEMIS: {'short': 'SH', 'long': 'Southern Hemisphere'}, UNKNOWN_AREA: {'short': 'UNKNOWN', 'long': 'Unknown Area'}} MTSAT1R = 'MTSAT-1R' MTSAT2 = 'MTSAT-2' HIMAWARI8 = 'Himawari-8' UNKNOWN_PLATFORM = 'Unknown Platform' PLATFORMS = { 'GEOS(140.00)': MTSAT1R, 'GEOS(140.25)': MTSAT1R, 'GEOS(140.70)': HIMAWARI8, 'GEOS(145.00)': MTSAT2, } SENSORS = { MTSAT1R: 'jami', MTSAT2: 'mtsat2_imager', HIMAWARI8: 'ahi' } class HRITJMAFileHandler(HRITFileHandler): """JMA HRIT format reader.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(HRITJMAFileHandler, self).__init__(filename, filename_info, filetype_info, (jma_hdr_map, jma_variable_length_headers, jma_text_headers)) self.mda['segment_sequence_number'] = self.mda['image_segm_seq_no'] self.mda['planned_end_segment_number'] = self.mda['total_no_image_segm'] self.mda['planned_start_segment_number'] = 1 items = self.mda['image_data_function'].decode().split('\r') if items[0].startswith('$HALFTONE'): self.calibration_table = [] for item in items[1:]: if item == '': continue key, value = item.split(':=') if key.startswith('_UNIT'): self.mda['unit'] = item.split(':=')[1] elif key.startswith('_NAME'): pass elif key.isdigit(): key = int(key) value = float(value) self.calibration_table.append((key, value)) self.calibration_table = np.array(self.calibration_table) self.projection_name = self.mda['projection_name'].decode().strip() sublon = float(self.projection_name.split('(')[1][:-1]) self.mda['projection_parameters']['SSP_longitude'] = sublon self.platform = self._get_platform() self.is_segmented = self.mda['segment_sequence_number'] > 0 
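        # A segment sequence number of 0 indicates an unsegmented
        # (single-file) scene.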
self.area_id = filename_info.get('area', UNKNOWN_AREA) if self.area_id not in AREA_NAMES: self.area_id = UNKNOWN_AREA self.area = self._get_area_def() def _get_platform(self): """Get the platform name. The platform is not specified explicitly in JMA HRIT files. For segmented data it is not even specified in the filename. But it can be derived indirectly from the projection name: GEOS(140.00): MTSAT-1R GEOS(140.25): MTSAT-1R # TODO: Check if there is more... GEOS(140.70): Himawari-8 GEOS(145.00): MTSAT-2 See [MTSAT], section 3.1. Unfortunately Himawari-8 and 9 are not distinguishable using that method at the moment. From [HIMAWARI]: "HRIT/LRIT files have the same file naming convention in the same format in Himawari-8 and Himawari-9, so there is no particular difference." TODO: Find another way to distinguish Himawari-8 and 9. References: [MTSAT] http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html [HIMAWARI] http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html """ try: return PLATFORMS[self.projection_name] except KeyError: logger.error('Unable to determine platform: Unknown projection ' 'name "{}"'.format(self.projection_name)) return UNKNOWN_PLATFORM def _check_sensor_platform_consistency(self, sensor): """Make sure sensor and platform are consistent. Args: sensor (str) : Sensor name from YAML dataset definition Raises: ValueError if they don't match """ ref_sensor = SENSORS.get(self.platform, None) if ref_sensor and not sensor == ref_sensor: logger.error('Sensor-Platform mismatch: {} is not a payload ' 'of {}. Did you choose the correct reader?' .format(sensor, self.platform)) def _get_line_offset(self): """Get line offset for the current segment. Read line offset from the file and adapt it to the current segment or half disk scan so that y(l) ~ l - loff because this is what get_geostationary_area_extent() expects. 
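
        In other words (a plain restatement of the code below): the last
        segment keeps the file value unchanged, while each earlier segment
        is shifted by ``nlines`` for every segment that follows it.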
""" # Get line offset from the file nlines = int(self.mda['number_of_lines']) loff = np.float32(self.mda['loff']) # Adapt it to the current segment if self.is_segmented: # loff in the file specifies the offset of the full disk image # centre (1375/2750 for VIS/IR) segment_number = self.mda['segment_sequence_number'] - 1 loff -= (self.mda['total_no_image_segm'] - segment_number - 1) * nlines elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS): # loff in the file specifies the start line of the half disk image # in the full disk image loff = nlines - loff elif self.area_id == UNKNOWN_AREA: logger.error('Cannot compute line offset for unknown area') return loff def _get_area_def(self): """Get the area definition of the band.""" pdict = { 'cfac': np.int32(self.mda['cfac']), 'lfac': np.int32(self.mda['lfac']), 'coff': np.float32(self.mda['coff']), 'loff': self._get_line_offset(), 'ncols': int(self.mda['number_of_columns']), 'nlines': int(self.mda['number_of_lines']), 'scandir': 'N2S', 'a': float(self.mda['projection_parameters']['a']), 'b': float(self.mda['projection_parameters']['b']), 'h': float(self.mda['projection_parameters']['h']), 'ssp_lon': float(self.mda['projection_parameters']['SSP_longitude']), 'a_name': AREA_NAMES[self.area_id]['short'], 'a_desc': AREA_NAMES[self.area_id]['long'], 'p_id': 'geosmsg' } area_extent = get_area_extent(pdict) return get_area_definition(pdict, area_extent) def get_area_def(self, dsid): """Get the area definition of the band.""" return self.area def get_dataset(self, key, info): """Get the dataset designated by *key*.""" res = super(HRITJMAFileHandler, self).get_dataset(key, info) # Filenames of segmented data is identical for MTSAT-1R, MTSAT-2 # and Himawari-8/9. Make sure we have the correct reader for the data # at hand. self._check_sensor_platform_consistency(info['sensor']) # Calibrate and mask space pixels res = self._mask_space(self.calibrate(res, key.calibration)) # Update attributes res.attrs.update(info) res.attrs['platform_name'] = self.platform res.attrs['satellite_longitude'] = float(self.mda['projection_parameters']['SSP_longitude']) res.attrs['satellite_latitude'] = 0. res.attrs['satellite_altitude'] = float(self.mda['projection_parameters']['h']) res.attrs['orbital_parameters'] = { 'projection_longitude': float(self.mda['projection_parameters']['SSP_longitude']), 'projection_latitude': 0., 'projection_altitude': float(self.mda['projection_parameters']['h'])} return res def _mask_space(self, data): """Mask space pixels.""" geomask = get_geostationary_mask(area=self.area) return data.where(geomask) @staticmethod def _interp(arr, cal): return np.interp(arr.ravel(), cal[:, 0], cal[:, 1]).reshape(arr.shape) def calibrate(self, data, calibration): """Calibrate the data.""" tic = datetime.now() if calibration == 'counts': return data elif calibration == 'radiance': raise NotImplementedError("Can't calibrate to radiance.") else: cal = self.calibration_table res = data.data.map_blocks(self._interp, cal, dtype=cal[:, 0].dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.where(data < 65535) logger.debug("Calibration time " + str(datetime.now() - tic)) return res satpy-0.20.0/satpy/readers/hrpt.py000066400000000000000000000214121362525524100170520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2016 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reading and calibrating hrpt avhrr data. Todo: - AMSU - Compare output with AAPP Reading: http://www.ncdc.noaa.gov/oa/pod-guide/ncdc/docs/klm/html/c4/sec4-1.htm#t413-1 Calibration: http://www.ncdc.noaa.gov/oa/pod-guide/ncdc/docs/klm/html/c7/sec7-1.htm """ import logging from datetime import datetime import numpy as np try: from pygac.calibration import calibrate_solar, calibrate_thermal except ImportError: from pygac.gac_calibration import calibrate_solar, calibrate_thermal from satpy.dataset import Dataset from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", "4", "5") dtype = np.dtype([('frame_sync', '>u2', (6, )), ('id', [('id', '>u2'), ('spare', '>u2')]), ('timecode', '>u2', (4, )), ('telemetry', [("ramp_calibration", '>u2', (5, )), ("PRT", '>u2', (3, )), ("ch3_patch_temp", '>u2'), ("spare", '>u2'), ]), ('back_scan', '>u2', (10, 3)), ('space_data', '>u2', (10, 5)), ('sync', '>u2'), ('TIP_data', '>u2', (520, )), ('spare', '>u2', (127, )), ('image_data', '>u2', (2048, 5)), ('aux_sync', '>u2', (100, ))]) def time_seconds(tc_array, year): """Return the time object from the timecodes """ tc_array = np.array(tc_array, copy=True) word = tc_array[:, 0] day = word >> 1 word = tc_array[:, 1].astype(np.uint64) msecs = ((127) & word) * 1024 word = tc_array[:, 2] msecs += word & 1023 msecs *= 1024 word = tc_array[:, 3] msecs += word & 1023 return (np.datetime64( str(year) + '-01-01T00:00:00Z', 's') + msecs[:].astype('timedelta64[ms]') + (day - 1)[:].astype('timedelta64[D]')) def bfield(array, bit): """return the bit array. """ return (array & 2**(9 - bit + 1)).astype(np.bool) spacecrafts = {7: "NOAA 15", 3: "NOAA 16", 13: "NOAA 18", 15: "NOAA 19"} def geo_interpolate(lons32km, lats32km): from geotiepoints import SatelliteInterpolator cols32km = np.arange(0, 2048, 32) cols1km = np.arange(2048) lines = lons32km.shape[0] rows32km = np.arange(lines) rows1km = np.arange(lines) along_track_order = 1 cross_track_order = 3 satint = SatelliteInterpolator( (lons32km, lats32km), (rows32km, cols32km), (rows1km, cols1km), along_track_order, cross_track_order) lons, lats = satint.interpolate() return lons, lats class HRPTFile(BaseFileHandler): """Reader for HRPT Minor Frame, 10 bits data expanded to 16 bits. 
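
    Each record of the memory-mapped file is one HRPT minor frame, read as
    big-endian 16-bit words according to the module-level ``dtype`` above.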
""" def __init__(self, filename, filename_info, filetype_info): super(HRPTFile, self).__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} self._data = None self._is3b = None self.lons = None self.lats = None self.area = None self.platform_name = None self.year = filename_info.get('start_time', datetime.utcnow()).year self.times = None self.prt = None self.ict = None self.space = None self.read() def read(self): with open(self.filename, "rb") as fp_: self._data = np.memmap(fp_, dtype=dtype, mode="r") if np.all(self._data['frame_sync'][0] > 1024): self._data = self._data.newbyteorder() self.platform_name = spacecrafts[ (self._data["id"]["id"][0] >> 3) & 15] def get_dataset(self, key, info): if self._data is None: self.read() if key.name in ['latitude', 'longitude']: lons, lats = self.get_lonlats() if key.name == 'latitude': return Dataset(lats, id=key) else: return Dataset(lons, id=key) avhrr_channel_index = {'1': 0, '2': 1, '3a': 2, '3b': 2, '4': 3, '5': 4} index = avhrr_channel_index[key.name] mask = False if key.name in ['3a', '3b'] and self._is3b is None: ch3a = bfield(self._data["id"]["id"], 10) self._is3b = np.logical_not(ch3a) if key.name == '3a': mask = np.tile(self._is3b, (1, 2048)) elif key.name == '3b': mask = np.tile(np.logical_not(self._is3b), (1, 2048)) data = self._data["image_data"][:, :, index] if key.calibration == 'counts': return Dataset(data, mask=mask, area=self.get_lonlats(), units='1') pg_spacecraft = ''.join(self.platform_name.split()).lower() jdays = (np.datetime64(self.start_time) - np.datetime64(str( self.year) + '-01-01T00:00:00Z')) / np.timedelta64(1, 'D') if index < 2 or key.name == '3a': data = calibrate_solar(data, index, self.year, jdays, pg_spacecraft) units = '%' if index > 2 or key.name == '3b': if self.times is None: self.times = time_seconds(self._data["timecode"], self.year) line_numbers = ( np.round((self.times - self.times[-1]) / np.timedelta64(166666667, 'ns'))).astype(np.int) line_numbers -= line_numbers[0] if self.prt is None: self.prt, self.ict, self.space = self.get_telemetry() chan = index + 1 data = calibrate_thermal(data, self.prt, self.ict[:, chan - 3], self.space[:, chan - 3], line_numbers, chan, pg_spacecraft) units = 'K' # TODO: check if entirely masked before returning return Dataset(data, mask=mask, units=units) def get_telemetry(self): prt = np.mean(self._data["telemetry"]['PRT'], axis=1) ict = np.empty((len(self._data), 3)) for i in range(3): ict[:, i] = np.mean(self._data['back_scan'][:, :, i], axis=1) space = np.empty((len(self._data), 3)) for i in range(3): space[:, i] = np.mean(self._data['space_data'][ :, :, i + 2], axis=1) return prt, ict, space def get_lonlats(self): if self.lons is not None and self.lats is not None: return self.lons, self.lats from pyorbital.orbital import Orbital from pyorbital.geoloc import compute_pixels, get_lonlatalt from pyorbital.geoloc_instrument_definitions import avhrr if self.times is None: self.times = time_seconds(self._data["timecode"], self.year) scanline_nb = len(self.times) scan_points = np.arange(0, 2048, 32) # scan_points = np.arange(2048) sgeom = avhrr(scanline_nb, scan_points, apply_offset=False) # no attitude error rpy = [0, 0, 0] s_times = sgeom.times( self.times[:, np.newaxis]).ravel() # s_times = (np.tile(sgeom._times[0, :], (scanline_nb, 1)).astype( # 'timedelta64[s]') + self.times[:, np.newaxis]).ravel() orb = Orbital(self.platform_name) pixels_pos = compute_pixels(orb, sgeom, 
s_times, rpy) lons, lats, alts = get_lonlatalt(pixels_pos, s_times) self.lons, self.lats = geo_interpolate( lons.reshape((scanline_nb, -1)), lats.reshape((scanline_nb, -1))) return self.lons, self.lats @property def start_time(self): return time_seconds(self._data["timecode"][0, np.newaxis, :], self.year).astype(datetime)[0] @property def end_time(self): return time_seconds(self._data["timecode"][-1, np.newaxis, :], self.year).astype(datetime)[0] satpy-0.20.0/satpy/readers/hsaf_grib.py000066400000000000000000000131471362525524100200270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019. # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """A reader for files produced by the Hydrology SAF Currently this reader depends on the `pygrib` python package. The `eccodes` package from ECMWF is preferred, but does not support python 3 at the time of writing. """ import logging import numpy as np import xarray as xr import dask.array as da from pyresample import geometry from datetime import datetime, timedelta from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler import pygrib LOG = logging.getLogger(__name__) CF_UNITS = { 'none': '1', } class HSAFFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(HSAFFileHandler, self).__init__(filename, filename_info, filetype_info) self._msg_datasets = {} self._start_time = None self._end_time = None try: with pygrib.open(self.filename) as grib_file: first_msg = grib_file.message(1) analysis_time = self._get_datetime(first_msg) self._analysis_time = analysis_time self.metadata = self.get_metadata(first_msg) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) @staticmethod def _get_datetime(msg): dtstr = str(msg['dataDate']) + str(msg['dataTime']).zfill(4) return datetime.strptime(dtstr, "%Y%m%d%H%M") @property def analysis_time(self): """ Get validity time of this file """ return self._analysis_time def get_metadata(self, msg): try: center_description = msg['centreDescription'] except (RuntimeError, KeyError): center_description = None ds_info = { 'filename': self.filename, 'shortName': msg['shortName'], 'long_name': msg['name'], 'units': msg['units'], 'centreDescription': center_description, 'data_time': self._analysis_time, 'nx': msg['Nx'], 'ny': msg['Ny'], 'projparams': msg.projparams } return ds_info def get_area_def(self, dsid): """ Get area definition for message. """ msg = self._get_message(1) try: return self._get_area_def(msg) except (RuntimeError, KeyError): raise RuntimeError("Unknown GRIB projection information") def _get_area_def(self, msg): """ Get the area definition of the datasets in the file. """ proj_param = msg.projparams.copy() Rx = 2 * np.arcsin(1. / msg['NrInRadiusOfEarth']) / msg['dx'] Ry = 2 * np.arcsin(1. 
/ msg['NrInRadiusOfEarth']) / msg['dy'] x_0 = - msg['XpInGridLengths'] x_1 = msg['Nx'] - msg['XpInGridLengths'] y_0 = (msg['Ny'] - msg['YpInGridLengths']) * -1 y_1 = msg['YpInGridLengths'] min_x = (x_0 * Rx) * proj_param['h'] max_x = (x_1 * Rx) * proj_param['h'] min_y = (y_0 * Ry) * proj_param['h'] max_y = (y_1 * Ry) * proj_param['h'] area_extent = (min_x, min_y, max_x, max_y) area = geometry.AreaDefinition('hsaf_region', 'A region from H-SAF', 'geos', proj_param, msg['Nx'], msg['Ny'], area_extent) return area def _get_message(self, idx): with pygrib.open(self.filename) as grib_file: msg = grib_file.message(idx) return msg def get_dataset(self, ds_id, ds_info): """Read a GRIB message into an xarray DataArray.""" if (ds_id.name not in self.filename): raise IOError("File does not contain {} data".format(ds_id.name)) msg = self._get_message(1) ds_info = self.get_metadata(msg) ds_info['end_time'] = ds_info['data_time'] if (ds_id.name == 'h05' or ds_id.name == 'h05B'): flen = len(self.filename) timedelt = self.filename[flen-10:flen-8] ds_info['start_time'] = (ds_info['end_time'] - timedelta(hours=int(timedelt))) else: ds_info['start_time'] = ds_info['end_time'] fill = msg['missingValue'] data = msg.values.astype(np.float32) if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): data = data.filled(np.nan) data = da.from_array(data, chunks=CHUNK_SIZE) else: data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) satpy-0.20.0/satpy/readers/iasi_l2.py000066400000000000000000000130411362525524100174160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """IASI L2 HDF5 files. """ import h5py import numpy as np import xarray as xr import dask.array as da import datetime as dt import logging from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE # Scan timing values taken from # http://oiswww.eumetsat.org/WEBOPS/eps-pg/IASI-L1/IASIL1-PG-4ProdOverview.htm # Time between each scan in one scanline [ms] SCAN_STEP_TIME = 8. / 37. # Duration of one measurement [ms] SCAN_STARE_DURATION = 151.0 # Time correction used between each 4-footprint measurements VIEW_TIME_ADJUSTMENT = SCAN_STEP_TIME + SCAN_STARE_DURATION / 2. 
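# Sketch of the timing model applied in _form_datetimes() below (an
# illustration derived from this code, not from the product format spec):
# footprint group j of a scanline whose measurement started ``msec``
# milliseconds into the day is stamped at
#     usec = 1000 * (j * VIEW_TIME_ADJUSTMENT + msec)
# microseconds, so successive groups of four IASI footprints are spaced by
# VIEW_TIME_ADJUSTMENT = 8. / 37. + 151.0 / 2. ~= 75.72 ms.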
VALUES_PER_SCAN_LINE = 120 # Epoch for the dates EPOCH = dt.datetime(2000, 1, 1) SHORT_NAMES = {'M01': 'Metop-B', 'M02': 'Metop-A', 'M03': 'Metop-C'} DSET_NAMES = {'ozone_mixing_ratio': 'O', 'ozone_mixing_ratio_quality': 'QO', 'pressure': 'P', 'pressure_quality': 'QP', 'temperature': 'T', 'temperature_quality': 'QT', 'water_mixing_ratio': 'W', 'water_mixing_ratio_quality': 'QW', 'water_total_column': 'WC', 'ozone_total_column': 'OC', 'surface_skin_temperature': 'Ts', 'surface_skin_temperature_quality': 'QTs', 'emissivity': 'E', 'emissivity_quality': 'QE'} GEO_NAMES = {'latitude': 'Latitude', 'longitude': 'Longitude', 'satellite_azimuth_angle': 'SatAzimuth', 'satellite_zenith_angle': 'SatZenith', 'sensing_time': {'day': 'SensingTime_day', 'msec': 'SensingTime_msec'}, 'solar_azimuth_angle': 'SunAzimuth', 'solar_zenith_angle': 'SunZenith'} LOGGER = logging.getLogger(__name__) class IASIL2HDF5(BaseFileHandler): """File handler for IASI L2 HDF5 files.""" def __init__(self, filename, filename_info, filetype_info): super(IASIL2HDF5, self).__init__(filename, filename_info, filetype_info) self.finfo = filename_info self.lons = None self.lats = None self.sensor = 'iasi' self.mda = {} short_name = filename_info['platform_id'] self.mda['platform_name'] = SHORT_NAMES.get(short_name, short_name) self.mda['sensor'] = 'iasi' @property def start_time(self): return self.finfo['start_time'] @property def end_time(self): end_time = dt.datetime.combine(self.start_time.date(), self.finfo['end_time'].time()) if end_time < self.start_time: end_time += dt.timedelta(days=1) return end_time def get_dataset(self, key, info): """Load a dataset""" with h5py.File(self.filename, 'r') as fid: LOGGER.debug('Reading %s.', key.name) if key.name in DSET_NAMES: m_data = read_dataset(fid, key) else: m_data = read_geo(fid, key) m_data.attrs.update(info) m_data.attrs['sensor'] = self.sensor return m_data def read_dataset(fid, key): """Read dataset""" dsid = DSET_NAMES[key.name] dset = fid["/PWLR/" + dsid] if dset.ndim == 3: dims = ['y', 'x', 'level'] else: dims = ['y', 'x'] data = xr.DataArray(da.from_array(dset.value, chunks=CHUNK_SIZE), name=key.name, dims=dims).astype(np.float32) data = xr.where(data > 1e30, np.nan, data) dset_attrs = dict(dset.attrs) data.attrs.update(dset_attrs) return data def read_geo(fid, key): """Read geolocation and related datasets.""" dsid = GEO_NAMES[key.name] add_epoch = False if "time" in key.name: days = fid["/L1C/" + dsid["day"]].value msecs = fid["/L1C/" + dsid["msec"]].value data = _form_datetimes(days, msecs) add_epoch = True dtype = np.float64 else: data = fid["/L1C/" + dsid].value dtype = np.float32 data = xr.DataArray(da.from_array(data, chunks=CHUNK_SIZE), name=key.name, dims=['y', 'x']).astype(dtype) if add_epoch: data.attrs['sensing_time_epoch'] = EPOCH return data def _form_datetimes(days, msecs): """Calculate seconds since EPOCH from days and milliseconds for each of IASI scan.""" all_datetimes = [] for i in range(days.size): day = int(days[i]) msec = msecs[i] scanline_datetimes = [] for j in range(int(VALUES_PER_SCAN_LINE / 4)): usec = 1000 * (j * VIEW_TIME_ADJUSTMENT + msec) delta = (dt.timedelta(days=day, microseconds=usec)) for k in range(4): scanline_datetimes.append(delta.total_seconds()) all_datetimes.append(scanline_datetimes) return np.array(all_datetimes, dtype=np.float64) satpy-0.20.0/satpy/readers/iasi_l2_so2_bufr.py000066400000000000000000000167661362525524100212400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy 
developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""IASI L2 SO2 BUFR format reader. Introduction ------------ The ``iasi_l2_so2_bufr`` reader reads IASI level2 SO2 data in BUFR format. The algorithm is described in the Theoretical Basis Document, linked below. Each BUFR file consists of a number of messages, one for each scan, each of which contains SO2 column amounts in Dobson units for retrievals performed with plume heights of 7, 10, 13, 16 and 25 km. Reader Arguments ---------------- A list of retrieval files, fnames, can be opened as follows:: Scene(reader="iasi_l2_so2_bufr", filenames=fnames) Example ------- Here is an example how to read the data in satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob( '/test_data/W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68984_eps_o_so2_l2.bin') scn = Scene(filenames=filenames, reader='iasi_l2_so2_bufr') scn.load(['so2_height_3', 'so2_height_4']) print(scn['so2_height_3']) Output: .. code-block:: none dask.array Coordinates: crs object +proj=latlong +datum=WGS84 +ellps=WGS84 +type=crs Dimensions without coordinates: y, x Attributes: sensor: IASI units: dobson file_type: iasi_l2_so2_bufr wavelength: None modifiers: () platform_name: METOP-2 resolution: 12000 fill_value: -1e+100 level: None polarization: None coordinates: ('longitude', 'latitude') calibration: None key: #3#sulphurDioxide name: so2_height_3 start_time: 2020-02-04 09:14:55 end_time: 2020-02-04 09:17:51 area: Shape: (23, 120)\nLons: . """Interface to MTG-LI L2 product NetCDF files The reader is based on preliminary test data provided by EUMETSAT. The data description is described in the "LI L2 Product User Guide [LIL2PUG] Draft version" documentation. 
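A minimal usage sketch (the file name below is invented for illustration;
``af`` is the accumulated-flash dataset handled by this reader):

.. code-block:: python

    from satpy import Scene

    scn = Scene(filenames=['li_l2_test_file.nc'], reader='li_l2')
    scn.load(['af'])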
""" import h5netcdf import logging import numpy as np from datetime import datetime from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler # FIXME: This is not xarray/dask compatible # TODO: Once migrated to xarray/dask, remove ignored path in setup.cfg from satpy.dataset import Dataset logger = logging.getLogger(__name__) class LIFileHandler(BaseFileHandler): """MTG LI File Reader.""" def __init__(self, filename, filename_info, filetype_info): super(LIFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = h5netcdf.File(self.filename, 'r') # Get grid dimensions from file refdim = self.nc['grid_position'][:] # Get number of lines and columns self.nlines = int(refdim[2]) self.ncols = int(refdim[3]) self.cache = {} logger.debug('Dimension : {}'.format(refdim)) logger.debug('Row/Cols: {} / {}'.format(self.nlines, self.ncols)) logger.debug('Reading: {}'.format(self.filename)) logger.debug('Start: {}'.format(self.start_time)) logger.debug('End: {}'.format(self.end_time)) @property def start_time(self): return datetime.strptime(self.nc.attrs['sensing_start'], '%Y%m%d%H%M%S') @property def end_time(self): return datetime.strptime(self.nc.attrs['end_time'], '%Y%m%d%H%M%S') def get_dataset(self, key, info=None, out=None): """Load a dataset """ if key in self.cache: return self.cache[key] # Type dictionary typedict = {"af": "flash_accumulation", "afa": "accumulated_flash_area", "afr": "flash_radiance", "lgr": "radiance", "lef": "radiance", "lfl": "radiance"} # Get lightning data out of NetCDF container logger.debug("Key: {}".format(key.name)) # Create reference grid grid = np.full((self.nlines, self.ncols), np.NaN) # Get product values values = self.nc[typedict[key.name]] rows = self.nc['row'] cols = self.nc['column'] logger.debug('[ Number of values ] : {}'.format((len(values)))) logger.debug('[Min/Max] : <{}> / <{}>'.format(np.min(values), np.max(values))) # Convert xy coordinates to flatten indices ids = np.ravel_multi_index([rows, cols], grid.shape) # Replace NaN values with data np.put(grid, ids, values) # Correct for bottom left origin in LI row/column indices. rotgrid = np.flipud(grid) # Rotate the grid by 90 degree clockwise rotgrid = np.rot90(rotgrid, 3) logger.warning("LI data has been rotated to fit to reference grid. \ Works only for test dataset") # Mask invalid values ds = np.ma.masked_where(np.isnan(rotgrid), rotgrid) # Create dataset object out.data[:] = np.ma.getdata(ds) out.mask[:] = np.ma.getmask(ds) out.info.update(key.to_dict()) return out def get_area_def(self, key, info=None): """Create AreaDefinition for specified product. Projection information are hard coded for 0 degree geos projection Test dataset doesn't provide the values in the file container. Only fill values are inserted. """ # TODO Get projection information from input file a = 6378169. h = 35785831. b = 6356583.8 lon_0 = 0. 
# area_extent = (-5432229.9317116784, -5429229.5285458621, # 5429229.5285458621, 5432229.9317116784) area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612) proj_dict = {'a': float(a), 'b': float(b), 'lon_0': float(lon_0), 'h': float(h), 'proj': 'geos', 'units': 'm'} area = geometry.AreaDefinition( 'LI_area_name', "LI area", 'geosli', proj_dict, self.ncols, self.nlines, area_extent) self.area = area logger.debug("Dataset area definition: \n {}".format(area)) return area satpy-0.20.0/satpy/readers/maia.py000066400000000000000000000116541362525524100170130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for NWPSAF AAPP MAIA Cloud product. https://nwpsaf.eu/site/software/aapp/ Documentation reference: [NWPSAF-MF-UD-003] DATA Formats [NWPSAF-MF-UD-009] MAIA version 4 Scientific User Manual """ import logging import h5py import numpy as np from xarray import DataArray import dask.array as da from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE logger = logging.getLogger(__name__) class MAIAFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(MAIAFileHandler, self).__init__( filename, filename_info, filetype_info) self.finfo = filename_info # set the day date part for end_time from the file name self.finfo['end_time'] = self.finfo['end_time'].replace( year=self.finfo['start_time'].year, month=self.finfo['start_time'].month, day=self.finfo['start_time'].day) if self.finfo['end_time'] < self.finfo['start_time']: myday = self.finfo['end_time'].day self.finfo['end_time'] = self.finfo['end_time'].replace( day=myday + 1) self.selected = None self.read(self.filename) def read(self, filename): self.h5 = h5py.File(filename, 'r') missing = -9999. self.Lat = da.from_array(self.h5[u'DATA/Latitude'], chunks=CHUNK_SIZE) / 10000. self.Lon = da.from_array(self.h5[u'DATA/Longitude'], chunks=CHUNK_SIZE) / 10000. 
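        # Every product below is unpacked from the packed CloudMask/CloudType
        # words the same way: AND with a bit mask to isolate the field, then
        # divide by 2**k (equivalent to a right shift by k) to bring the
        # field down to bit 0.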
self.selected = (self.Lon > missing) self.file_content = {} for key in self.h5['DATA'].keys(): self.file_content[key] = da.from_array(self.h5[u'DATA/' + key], chunks=CHUNK_SIZE) for key in self.h5[u'HEADER'].keys(): self.file_content[key] = self.h5[u'HEADER/' + key][:] # Cloud Mask on pixel mask = 2**0 + 2**1 + 2**2 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**0 self.file_content[u"cma"] = lst # Cloud Mask confidence mask = 2**5 + 2**6 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**5 self.file_content[u"cma_conf"] = lst # Cloud Mask Quality mask = 2**3 + 2**4 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**3 self.file_content[u'cma_qual'] = lst # Opaque Cloud mask = 2**21 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**21 self.file_content[u'opaq_cloud'] = lst # land /water Background mask = 2**15 + 2**16 + 2**17 lst = self.file_content[u'CloudMask'] & mask lst = lst / 2**15 self.file_content[u'land_water_background'] = lst # CT (Actual CloudType) mask = 2**4 + 2**5 + 2**6 + 2**7 + 2**8 classif = self.file_content[u'CloudType'] & mask classif = classif / 2**4 self.file_content['ct'] = classif.astype(np.uint8) def get_platform(self, platform): if self.file_content['sat_id'] in (14,): return "viirs" else: return "avhrr" @property def start_time(self): return self.finfo['start_time'] @property def end_time(self): return self.finfo['end_time'] def get_dataset(self, key, info, out=None): """Get a dataset from the file.""" logger.debug("Reading %s.", key.name) values = self.file_content[key.name] selected = np.array(self.selected) if key.name in ("Latitude", "Longitude"): values = values / 10000. if key.name in ('Tsurf', 'CloudTopPres', 'CloudTopTemp'): goods = values > -9998. selected = np.array(selected & goods) if key.name in ('Tsurf', "Alt_surface", "CloudTopTemp"): values = values / 100. if key.name in ("CloudTopPres"): values = values / 10. else: selected = self.selected info.update(self.finfo) fill_value = np.nan if key.name == 'ct': fill_value = 0 info['_FillValue'] = 0 ds = DataArray(values, dims=['y', 'x'], attrs=info).where(selected, fill_value) # update dataset info with file_info return ds satpy-0.20.0/satpy/readers/mersi2_l1b.py000066400000000000000000000164451362525524100200460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for the FY-3D MERSI-2 L1B file format. The files for this reader are HDF5 and come in four varieties; band data and geolocation data, both at 250m and 1000m resolution. This reader was tested on FY-3D MERSI-2 data, but should work on future platforms as well assuming no file format changes. 
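Example usage sketch (the file list is hypothetical; any matching set of
MERSI-2 L1B band and geolocation files should work):

.. code-block:: python

    from satpy import Scene

    scn = Scene(filenames=my_mersi2_files, reader='mersi2_l1b')
    scn.load(['1', '24'])  # a reflectance band and a brightness-temperature band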
""" from datetime import datetime from satpy.readers.hdf5_utils import HDF5FileHandler from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp import numpy as np import dask.array as da class MERSI2L1B(HDF5FileHandler): """MERSI-2 L1B file reader.""" def _strptime(self, date_attr, time_attr): """Parse date/time strings.""" date = self[date_attr] time = self[time_attr] # "18:27:39.720" # cuts off microseconds because of unknown meaning # is .720 == 720 microseconds or 720000 microseconds return datetime.strptime(date + " " + time.split('.')[0], "%Y-%m-%d %H:%M:%S") @property def start_time(self): """Time for first observation.""" return self._strptime('/attr/Observing Beginning Date', '/attr/Observing Beginning Time') @property def end_time(self): """Time for final observation.""" return self._strptime('/attr/Observing Ending Date', '/attr/Observing Ending Time') @property def sensor_name(self): """Map sensor name to Satpy 'standard' sensor names.""" file_sensor = self['/attr/Sensor Identification Code'] sensor = { 'MERSI': 'mersi-2', }.get(file_sensor, file_sensor) return sensor def _get_coefficients(self, cal_key, cal_index): coeffs = self[cal_key][cal_index] slope = coeffs.attrs.pop('Slope', None) intercept = coeffs.attrs.pop('Intercept', None) if slope is not None: # sometimes slope has multiple elements if hasattr(slope, '__len__') and len(slope) == 1: slope = slope[0] intercept = intercept[0] elif hasattr(slope, '__len__'): slope = slope[cal_index] intercept = intercept[cal_index] coeffs = coeffs * slope + intercept return coeffs def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" file_key = ds_info.get('file_key', dataset_id.name) band_index = ds_info.get('band_index') data = self[file_key] if band_index is not None: data = data[band_index] if data.ndim >= 2: data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) if 'rows_per_scan' in self.filetype_info: attrs.setdefault('rows_per_scan', self.filetype_info['rows_per_scan']) fill_value = attrs.pop('FillValue', np.nan) # covered by valid_range valid_range = attrs.pop('valid_range', None) if dataset_id.calibration == 'counts': # preserve integer type of counts if possible attrs['_FillValue'] = fill_value new_fill = fill_value else: new_fill = np.nan if valid_range is not None: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. 
if dataset_id.name in ['24', '25'] and valid_range[1] == 4095: valid_range[1] = 25000 # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 data = data.where((data >= valid_range[0]) & (data <= valid_range[1]), new_fill) slope = attrs.pop('Slope', None) intercept = attrs.pop('Intercept', None) if slope is not None and dataset_id.calibration != 'counts': if band_index is not None: slope = slope[band_index] intercept = intercept[band_index] data = data * slope + intercept if dataset_id.calibration == "reflectance": # some bands have 0 counts for the first N columns and # seem to be invalid data points data = data.where(data != 0) coeffs = self._get_coefficients(ds_info['calibration_key'], ds_info['calibration_index']) data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 elif dataset_id.calibration == "brightness_temperature": cal_index = ds_info['calibration_index'] # Apparently we don't use these calibration factors for Rad -> BT # coeffs = self._get_coefficients(ds_info['calibration_key'], cal_index) # # coefficients are per-scan, we need to repeat the values for a # # clean alignment # coeffs = np.repeat(coeffs, data.shape[0] // coeffs.shape[1], axis=1) # coeffs = coeffs.rename({ # coeffs.dims[0]: 'coefficients', coeffs.dims[1]: 'y' # }) # match data dims # data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 + coeffs[3] * data**3 # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = 1. / (dataset_id.wavelength[1] / 1e6) # pass the dask array bt_data = rad2temp(wave_number, data.data * 1e-5) # brightness temperature if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays data.data = da.from_array(bt_data, chunks=data.data.chunks) else: # new versions of pyspectral can do dask arrays data.data = bt_data # additional corrections from the file corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][cal_index]) corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][cal_index]) if corr_coeff_a != 0: data = (data - corr_coeff_b) / corr_coeff_a # Some BT bands seem to have 0 in the first 10 columns # and it is an invalid Kelvin measurement, so let's mask data = data.where(data != 0) data.attrs = attrs # convert bytes to str for key, val in attrs.items(): # python 3 only if bytes is not str and isinstance(val, bytes): data.attrs[key] = val.decode('utf8') data.attrs.update({ 'platform_name': self['/attr/Satellite Name'], 'sensor': self.sensor_name, }) return data satpy-0.20.0/satpy/readers/mimic_TPW2_nc.py000066400000000000000000000150371362525524100204750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . # # """Reader for Mimic TPW data in netCDF format from SSEC. This module implements reader for MIMIC_TPW2 netcdf files. 
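A minimal usage sketch (the file name is invented for illustration;
``tpwGrid`` is the main total-precipitable-water variable in these files):

.. code-block:: python

    from satpy import Scene

    scn = Scene(filenames=['comp20190619.130000.nc'], reader='mimic_TPW2_nc')
    scn.load(['tpwGrid'])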
MIMIC-TPW2 is an experimental global product of total precipitable water (TPW), using morphological compositing of the MIRS retrieval from several available operational microwave-frequency sensors. Originally described in a 2010 paper by Wimmers and Velden. This Version 2 is developed from an older method that uses simpler, but more limited TPW retrievals and advection calculations. More information, data and credits at http://tropic.ssec.wisc.edu/real-time/mtpw2/credits.html """ import numpy as np import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 from pyresample.geometry import AreaDefinition import logging logger = logging.getLogger(__name__) class MimicTPW2FileHandler(NetCDF4FileHandler): """NetCDF4 reader for MIMC TPW.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(MimicTPW2FileHandler, self).__init__(filename, filename_info, filetype_info) def available_datasets(self, configured_datasets=None): """Get datasets in file matching gelocation shape (lat/lon).""" lat_shape = self.file_content.get('/dimension/lat') lon_shape = self.file_content.get('/dimension/lon') # Read the lat/lon variables? handled_variables = set() # update previously configured datasets logger.debug("Starting previously configured variables loop...") for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get('file_key', ds_info['name']) # logger.debug("Evaluating previously configured variable: %s", var_name) matches = self.file_type_matches(ds_info['file_type']) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self: logger.debug("Handling previously configured variable: %s", var_name) handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # Iterate over dataset contents for var_name, val in self.file_content.items(): # Only evaluate variables if isinstance(val, netCDF4.Variable): logger.debug("Evaluating new variable: %s", var_name) var_shape = self[var_name + "/shape"] logger.debug("Dims:{}".format(var_shape)) if var_shape == (lat_shape, lon_shape): logger.debug("Found valid additional dataset: %s", var_name) # Skip anything we have already configured if var_name in handled_variables: logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) # Create new ds_info object new_info = { 'name': var_name, 'file_key': var_name, 'file_type': self.filetype_info['file_type'], } logger.debug(var_name) yield True, new_info def get_dataset(self, ds_id, info): """Load dataset designated by the given key from file.""" logger.debug("Getting data for: %s", ds_id.name) file_key = info.get('file_key', ds_id.name) data = np.flipud(self[file_key]) data = xr.DataArray(data, dims=['y', 'x']) data.attrs = self.get_metadata(data, info) if 'lon' in data.dims: data.rename({'lon': 'x'}) if 'lat' in data.dims: data.rename({'lat': 'y'}) return data def get_area_def(self, dsid): """Flip data up/down and define equirectangular AreaDefintion.""" flip_lat = np.flipud(self['latArr']) latlon = np.meshgrid(self['lonArr'], flip_lat) width = self['lonArr/shape'][0] height = self['latArr/shape'][0] lower_left_x = latlon[0][height-1][0] 
lower_left_y = latlon[1][height-1][0] upper_right_y = latlon[1][0][width-1] upper_right_x = latlon[0][0][width-1] area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "MIMIC TPW Equirectangular Projection" area_id = 'mimic' proj_id = 'equirectangular' proj_dict = {'proj': 'longlat', 'datum': 'WGS84', 'ellps': 'WGS84', } area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def def get_metadata(self, data, info): """Get general metadata for file.""" metadata = {} metadata.update(data.attrs) metadata.update(info) metadata.update({ 'platform_shortname': 'aggregated microwave', 'sensor': 'mimic', 'start_time': self.start_time, 'end_time': self.end_time, }) return metadata @property def start_time(self): """Start timestamp of the dataset determined from yaml.""" return self.filename_info['start_time'] @property def end_time(self): """End timestamp of the dataset same as start_time.""" return self.filename_info.get('end_time', self.start_time) @property def sensor_name(self): """Sensor name.""" return self["sensor"] satpy-0.20.0/satpy/readers/modis_l1b.py000066400000000000000000000264741362525524100177630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modis level 1b hdf-eos format reader Introduction ------------ The ``modis_l1b`` reader reads and calibrates Modis L1 image data in hdf-eos format. Files often have a pattern similar to the following one: .. parsed-literal:: M[O/Y]D02[1/H/Q]KM.A[date].[time].[collection].[processing_time].hdf Other patterns where "collection" and/or "proccessing_time" are missing might also work (see the readers yaml file for details). Geolocation files (MOD03) are also supported. Geolocation files ----------------- For the 1km data (mod021km) geolocation files (mod03) are optional. If not given to the reader 1km geolocations will be interpolated from the 5km geolocation contained within the file. For the 500m and 250m data geolocation files are needed. 
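Example usage sketch (the file name is invented, following the pattern above):

.. code-block:: python

    from satpy import Scene

    scn = Scene(filenames=['MOD021KM.A2017233.0815.006.2017234021533.hdf'],
                reader='modis_l1b')
    scn.load(['1'])
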
References: - Modis gelocation description: http://www.icare.univ-lille1.fr/wiki/index.php/MODIS_geolocation """ import logging import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader from satpy.readers.hdf4_utils import from_sds logger = logging.getLogger(__name__) class HDFEOSBandReader(HDFEOSBaseFileReader): """Handler for the regular band channels.""" res = {"1": 1000, "Q": 250, "H": 500} def __init__(self, filename, filename_info, filetype_info): HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info) ds = self.metadata['INVENTORYMETADATA'][ 'COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] self.resolution = self.res[ds[-3]] def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" datadict = { 1000: ['EV_250_Aggr1km_RefSB', 'EV_500_Aggr1km_RefSB', 'EV_1KM_RefSB', 'EV_1KM_Emissive'], 500: ['EV_250_Aggr500_RefSB', 'EV_500_RefSB'], 250: ['EV_250_RefSB']} platform_name = self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][ 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE'] info.update({'platform_name': 'EOS-' + platform_name}) info.update({'sensor': 'modis'}) if self.resolution != key.resolution: return datasets = datadict[self.resolution] for dataset in datasets: subdata = self.sd.select(dataset) var_attrs = subdata.attributes() band_names = var_attrs["band_names"].split(",") # get the relative indices of the desired channel try: index = band_names.index(key.name) except ValueError: continue uncertainty = self.sd.select(dataset + "_Uncert_Indexes") array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[index, :, :], dims=['y', 'x']).astype(np.float32) valid_range = var_attrs['valid_range'] # Fill values: # Data Value Meaning # 65535 Fill Value (includes reflective band data at night mode # and completely missing L1A scans) # 65534 L1A DN is missing within a scan # 65533 Detector is saturated # 65532 Cannot compute zero point DN, e.g., SV is saturated # 65531 Detector is dead (see comments below) # 65530 RSB dn** below the minimum of the scaling range # 65529 TEB radiance or RSB dn** exceeds the maximum of the # scaling range # 65528 Aggregation algorithm failure # 65527 Rotation of Earth view Sector from nominal science # collection position # 65526 Calibration coefficient b1 could not be computed # 65525 Subframe is dead # 65524 Both sides of the PCLW electronics on simultaneously # 65501 - 65523 (reserved for future use) # 65500 NAD closed upper limit array = array.where(array >= np.float32(valid_range[0])) array = array.where(array <= np.float32(valid_range[1])) array = array.where(from_sds(uncertainty, chunks=CHUNK_SIZE)[index, :, :] < 15) if key.calibration == 'brightness_temperature': projectable = calibrate_bt(array, var_attrs, index, key.name) info.setdefault('units', 'K') info.setdefault('standard_name', 'toa_brightness_temperature') elif key.calibration == 'reflectance': projectable = calibrate_refl(array, var_attrs, index) info.setdefault('units', '%') info.setdefault('standard_name', 'toa_bidirectional_reflectance') elif key.calibration == 'radiance': projectable = calibrate_radiance(array, var_attrs, index) info.setdefault('units', var_attrs.get('radiance_units')) info.setdefault('standard_name', 'toa_outgoing_radiance_per_unit_wavelength') elif key.calibration == 'counts': projectable = calibrate_counts(array, var_attrs, index) info.setdefault('units', 'counts') 
info.setdefault('standard_name', 'counts') # made up else: raise ValueError("Unknown calibration for " "key: {}".format(key)) projectable.attrs = info # if ((platform_name == 'Aqua' and key.name in ["6", "27", "36"]) or # (platform_name == 'Terra' and key.name in ["29"])): # height, width = projectable.shape # row_indices = projectable.mask.sum(1) == width # if row_indices.sum() != height: # projectable.mask[row_indices, :] = True # Get the orbit number # if not satscene.orbit: # mda = self.data.attributes()["CoreMetadata.0"] # orbit_idx = mda.index("ORBITNUMBER") # satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116] # Trimming out dead sensor lines (detectors) on terra: # (in addition channel 27, 30, 34, 35, and 36 are nosiy) # if satscene.satname == "terra": # for band in ["29"]: # if not satscene[band].is_loaded() or satscene[band].data.mask.all(): # continue # width = satscene[band].data.shape[1] # height = satscene[band].data.shape[0] # indices = satscene[band].data.mask.sum(1) < width # if indices.sum() == height: # continue # satscene[band] = satscene[band].data[indices, :] # satscene[band].area = geometry.SwathDefinition( # lons=satscene[band].area.lons[indices, :], # lats=satscene[band].area.lats[indices, :]) return projectable class MixedHDFEOSReader(HDFEOSGeoReader, HDFEOSBandReader): """A file handler for the files that have both regular bands and geographical information in them.""" def __init__(self, filename, filename_info, filetype_info): HDFEOSGeoReader.__init__(self, filename, filename_info, filetype_info) HDFEOSBandReader.__init__(self, filename, filename_info, filetype_info) def get_dataset(self, key, info): if key.name in HDFEOSGeoReader.DATASET_NAMES: return HDFEOSGeoReader.get_dataset(self, key, info) return HDFEOSBandReader.get_dataset(self, key, info) def calibrate_counts(array, attributes, index): """Calibration for counts channels.""" offset = np.float32(attributes["corrected_counts_offsets"][index]) scale = np.float32(attributes["corrected_counts_scales"][index]) array = (array - offset) * scale return array def calibrate_radiance(array, attributes, index): """Calibration for radiance channels.""" offset = np.float32(attributes["radiance_offsets"][index]) scale = np.float32(attributes["radiance_scales"][index]) array = (array - offset) * scale return array def calibrate_refl(array, attributes, index): """Calibration for reflective channels.""" offset = np.float32(attributes["reflectance_offsets"][index]) scale = np.float32(attributes["reflectance_scales"][index]) # convert to reflectance and convert from 1 to % array = (array - offset) * scale * 100 return array def calibrate_bt(array, attributes, index, band_name): """Calibration for the emissive channels.""" offset = np.float32(attributes["radiance_offsets"][index]) scale = np.float32(attributes["radiance_scales"][index]) array = (array - offset) * scale # Planck constant (Joule second) h__ = np.float32(6.6260755e-34) # Speed of light in vacuum (meters per second) c__ = np.float32(2.9979246e+8) # Boltzmann constant (Joules per Kelvin) k__ = np.float32(1.380658e-23) # Derived constants c_1 = 2 * h__ * c__ * c__ c_2 = (h__ * c__) / k__ # Effective central wavenumber (inverse centimeters) cwn = np.array([ 2.641775E+3, 2.505277E+3, 2.518028E+3, 2.465428E+3, 2.235815E+3, 2.200346E+3, 1.477967E+3, 1.362737E+3, 1.173190E+3, 1.027715E+3, 9.080884E+2, 8.315399E+2, 7.483394E+2, 7.308963E+2, 7.188681E+2, 7.045367E+2], dtype=np.float32) # Temperature correction slope (no units) tcs = np.array([ 9.993411E-1, 9.998646E-1, 
9.998584E-1, 9.998682E-1, 9.998819E-1, 9.998845E-1, 9.994877E-1, 9.994918E-1, 9.995495E-1, 9.997398E-1, 9.995608E-1, 9.997256E-1, 9.999160E-1, 9.999167E-1, 9.999191E-1, 9.999281E-1], dtype=np.float32) # Temperature correction intercept (Kelvin) tci = np.array([ 4.770532E-1, 9.262664E-2, 9.757996E-2, 8.929242E-2, 7.310901E-2, 7.060415E-2, 2.204921E-1, 2.046087E-1, 1.599191E-1, 8.253401E-2, 1.302699E-1, 7.181833E-2, 1.972608E-2, 1.913568E-2, 1.817817E-2, 1.583042E-2], dtype=np.float32) # Transfer wavenumber [cm^(-1)] to wavelength [m] cwn = 1. / (cwn * 100) # Some versions of the modis files do not contain all the bands. emmissive_channels = ["20", "21", "22", "23", "24", "25", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36"] global_index = emmissive_channels.index(band_name) cwn = cwn[global_index] tcs = tcs[global_index] tci = tci[global_index] array = c_2 / (cwn * np.log(c_1 / (1000000 * array * cwn ** 5) + 1)) array = (array - tci) / tcs return array satpy-0.20.0/satpy/readers/modis_l2.py000066400000000000000000000173501362525524100176130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modis level 2 hdf-eos format reader. Introduction ------------ The ``modis_l2`` reader reads and calibrates Modis L2 image data in hdf-eos format. Since there are a multitude of different level 2 datasets not all of theses are implemented (yet). Currently the reader supports: - m[o/y]d35_l2: cloud_mask dataset - some datasets in m[o/y]d06 files To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../readers`. Geolocation files ----------------- Similar to the ``modis_l1b`` reader the geolocation files (mod03) for the 1km data are optional and if not given 1km geolocations will be interpolated from the 5km geolocation contained within the file. For the 500m and 250m data geolocation files are needed. 
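Example usage sketch (the file name is invented; ``cloud_mask`` is the
dataset named above):

.. code-block:: python

    from satpy import Scene

    scn = Scene(filenames=['MOD35_L2.A2017233.0815.061.2017234021533.hdf'],
                reader='modis_l2')
    scn.load(['cloud_mask'])
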
References: - Documentation about the format: https://modis-atmos.gsfc.nasa.gov/products """ import logging import numpy as np import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.hdfeos_base import HDFEOSGeoReader from satpy.readers.hdf4_utils import from_sds logger = logging.getLogger(__name__) class ModisL2HDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 2 files.""" def _select_hdf_dataset(self, hdf_dataset_name, byte_dimension): """Load a dataset from HDF-EOS level 2 file.""" hdf_dataset = self.sd.select(hdf_dataset_name) if byte_dimension == 0: dataset = xr.DataArray(from_sds(hdf_dataset, chunks=CHUNK_SIZE), dims=['i', 'y', 'x']).astype(np.uint8) elif byte_dimension == 2: dataset = xr.DataArray(from_sds(hdf_dataset, chunks=CHUNK_SIZE), dims=['y', 'x', 'i']).astype(np.uint8) # Reorder dimensions for consistency dataset = dataset.transpose('i', 'y', 'x') return dataset def _parse_resolution_info(self, info, resolution): if isinstance(info, list): if len(info) == 1 and isinstance(info[0], int): return info[0] # Check if the values are stored in a with resolution as a key if isinstance(info[0], dict): for elem in info: try: return elem[resolution] except KeyError: pass # The information doesn't concern the current resolution return None return info def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id.name if dataset_name in HDFEOSGeoReader.DATASET_NAMES: return HDFEOSGeoReader.get_dataset(self, dataset_id, dataset_info) dataset_name_in_file = dataset_info['file_key'] # The dataset asked correspond to a given set of bits of the HDF EOS dataset if 'byte' in dataset_info and 'byte_dimension' in dataset_info: byte_dimension = dataset_info['byte_dimension'] # Where the information is stored dataset = self._select_hdf_dataset(dataset_name_in_file, byte_dimension) byte_information = self._parse_resolution_info(dataset_info['byte'], dataset_id.resolution) # At which bit starts the information bit_start = self._parse_resolution_info(dataset_info['bit_start'], dataset_id.resolution) # How many bits store the information bit_count = self._parse_resolution_info(dataset_info['bit_count'], dataset_id.resolution) # Only one byte: select the byte information if isinstance(byte_information, int): byte_dataset = dataset[byte_information, :, :] # Two bytes: recombine the two bytes elif isinstance(byte_information, list) and len(byte_information) == 2: # We recombine the two bytes dataset_a = dataset[byte_information[0], :, :] dataset_b = dataset[byte_information[1], :, :] dataset_a = np.uint16(dataset_a) dataset_a = np.left_shift(dataset_a, 8) # dataset_a << 8 byte_dataset = np.bitwise_or(dataset_a, dataset_b).astype(np.uint16) shape = byte_dataset.shape # We replicate the concatenated byte with the right shape byte_dataset = np.repeat(np.repeat(byte_dataset, 4, axis=0), 4, axis=1) # All bits carry information, we update bit_start consequently bit_start = np.arange(16, dtype=np.uint16).reshape((4, 4)) bit_start = np.tile(bit_start, (shape[0], shape[1])) # Compute the final bit mask dataset = bits_strip(bit_start, bit_count, byte_dataset) # Apply quality assurance filter if 'quality_assurance' in dataset_info: quality_assurance_required = self._parse_resolution_info( dataset_info['quality_assurance'], dataset_id.resolution ) if quality_assurance_required is True: # Get quality assurance dataset recursively from satpy import DatasetID quality_assurance_dataset_id = DatasetID( name='quality_assurance', 
resolution=1000 ) quality_assurance_dataset_info = { 'name': 'quality_assurance', 'resolution': [1000], 'byte_dimension': 2, 'byte': [0], 'bit_start': 0, 'bit_count': 1, 'file_key': 'Quality_Assurance' } quality_assurance = self.get_dataset( quality_assurance_dataset_id, quality_assurance_dataset_info ) # Duplicate quality assurance dataset to create relevant filter duplication_factor = [int(dataset_dim / quality_assurance_dim) for dataset_dim, quality_assurance_dim in zip(dataset.shape, quality_assurance.shape)] quality_assurance = np.tile(quality_assurance, duplication_factor) # Replace unassured data by NaN value dataset[np.where(quality_assurance == 0)] = np.NaN # No byte manipulation required else: dataset = self.load_dataset(dataset_name) return dataset def bits_strip(bit_start, bit_count, value): """Extract specified bit from bit representation of integer value. Parameters ---------- bit_start : int Starting index of the bits to extract (first bit has index 0) bit_count : int Number of bits starting from bit_start to extract value : int Number from which to extract the bits Returns ------- int Value of the extracted bits """ bit_mask = pow(2, bit_start + bit_count) - 1 return np.right_shift(np.bitwise_and(value, bit_mask), bit_start) satpy-0.20.0/satpy/readers/msi_safe.py000066400000000000000000000167041362525524100176730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SAFE MSI L1C reader. """ import logging import glymur import numpy as np from xarray import DataArray import dask.array as da import xml.etree.ElementTree as ET from pyresample import geometry from dask import delayed from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) PLATFORMS = {'S2A': "Sentinel-2A", 'S2B': "Sentinel-2B", 'S2C': "Sentinel-2C", 'S2D': "Sentinel-2D"} class SAFEMSIL1C(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info, mda): super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) self._start_time = filename_info['observation_time'] self._end_time = filename_info['observation_time'] self._channel = filename_info['band_name'] self._mda = mda self.platform_name = PLATFORMS[filename_info['fmission_id']] def get_dataset(self, key, info): """Load a dataset.""" if self._channel != key.name: return logger.debug('Reading %s.', key.name) # FIXME: get this from MTD_MSIL1C.xml quantification_value = 10000. jp2 = glymur.Jp2k(self.filename) bitdepth = 0 for seg in jp2.codestream.segment: try: bitdepth = max(bitdepth, seg.bitdepth[0]) except AttributeError: pass jp2.dtype = (np.uint8 if bitdepth <= 8 else np.uint16) # Initialize the jp2 reader / doesn't work in a multi-threaded context. 
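        # The workaround below wraps the whole JPEG-2000 decode in a single
        # dask.delayed task, so only one thread ever touches the Jp2k object,
        # and the resulting array is rechunked afterwards.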
# jp2[0, 0] # data = da.from_array(jp2, chunks=CHUNK_SIZE) / quantification_value * 100 data = da.from_delayed(delayed(jp2.read)(), jp2.shape, jp2.dtype) data = data.rechunk(CHUNK_SIZE) / quantification_value * 100 proj = DataArray(data, dims=['y', 'x']) proj.attrs = info.copy() proj.attrs['units'] = '%' proj.attrs['platform_name'] = self.platform_name return proj @property def start_time(self): return self._start_time @property def end_time(self): return self._start_time def get_area_def(self, dsid): if self._channel != dsid.name: return return self._mda.get_area_def(dsid) class SAFEMSIMDXML(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(SAFEMSIMDXML, self).__init__(filename, filename_info, filetype_info) self._start_time = filename_info['observation_time'] self._end_time = filename_info['observation_time'] self.root = ET.parse(self.filename) self.tile = filename_info['gtile_number'] self.platform_name = PLATFORMS[filename_info['fmission_id']] @property def start_time(self): return self._start_time @property def end_time(self): return self._start_time def get_area_def(self, dsid): """Get the area definition of the dataset.""" geocoding = self.root.find('.//Tile_Geocoding') epsg = geocoding.find('HORIZONTAL_CS_CODE').text rows = int(geocoding.find('Size[@resolution="' + str(dsid.resolution) + '"]/NROWS').text) cols = int(geocoding.find('Size[@resolution="' + str(dsid.resolution) + '"]/NCOLS').text) geoposition = geocoding.find('Geoposition[@resolution="' + str(dsid.resolution) + '"]') ulx = float(geoposition.find('ULX').text) uly = float(geoposition.find('ULY').text) xdim = float(geoposition.find('XDIM').text) ydim = float(geoposition.find('YDIM').text) area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly) area = geometry.AreaDefinition( self.tile, "On-the-fly area", self.tile, {'init': epsg}, cols, rows, area_extent) return area @staticmethod def _do_interp(minterp, xcoord, ycoord): interp_points2 = np.vstack((xcoord.ravel(), ycoord.ravel())) res = minterp(interp_points2) return res.reshape(xcoord.shape) def interpolate_angles(self, angles, resolution): # FIXME: interpolate in cartesian coordinates if the lons or lats are # problematic from geotiepoints.multilinear import MultilinearInterpolator geocoding = self.root.find('.//Tile_Geocoding') rows = int(geocoding.find('Size[@resolution="' + str(resolution) + '"]/NROWS').text) cols = int(geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text) smin = [0, 0] smax = np.array(angles.shape) - 1 orders = angles.shape minterp = MultilinearInterpolator(smin, smax, orders) minterp.set_values(da.atleast_2d(angles.ravel())) x = da.arange(rows, dtype=angles.dtype, chunks=CHUNK_SIZE) / (rows-1) * (angles.shape[0] - 1) y = da.arange(cols, dtype=angles.dtype, chunks=CHUNK_SIZE) / (cols-1) * (angles.shape[1] - 1) xcoord, ycoord = da.meshgrid(x, y) return da.map_blocks(self._do_interp, minterp, xcoord, ycoord, dtype=angles.dtype, chunks=xcoord.chunks) def _get_coarse_dataset(self, key, info): """Get the coarse dataset refered to by `key` from the XML data.""" angles = self.root.find('.//Tile_Angles') if key in ['solar_zenith_angle', 'solar_azimuth_angle']: elts = angles.findall(info['xml_tag'] + '/Values_List/VALUES') return np.array([[val for val in elt.text.split()] for elt in elts], dtype=np.float) elif key in ['satellite_zenith_angle', 'satellite_azimuth_angle']: arrays = [] elts = angles.findall(info['xml_tag'] + '[@bandId="1"]') for elt in elts: items = elt.findall(info['xml_item'] + 
'/Values_List/VALUES') arrays.append(np.array([[val for val in item.text.split()] for item in items], dtype=np.float)) return np.nanmean(np.dstack(arrays), -1) else: return def get_dataset(self, key, info): """Get the dataset refered to by `key`.""" angles = self._get_coarse_dataset(key, info) if angles is None: return # Fill gaps at edges of swath darr = DataArray(angles, dims=['y', 'x']) darr = darr.bfill('x') darr = darr.ffill('x') angles = darr.data res = self.interpolate_angles(angles, key.resolution) proj = DataArray(res, dims=['y', 'x']) proj.attrs = info.copy() proj.attrs['units'] = 'degrees' proj.attrs['platform_name'] = self.platform_name return proj satpy-0.20.0/satpy/readers/netcdf_utils.py000066400000000000000000000224271362525524100205670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helpers for reading netcdf-based files.""" import netCDF4 import logging import numpy as np import xarray as xr import dask.array as da from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str LOG = logging.getLogger(__name__) class NetCDF4FileHandler(BaseFileHandler): """Small class for inspecting a NetCDF4 file and retrieving its metadata/header data. File information can be accessed using bracket notation. Variables are accessed by using: wrapper["var_name"] Or: wrapper["group/subgroup/var_name"] Attributes can be accessed by appending "/attr/attr_name" to the item string: wrapper["group/subgroup/var_name/attr/units"] Or for global attributes: wrapper["/attr/platform_short_name"] Note that loading datasets requires reopening the original file (unless those datasets are cached, see below), but to get just the shape of the dataset append "/shape" to the item string: wrapper["group/subgroup/var_name/shape"] If your file has many small data variables that are frequently accessed, you may choose to cache some of them. You can do this by passing a number, any variable smaller than this number in bytes will be read into RAM. Warning, this part of the API is provisional and subject to change. You may get an additional speedup by passing ``cache_handle=True``. This will keep the netCDF4 dataset handles open throughout the lifetime of the object, and instead of using `xarray.open_dataset` to open every data variable, a dask array will be created "manually". This may be useful if you have a dataset distributed over many files, such as for FCI. Note that the coordinates will be missing in this case. If you use this option, ``xarray_kwargs`` will have no effect. 
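    A small usage sketch (the file name and variable paths are illustrative):

        fh = NetCDF4FileHandler('myfile.nc', {}, {},
                                cache_var_size=10000, cache_handle=True)
        data = fh['group/subgroup/var_name']
        units = fh['group/subgroup/var_name/attr/units']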
Args: filename (str): File to read filename_info (dict): Dictionary with filename information filetype_info (dict): Dictionary with filetype information auto_maskandscale (bool): Apply mask and scale factors xarray_kwargs (dict): Addition arguments to `xarray.open_dataset` cache_var_size (int): Cache variables smaller than this size. cache_handle (bool): Keep files open for lifetime of filehandler. """ file_handle = None def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None, cache_var_size=0, cache_handle=False): super(NetCDF4FileHandler, self).__init__( filename, filename_info, filetype_info) self.file_content = {} self.cached_file_content = {} try: file_handle = netCDF4.Dataset(self.filename, 'r') except IOError: LOG.exception( 'Failed reading file %s. Possibly corrupted file', self.filename) raise self.auto_maskandscale = auto_maskandscale if hasattr(file_handle, "set_auto_maskandscale"): file_handle.set_auto_maskandscale(auto_maskandscale) self.collect_metadata("", file_handle) self.collect_dimensions("", file_handle) if cache_var_size > 0: self.collect_cache_vars( [varname for (varname, var) in self.file_content.items() if isinstance(var, netCDF4.Variable) and isinstance(var.dtype, np.dtype) # vlen may be str and var.size * var.dtype.itemsize < cache_var_size], file_handle) if cache_handle: self.file_handle = file_handle else: file_handle.close() self._xarray_kwargs = xarray_kwargs or {} self._xarray_kwargs.setdefault('chunks', CHUNK_SIZE) self._xarray_kwargs.setdefault('mask_and_scale', self.auto_maskandscale) def __del__(self): if self.file_handle is not None: try: self.file_handle.close() except RuntimeError: # presumably closed already pass def _collect_attrs(self, name, obj): """Collect all the attributes for the provided file object.""" for key in obj.ncattrs(): value = getattr(obj, key) fc_key = "{}/attr/{}".format(name, key) try: self.file_content[fc_key] = np2str(value) except ValueError: self.file_content[fc_key] = value def collect_metadata(self, name, obj): """Collect all file variables and attributes for the provided file object. This method also iterates through subgroups of the provided object. """ # Look through each subgroup base_name = name + "/" if name else "" for group_name, group_obj in obj.groups.items(): self.collect_metadata(base_name + group_name, group_obj) for var_name, var_obj in obj.variables.items(): var_name = base_name + var_name self.file_content[var_name] = var_obj self.file_content[var_name + "/dtype"] = var_obj.dtype self.file_content[var_name + "/shape"] = var_obj.shape self._collect_attrs(var_name, var_obj) self._collect_attrs(name, obj) def collect_dimensions(self, name, obj): """Collect dimensions.""" for dim_name, dim_obj in obj.dimensions.items(): dim_name = "{}/dimension/{}".format(name, dim_name) self.file_content[dim_name] = len(dim_obj) def collect_cache_vars(self, cache_vars, obj): """Collect data variables for caching. This method will collect some data variables and store them in RAM. This may be useful if some small variables are frequently accessed, to prevent needlessly frequently opening and closing the file, which in case of xarray is associated with some overhead. Should be called later than `collect_metadata`. Args: cache_vars (List[str]): Names of data variables to be cached. obj (netCDF4.Dataset): Dataset object from which to read them. 
""" for var_name in cache_vars: v = self.file_content[var_name] self.cached_file_content[var_name] = xr.DataArray( v[:], dims=v.dimensions, attrs=v.__dict__, name=v.name) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, netCDF4.Variable): if key in self.cached_file_content: return self.cached_file_content[key] # these datasets are closed and inaccessible when the file is # closed, need to reopen # TODO: Handle HDF4 versus NetCDF3 versus NetCDF4 parts = key.rsplit('/', 1) if len(parts) == 2: group, key = parts else: group = None if self.file_handle is not None: val = self._get_var_from_filehandle(group, key) else: val = self._get_var_from_xr(group, key) return val def _get_var_from_xr(self, group, key): with xr.open_dataset(self.filename, group=group, **self._xarray_kwargs) as nc: val = nc[key] # Even though `chunks` is specified in the kwargs, xarray # uses dask.arrays only for data variables that have at least # one dimension; for zero-dimensional data variables (scalar), # it uses its own lazy loading for scalars. When those are # accessed after file closure, xarray reopens the file without # closing it again. This will leave potentially many open file # objects (which may in turn trigger a Segmentation Fault: # https://github.com/pydata/xarray/issues/2954#issuecomment-491221266 if not val.chunks: val.load() return val def _get_var_from_filehandle(self, group, key): # Not getting coordinates as this is more work, therefore more # overhead, and those are not used downstream. g = self.file_handle[group] v = g[key] x = xr.DataArray( da.from_array(v), dims=v.dimensions, attrs=v.__dict__, name=v.name) return x def __contains__(self, item): """Get item from file content.""" return item in self.file_content def get(self, item, default=None): """Get item.""" if item in self: return self[item] else: return default satpy-0.20.0/satpy/readers/nucaps.py000066400000000000000000000405261362525524100173750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to NUCAPS Retrieval NetCDF files. NUCAPS stands for NOAA Unique Combined Atmospheric Processing System. NUCAPS retrievals include temperature, moisture, trace gas, and cloud-cleared radiance profiles. Product details can be found at: https://www.ospo.noaa.gov/Products/atmosphere/soundings/nucaps/ This reader supports both standard NOAA NUCAPS EDRs, and Science EDRs, which are essentially a subset of the standard EDRs with some additional parameters such as relative humidity and boundary layer temperature. NUCAPS data is derived from Cross-track Infrared Sounder (CrIS) data, and from Advanced Technology Microwave Sounder (ATMS) data, instruments onboard Joint Polar Satellite System spacecraft. 
""" from datetime import datetime import xarray as xr import numpy as np import logging from collections import defaultdict from satpy.readers.yaml_reader import FileYAMLReader from satpy.readers.netcdf_utils import NetCDF4FileHandler LOG = logging.getLogger(__name__) # It's difficult to do processing without knowing the pressure levels beforehand ALL_PRESSURE_LEVELS = [ 0.0161, 0.0384, 0.0769, 0.137, 0.2244, 0.3454, 0.5064, 0.714, 0.9753, 1.2972, 1.6872, 2.1526, 2.7009, 3.3398, 4.077, 4.9204, 5.8776, 6.9567, 8.1655, 9.5119, 11.0038, 12.6492, 14.4559, 16.4318, 18.5847, 20.9224, 23.4526, 26.1829, 29.121, 32.2744, 35.6505, 39.2566, 43.1001, 47.1882, 51.5278, 56.126, 60.9895, 66.1253, 71.5398, 77.2396, 83.231, 89.5204, 96.1138, 103.017, 110.237, 117.777, 125.646, 133.846, 142.385, 151.266, 160.496, 170.078, 180.018, 190.32, 200.989, 212.028, 223.441, 235.234, 247.408, 259.969, 272.919, 286.262, 300, 314.137, 328.675, 343.618, 358.966, 374.724, 390.893, 407.474, 424.47, 441.882, 459.712, 477.961, 496.63, 515.72, 535.232, 555.167, 575.525, 596.306, 617.511, 639.14, 661.192, 683.667, 706.565, 729.886, 753.628, 777.79, 802.371, 827.371, 852.788, 878.62, 904.866, 931.524, 958.591, 986.067, 1013.95, 1042.23, 1070.92, 1100 ] class NUCAPSFileHandler(NetCDF4FileHandler): """File handler for NUCAPS netCDF4 format.""" def __init__(self, *args, **kwargs): """Initialize file handler.""" kwargs.setdefault('xarray_kwargs', {}).setdefault( 'decode_times', False) super(NUCAPSFileHandler, self).__init__(*args, **kwargs) def __contains__(self, item): """Return item from file content.""" return item in self.file_content def _parse_datetime(self, datestr): """Parse NUCAPS datetime string.""" return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.%fZ") @property def start_time(self): """Get start time.""" try: return self._parse_datetime(self['/attr/time_coverage_start']) except KeyError: # If attribute not present, use time from file name return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" try: return self._parse_datetime(self['/attr/time_coverage_end']) except KeyError: # If attribute not present, use time from file name return self.filename_info['end_time'] @property def start_orbit_number(self): """Return orbit number for the beginning of the swath.""" try: return int(self['/attr/start_orbit_number']) except KeyError: return 0 @property def end_orbit_number(self): """Return orbit number for the end of the swath.""" try: return int(self['/attr/end_orbit_number']) except KeyError: return 0 @property def platform_name(self): """Return standard platform name for the file's data.""" try: res = self['/attr/platform_name'] if isinstance(res, np.ndarray): return str(res.astype(str)) else: return res except KeyError: return self.filename_info['platform_shortname'] @property def sensor_names(self): """Return standard sensor or instrument name for the file's data.""" try: res = self['/attr/instrument_name'] if isinstance(res, np.ndarray): res = str(res.astype(str)) res = [x.strip() for x in res.split(',')] if len(res) == 1: return res[0] return res except KeyError: return ['CrIS', 'ATMS', 'VIIRS'] def get_shape(self, ds_id, ds_info): """Return data array shape for item specified.""" var_path = ds_info.get('file_key', '{}'.format(ds_id.name)) if var_path + '/shape' not in self: # loading a scalar value shape = 1 else: shape = self[var_path + "/shape"] if "index" in ds_info: shape = shape[1:] if "pressure_index" in ds_info: shape = shape[:-1] return shape def get_metadata(self, 
                     dataset_id, ds_info):
        """Get metadata."""
        var_path = ds_info.get('file_key', '{}'.format(dataset_id.name))
        shape = self.get_shape(dataset_id, ds_info)
        file_units = ds_info.get('file_units',
                                 self.get(var_path + '/attr/units'))
        ds_info.update(getattr(self[var_path], 'attrs', {}))
        # don't overwrite information in the file's attrs because the same
        # `.attrs` is used for each separate Temperature pressure level dataset
        # Plus, if someone gets metadata multiple times then we are screwed
        info = ds_info
        info.update(ds_info)
        info.update(dataset_id.to_dict())
        info.update({
            "shape": shape,
            "units": ds_info.get("units", file_units),
            "platform_name": self.platform_name,
            "sensor": self.sensor_names,
            "start_orbit": self.start_orbit_number,
            "end_orbit": self.end_orbit_number,
        })
        if 'standard_name' not in info:
            sname_path = var_path + '/attr/standard_name'
            info['standard_name'] = self.get(sname_path)
        if dataset_id.name != 'Quality_Flag':
            anc_vars = info.get('ancillary_variables', [])
            if 'Quality_Flag' not in anc_vars:
                anc_vars.append('Quality_Flag')
                info['ancillary_variables'] = anc_vars
        return info

    def get_dataset(self, dataset_id, ds_info):
        """Load data array and metadata for specified dataset."""
        var_path = ds_info.get('file_key', '{}'.format(dataset_id.name))
        metadata = self.get_metadata(dataset_id, ds_info)
        valid_min, valid_max = self[var_path + '/attr/valid_range']
        fill_value = self.get(var_path + '/attr/_FillValue')
        d_tmp = self[var_path]
        if "index" in ds_info:
            d_tmp = d_tmp[int(ds_info["index"])]
        if "pressure_index" in ds_info:
            d_tmp = d_tmp[..., int(ds_info["pressure_index"])]
            # this is a pressure based field
            # include surface_pressure as metadata
            sp = self['Surface_Pressure']
            # Older format
            if 'number_of_FORs' in sp.dims:
                sp = sp.rename({'number_of_FORs': 'y'})
            # Newer format
            if 'Number_of_CrIS_FORs' in sp.dims:
                sp = sp.rename({'Number_of_CrIS_FORs': 'y'})
            if 'surface_pressure' in ds_info:
                # the 'y' dim is assumed here; xr.concat requires an
                # explicit dimension
                ds_info['surface_pressure'] = xr.concat(
                    (ds_info['surface_pressure'], sp), dim='y')
            else:
                ds_info['surface_pressure'] = sp
            # include all the pressure levels
            ds_info.setdefault('pressure_levels', self['Pressure'][0])
        data = d_tmp

        if valid_min is not None and valid_max is not None:
            # the original .cfg/INI based reader only checked valid_max
            data = data.where((data <= valid_max))  # | (data >= valid_min))
        if fill_value is not None:
            data = data.where(data != fill_value)
        data.attrs.update(metadata)
        # Older format
        if 'number_of_FORs' in data.dims:
            data = data.rename({'number_of_FORs': 'y'})
        # Newer format
        if 'Number_of_CrIS_FORs' in data.dims:
            data = data.rename({'Number_of_CrIS_FORs': 'y'})
        return data


class NUCAPSReader(FileYAMLReader):
    """Reader for NUCAPS NetCDF4 files."""

    def __init__(self, config_files, mask_surface=True, mask_quality=True,
                 **kwargs):
        """Configure reader behavior.

        Args:
            mask_surface (boolean): mask anything below the surface pressure
            mask_quality (boolean): mask anything where the `Quality_Flag`
                metadata is ``!= 0``.

        """
        self.pressure_dataset_names = defaultdict(list)
        super(NUCAPSReader, self).__init__(config_files, **kwargs)
        self.mask_surface = self.info.get('mask_surface', mask_surface)
        self.mask_quality = self.info.get('mask_quality', mask_quality)

    def load_ds_ids_from_config(self):
        """Convert config dataset entries to DatasetIDs.

        Special handling is done to provide level-specific datasets for any
        pressure-based datasets. For example, a dataset is added for each
        pressure level of 'Temperature', with each new dataset being named
        'Temperature_Xmb', where X is the pressure level.
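
        For instance, two of the levels in ``ALL_PRESSURE_LEVELS`` above,
        0.0161 hPa and 852.788 hPa, yield the dataset names
        ``Temperature_0.016mb`` and ``Temperature_853mb`` respectively.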
""" super(NUCAPSReader, self).load_ds_ids_from_config() for ds_id in list(self.all_ids.keys()): ds_info = self.all_ids[ds_id] if ds_info.get('pressure_based', False): for idx, lvl_num in enumerate(ALL_PRESSURE_LEVELS): if lvl_num < 5.0: suffix = "_{:0.03f}mb".format(lvl_num) else: suffix = "_{:0.0f}mb".format(lvl_num) new_info = ds_info.copy() new_info['pressure_level'] = lvl_num new_info['pressure_index'] = idx new_info['file_key'] = '{}'.format(ds_id.name) new_info['name'] = ds_id.name + suffix new_ds_id = ds_id._replace(name=new_info['name']) new_info['id'] = new_ds_id self.all_ids[new_ds_id] = new_info self.pressure_dataset_names[ds_id.name].append(new_info['name']) def load(self, dataset_keys, previous_datasets=None, pressure_levels=None): """Load data from one or more set of files. :param pressure_levels: mask out certain pressure levels: True for all levels (min, max) for a range of pressure levels [...] list of levels to include """ dataset_keys = set(self.get_dataset_key(x) for x in dataset_keys) if pressure_levels is not None: # Filter out datasets that don't fit in the correct pressure level for ds_id in dataset_keys.copy(): ds_info = self.all_ids[ds_id] ds_level = ds_info.get("pressure_level") if ds_level is not None: if pressure_levels is True: # they want all pressure levels continue elif len(pressure_levels) == 2 and pressure_levels[0] <= ds_level <= pressure_levels[1]: # given a min and a max pressure level continue elif np.isclose(pressure_levels, ds_level).any(): # they asked for this specific pressure level continue else: # they don't want this dataset at this pressure level LOG.debug("Removing dataset to load: %s", ds_id) dataset_keys.remove(ds_id) continue # Add pressure levels to the datasets to load if needed so # we can do further filtering after loading plevels_ds_id = self.get_dataset_key('Pressure_Levels') remove_plevels = False if plevels_ds_id not in dataset_keys: dataset_keys.add(plevels_ds_id) remove_plevels = True datasets_loaded = super(NUCAPSReader, self).load( dataset_keys, previous_datasets=previous_datasets) if pressure_levels is not None: if remove_plevels: plevels_ds = datasets_loaded.pop(plevels_ds_id) dataset_keys.remove(plevels_ds_id) else: plevels_ds = datasets_loaded[plevels_ds_id] if pressure_levels is True: cond = None elif len(pressure_levels) == 2: cond = (plevels_ds >= pressure_levels[0]) & (plevels_ds <= pressure_levels[1]) else: cond = plevels_ds == pressure_levels if cond is not None: new_plevels = plevels_ds.where(cond, drop=True) else: new_plevels = plevels_ds for ds_id in datasets_loaded.keys(): ds_obj = datasets_loaded[ds_id] if plevels_ds.dims[0] not in ds_obj.dims: continue if cond is not None: datasets_loaded[ds_id] = ds_obj.where(cond, drop=True) datasets_loaded[ds_id].attrs['pressure_levels'] = new_plevels if self.mask_surface: LOG.debug("Filtering pressure levels at or below the surface pressure") for ds_id in sorted(dataset_keys): ds = datasets_loaded[ds_id] if "surface_pressure" not in ds.attrs or "pressure_levels" not in ds.attrs: continue data_pressure = ds.attrs["pressure_levels"] surface_pressure = ds.attrs["surface_pressure"] if isinstance(surface_pressure, float): # scalar needs to become array for each record surface_pressure = np.repeat(surface_pressure, ds.shape[0]) if surface_pressure.ndim == 1 and surface_pressure.shape[0] == ds.shape[0]: # surface is one element per record LOG.debug("Filtering %s at and below the surface pressure", ds_id) if ds.ndim == 2: surface_pressure = np.repeat(surface_pressure[:, None], 
                                                     data_pressure.shape[0], axis=1)
                        data_pressure = np.repeat(data_pressure[None, :],
                                                  surface_pressure.shape[0], axis=0)
                        datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure)
                    else:
                        # entire dataset represents one pressure level
                        data_pressure = ds.attrs["pressure_level"]
                        datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure)
                else:
                    LOG.warning("Not sure how to handle shape of 'surface_pressure' metadata")

        if self.mask_quality:
            LOG.debug("Filtering data based on quality flags")
            for ds_id in sorted(dataset_keys):
                ds = datasets_loaded[ds_id]
                quality_flag = [
                    x for x in ds.attrs.get('ancillary_variables', [])
                    if x.attrs.get('name') == 'Quality_Flag']
                if not quality_flag:
                    continue

                quality_flag = quality_flag[0]
                if quality_flag.dims[0] not in ds.dims:
                    continue

                LOG.debug("Masking %s where quality flag doesn't equal 0", ds_id)
                datasets_loaded[ds_id] = ds.where(quality_flag == 0)

        return datasets_loaded

satpy-0.20.0/satpy/readers/nwcsaf_msg2013_hdf5.py

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy.  If not, see <http://www.gnu.org/licenses/>.
"""Reader for the old NWCSAF/Geo (v2013 and earlier) cloud product format.

References:
    - The NWCSAF GEO 2013 products documentation:
      http://www.nwcsaf.org/web/guest/archive - Search for Code "ICD/3";
      Type "MSG" and the box to the right should say 'Status' (which means
      any status).
Version 7.0 seems to be for v2013 http://www.nwcsaf.org/aemetRest/downloadAttachment/2623 """ import logging from datetime import datetime import numpy as np from satpy.readers.hdf5_utils import HDF5FileHandler from pyresample.geometry import AreaDefinition import h5py logger = logging.getLogger(__name__) PLATFORM_NAMES = {'MSG1': 'Meteosat-8', 'MSG2': 'Meteosat-9', 'MSG3': 'Meteosat-10', 'MSG4': 'Meteosat-11', } class Hdf5NWCSAF(HDF5FileHandler): """NWCSAF MSG hdf5 reader.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(Hdf5NWCSAF, self).__init__(filename, filename_info, filetype_info) self.cache = {} def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" file_key = ds_info.get('file_key', dataset_id.name) data = self[file_key] nodata = None if 'SCALING_FACTOR' in data.attrs and 'OFFSET' in data.attrs: dtype = np.dtype(data.data) if dataset_id.name in ['ctth_alti']: data.attrs['valid_range'] = (0, 27000) data.attrs['_FillValue'] = np.nan if dataset_id.name in ['ctth_alti', 'ctth_pres', 'ctth_tempe', 'ctth_effective_cloudiness']: dtype = np.dtype('float32') nodata = 255 if dataset_id.name in ['ct']: data.attrs['valid_range'] = (0, 20) data.attrs['_FillValue'] = 255 # data.attrs['palette_meanings'] = list(range(21)) attrs = data.attrs scaled_data = (data * data.attrs['SCALING_FACTOR'] + data.attrs['OFFSET']).astype(dtype) if nodata: scaled_data = scaled_data.where(data != nodata) scaled_data = scaled_data.where(scaled_data >= 0) data = scaled_data data.attrs = attrs for key in list(data.attrs.keys()): val = data.attrs[key] if isinstance(val, h5py.h5r.Reference): del data.attrs[key] return data def get_area_def(self, dsid): """Get the area definition of the datasets in the file.""" if dsid.name.endswith('_pal'): raise NotImplementedError cfac = self.file_content['/attr/CFAC'] lfac = self.file_content['/attr/LFAC'] coff = self.file_content['/attr/COFF'] loff = self.file_content['/attr/LOFF'] numcols = int(self.file_content['/attr/NC']) numlines = int(self.file_content['/attr/NL']) aex = get_area_extent(cfac, lfac, coff, loff, numcols, numlines) pname = self.file_content['/attr/PROJECTION_NAME'] proj = {} if pname.startswith("GEOS"): proj["proj"] = "geos" proj["a"] = "6378169.0" proj["b"] = "6356583.8" proj["h"] = "35785831.0" proj["lon_0"] = str(float(pname.split("<")[1][:-1])) else: raise NotImplementedError("Only geos projection supported yet.") area_def = AreaDefinition(self.file_content['/attr/REGION_NAME'], self.file_content['/attr/REGION_NAME'], pname, proj, numcols, numlines, aex) return area_def @property def start_time(self): """Return the start time of the object.""" return datetime.strptime(self.file_content['/attr/IMAGE_ACQUISITION_TIME'], '%Y%m%d%H%M') def get_area_extent(cfac, lfac, coff, loff, numcols, numlines): """Get the area extent from msg parameters.""" xur = (numcols - coff) * 2 ** 16 / (cfac * 1.0) xur = np.deg2rad(xur) * 35785831.0 xll = (-1 - coff) * 2 ** 16 / (cfac * 1.0) xll = np.deg2rad(xll) * 35785831.0 xres = (xur - xll) / numcols xur, xll = xur - xres / 2, xll + xres / 2 yll = (numlines - loff) * 2 ** 16 / (-lfac * 1.0) yll = np.deg2rad(yll) * 35785831.0 yur = (-1 - loff) * 2 ** 16 / (-lfac * 1.0) yur = np.deg2rad(yur) * 35785831.0 yres = (yur - yll) / numlines yll, yur = yll + yres / 2, yur - yres / 2 return xll, yll, xur, yur satpy-0.20.0/satpy/readers/nwcsaf_nc.py000066400000000000000000000270601362525524100200430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 
2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Nowcasting SAF common PPS&MSG NetCDF/CF format reader. References: - The NWCSAF GEO 2018 products documentation: http://www.nwcsaf.org/web/guest/archive """ import logging import os from datetime import datetime import dask.array as da import numpy as np import xarray as xr from pyresample.utils import get_area_def from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import unzip_file logger = logging.getLogger(__name__) SENSOR = {'NOAA-19': 'avhrr/3', 'NOAA-18': 'avhrr/3', 'NOAA-15': 'avhrr/3', 'Metop-A': 'avhrr/3', 'Metop-B': 'avhrr/3', 'Metop-C': 'avhrr/3', 'EOS-Aqua': 'modis', 'EOS-Terra': 'modis', 'Suomi-NPP': 'viirs', 'NOAA-20': 'viirs', 'JPSS-1': 'viirs', } PLATFORM_NAMES = {'MSG1': 'Meteosat-8', 'MSG2': 'Meteosat-9', 'MSG3': 'Meteosat-10', 'MSG4': 'Meteosat-11', } class NcNWCSAF(BaseFileHandler): """NWCSAF PPS&MSG NetCDF reader.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(NcNWCSAF, self).__init__(filename, filename_info, filetype_info) self._unzipped = unzip_file(self.filename) if self._unzipped: self.filename = self._unzipped self.cache = {} self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks=CHUNK_SIZE) self.nc = self.nc.rename({'nx': 'x', 'ny': 'y'}) self.sw_version = self.nc.attrs['source'] self.pps = False try: # MSG: sat_id = self.nc.attrs['satellite_identifier'] try: self.platform_name = PLATFORM_NAMES[sat_id] except KeyError: self.platform_name = PLATFORM_NAMES[sat_id.astype(str)] except KeyError: # PPS: self.platform_name = self.nc.attrs['platform'] self.pps = True self.sensor = SENSOR.get(self.platform_name, 'seviri') def remove_timedim(self, var): """Remove time dimension from dataset.""" if self.pps and var.dims[0] == 'time': data = var[0, :, :] data.attrs = var.attrs var = data return var def get_dataset(self, dsid, info): """Load a dataset.""" dsid_name = dsid.name if dsid_name in self.cache: logger.debug('Get the data set from cache: %s.', dsid_name) return self.cache[dsid_name] if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc: dsid_name = dsid_name + '_reduced' logger.debug('Reading %s.', dsid_name) variable = self.nc[dsid_name] variable = self.remove_timedim(variable) variable = self.scale_dataset(dsid, variable, info) if dsid_name.endswith('_reduced'): # Get full resolution lon,lat from the reduced (tie points) grid self.upsample_geolocation(dsid, info) return self.cache[dsid.name] return variable def scale_dataset(self, dsid, variable, info): """Scale the data set, applying the attributes from the netCDF file. The scale and offset attributes will then be removed from the resulting variable. 
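
        For instance, a variable stored as ``int16`` with attributes
        ``scale_factor=0.01`` and ``add_offset=273.15`` (values here are
        illustrative only) comes back as ``variable * 0.01 + 273.15`` in
        floating point, with fill and out-of-range values already replaced
        by NaN.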
""" variable = remove_empties(variable) scale = variable.attrs.get('scale_factor', np.array(1)) offset = variable.attrs.get('add_offset', np.array(0)) if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating): if '_FillValue' in variable.attrs: variable = variable.where( variable != variable.attrs['_FillValue']) variable.attrs['_FillValue'] = np.nan if 'valid_range' in variable.attrs: variable = variable.where( variable <= variable.attrs['valid_range'][1]) variable = variable.where( variable >= variable.attrs['valid_range'][0]) if 'valid_max' in variable.attrs: variable = variable.where( variable <= variable.attrs['valid_max']) if 'valid_min' in variable.attrs: variable = variable.where( variable >= variable.attrs['valid_min']) attrs = variable.attrs.copy() variable = variable * scale + offset variable.attrs = attrs variable.attrs.pop('add_offset', None) variable.attrs.pop('scale_factor', None) variable.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) if not variable.attrs.get('standard_name', '').endswith('status_flag'): # TODO: do we really need to add units to everything ? variable.attrs.setdefault('units', '1') ancillary_names = variable.attrs.get('ancillary_variables', '') try: variable.attrs['ancillary_variables'] = ancillary_names.split() except AttributeError: pass if 'palette_meanings' in variable.attrs: variable.attrs['palette_meanings'] = [int(val) for val in variable.attrs['palette_meanings'].split()] if variable.attrs['palette_meanings'][0] == 1: variable.attrs['palette_meanings'] = [0] + variable.attrs['palette_meanings'] variable = xr.DataArray(da.vstack((np.array(variable.attrs['fill_value_color']), variable.data)), coords=variable.coords, dims=variable.dims, attrs=variable.attrs) val, idx = np.unique(variable.attrs['palette_meanings'], return_index=True) variable.attrs['palette_meanings'] = val variable = variable[idx] if 'standard_name' in info: variable.attrs.setdefault('standard_name', info['standard_name']) if self.sw_version == 'NWC/PPS version v2014' and dsid.name == 'ctth_alti': # pps 2014 valid range and palette don't match variable.attrs['valid_range'] = (0., 9000.) if self.sw_version == 'NWC/PPS version v2014' and dsid.name == 'ctth_alti_pal': # pps 2014 palette has the nodata color (black) first variable = variable[1:, :] return variable def upsample_geolocation(self, dsid, info): """Upsample the geolocation (lon,lat) from the tiepoint grid.""" from geotiepoints import SatelliteInterpolator # Read the fields needed: col_indices = self.nc['nx_reduced'].values row_indices = self.nc['ny_reduced'].values lat_reduced = self.scale_dataset(dsid, self.nc['lat_reduced'], info) lon_reduced = self.scale_dataset(dsid, self.nc['lon_reduced'], info) shape = (self.nc['y'].shape[0], self.nc['x'].shape[0]) cols_full = np.arange(shape[1]) rows_full = np.arange(shape[0]) satint = SatelliteInterpolator((lon_reduced.values, lat_reduced.values), (row_indices, col_indices), (rows_full, cols_full)) lons, lats = satint.interpolate() self.cache['lon'] = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x']) self.cache['lat'] = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x']) return def get_area_def(self, dsid): """Get the area definition of the datasets in the file. Only applicable for MSG products! 
""" if self.pps: # PPS: raise NotImplementedError if dsid.name.endswith('_pal'): raise NotImplementedError proj_str, area_extent = self._get_projection() nlines, ncols = self.nc[dsid.name].shape area = get_area_def('some_area_name', "On-the-fly area", 'geosmsg', proj_str, ncols, nlines, area_extent) return area def __del__(self): """Delete the instance.""" if self._unzipped: try: os.remove(self._unzipped) except (IOError, OSError): pass @property def start_time(self): """Return the start time of the object.""" try: # MSG: try: return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%SZ') except TypeError: return datetime.strptime(self.nc.attrs['time_coverage_start'].astype(str), '%Y-%m-%dT%H:%M:%SZ') except ValueError: # PPS: return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y%m%dT%H%M%S%fZ') @property def end_time(self): """Return the end time of the object.""" try: # MSG: try: return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%SZ') except TypeError: return datetime.strptime(self.nc.attrs['time_coverage_end'].astype(str), '%Y-%m-%dT%H:%M:%SZ') except ValueError: # PPS: return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y%m%dT%H%M%S%fZ') def _get_projection(self): """Get projection from the NetCDF4 attributes.""" try: proj_str = self.nc.attrs['gdal_projection'] except TypeError: proj_str = self.nc.attrs['gdal_projection'].decode() # Check the a/b/h units radius_a = proj_str.split('+a=')[-1].split()[0] if float(radius_a) > 10e3: units = 'm' scale = 1.0 else: units = 'km' scale = 1e3 if 'units' not in proj_str: proj_str = proj_str + ' +units=' + units area_extent = (float(self.nc.attrs['gdal_xgeo_up_left']) / scale, float(self.nc.attrs['gdal_ygeo_low_right']) / scale, float(self.nc.attrs['gdal_xgeo_low_right']) / scale, float(self.nc.attrs['gdal_ygeo_up_left']) / scale) return proj_str, area_extent def remove_empties(variable): """Remove empty objects from the *variable*'s attrs.""" import h5py for key, val in variable.attrs.items(): if isinstance(val, h5py._hl.base.Empty): variable.attrs.pop(key) return variable satpy-0.20.0/satpy/readers/olci_nc.py000066400000000000000000000334511362525524100175110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Sentinel-3 OLCI reader. This reader supports an optional argument to choose the 'engine' for reading OLCI netCDF4 files. By default, this reader uses the default xarray choice of engine, as defined in the :func:`xarray.open_dataset` documentation`. As an alternative, the user may wish to use the 'h5netcdf' engine, but that is not default as it typically prints many non-fatal but confusing error messages to the terminal. 
To choose between engines the user can do as follows for the default:: scn = Scene(filenames=my_files, reader='olci_l1b') or as follows for the h5netcdf engine:: scn = Scene(filenames=my_files, reader='olci_l1b', reader_kwargs={'engine': 'h5netcdf'}) References: - :func:`xarray.open_dataset` """ import logging from datetime import datetime import dask.array as da import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import angle2xyz, xyz2angle from satpy import CHUNK_SIZE from functools import reduce logger = logging.getLogger(__name__) PLATFORM_NAMES = {'S3A': 'Sentinel-3A', 'S3B': 'Sentinel-3B'} class BitFlags(object): """Manipulate flags stored bitwise.""" flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN', 'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS', 'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL', 'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2', 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', 'WHITE_SCATT', 'LOWRW', 'HIGHRW'] meaning = {f: i for i, f in enumerate(flag_list)} def __init__(self, value): """Init the flags.""" self._value = value def __getitem__(self, item): """Get the item.""" pos = self.meaning[item] data = self._value if isinstance(data, xr.DataArray): data = data.data res = ((data >> pos) % 2).astype(np.bool) res = xr.DataArray(res, coords=self._value.coords, attrs=self._value.attrs, dims=self._value.dims) else: res = ((data >> pos) % 2).astype(np.bool) return res class NCOLCIBase(BaseFileHandler): """The OLCI reader base.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the olci reader base.""" super(NCOLCIBase, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, engine=engine, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'olci' @property def start_time(self): """Start time property.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """End time property.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading %s.', key.name) variable = self.nc[key.name] return variable def __del__(self): """Close the NetCDF file that may still be open.""" try: self.nc.close() except (IOError, OSError, AttributeError): pass class NCOLCICal(NCOLCIBase): """Dummy class for calibration.""" pass class NCOLCIGeo(NCOLCIBase): """Dummy class for navigation.""" pass class NCOLCIChannelBase(NCOLCIBase): """Base class for channel reading.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" super(NCOLCIChannelBase, self).__init__(filename, filename_info, filetype_info) self.channel = filename_info.get('dataset_name') class NCOLCI1B(NCOLCIChannelBase): """File handler for OLCI l1b.""" def __init__(self, filename, filename_info, filetype_info, cal, engine=None): """Init the file handler.""" super(NCOLCI1B, self).__init__(filename, filename_info, filetype_info) self.cal = cal.nc @staticmethod def _get_items(idx, solar_flux): """Get items.""" return solar_flux[idx] def _get_solar_flux(self, band): """Get the solar flux for the band.""" 
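        # A sketch of the mapping performed below: the calibration file's
        # 'solar_flux' is stored per band and per detector, so for the
        # requested band we first select its per-detector flux vector and
        # then use the per-pixel 'detector_index' to expand that vector to
        # the full image grid, block by block via da.map_blocks.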
solar_flux = self.cal['solar_flux'].isel(bands=band).values d_index = self.cal['detector_index'].fillna(0).astype(int) return da.map_blocks(self._get_items, d_index.data, solar_flux=solar_flux, dtype=solar_flux.dtype) def get_dataset(self, key, info): """Load a dataset.""" if self.channel != key.name: return logger.debug('Reading %s.', key.name) radiances = self.nc[self.channel + '_radiance'] if key.calibration == 'reflectance': idx = int(key.name[2:]) - 1 sflux = self._get_solar_flux(idx) radiances = radiances / sflux * np.pi * 100 radiances.attrs['units'] = '%' radiances.attrs['platform_name'] = self.platform_name radiances.attrs['sensor'] = self.sensor radiances.attrs.update(key.to_dict()) return radiances class NCOLCI2(NCOLCIChannelBase): """File handler for OLCI l2.""" def get_dataset(self, key, info): """Load a dataset.""" if self.channel is not None and self.channel != key.name: return logger.debug('Reading %s.', key.name) if self.channel is not None and self.channel.startswith('Oa'): dataset = self.nc[self.channel + '_reflectance'] else: dataset = self.nc[info['nc_key']] if key.name == 'wqsf': dataset.attrs['_FillValue'] = 1 elif key.name == 'mask': dataset = self.getbitmask(dataset) dataset.attrs['platform_name'] = self.platform_name dataset.attrs['sensor'] = self.sensor dataset.attrs.update(key.to_dict()) return dataset def getbitmask(self, wqsf, items=None): """Get the bitmask.""" if items is None: items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] bflags = BitFlags(wqsf) return reduce(np.logical_or, [bflags[item] for item in items]) class NCOLCILowResData(BaseFileHandler): """Handler for low resolution data.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" super(NCOLCILowResData, self).__init__(filename, filename_info, filetype_info) self.nc = None # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'olci' self.cache = {} self.engine = engine def _open_dataset(self): if self.nc is None: self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, engine=self.engine, chunks={'tie_columns': CHUNK_SIZE, 'tie_rows': CHUNK_SIZE}) self.nc = self.nc.rename({'tie_columns': 'x', 'tie_rows': 'y'}) self.l_step = self.nc.attrs['al_subsampling_factor'] self.c_step = self.nc.attrs['ac_subsampling_factor'] def _do_interpolate(self, data): if not isinstance(data, tuple): data = (data,) shape = data[0].shape from geotiepoints.interpolator import Interpolator tie_lines = np.arange(0, (shape[0] - 1) * self.l_step + 1, self.l_step) tie_cols = np.arange(0, (shape[1] - 1) * self.c_step + 1, self.c_step) lines = np.arange((shape[0] - 1) * self.l_step + 1) cols = np.arange((shape[1] - 1) * self.c_step + 1) along_track_order = 1 cross_track_order = 3 satint = Interpolator([x.values for x in data], (tie_lines, tie_cols), (lines, cols), along_track_order, cross_track_order) int_data = satint.interpolate() return [xr.DataArray(da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=['y', 'x']) for x in int_data] def _need_interpolation(self): return (self.c_step != 1 or self.l_step != 1) def __del__(self): """Close the NetCDF file that may still be open.""" try: self.nc.close() except (IOError, OSError, AttributeError): pass class NCOLCIAngles(NCOLCILowResData): """File handler for the OLCI angles.""" datasets = {'satellite_azimuth_angle': 
'OAA', 'satellite_zenith_angle': 'OZA', 'solar_azimuth_angle': 'SAA', 'solar_zenith_angle': 'SZA'} def get_dataset(self, key, info): """Load a dataset.""" if key.name not in self.datasets: return self._open_dataset() logger.debug('Reading %s.', key.name) if self._need_interpolation() and self.cache.get(key.name) is None: if key.name.startswith('satellite'): zen = self.nc[self.datasets['satellite_zenith_angle']] zattrs = zen.attrs azi = self.nc[self.datasets['satellite_azimuth_angle']] aattrs = azi.attrs elif key.name.startswith('solar'): zen = self.nc[self.datasets['solar_zenith_angle']] zattrs = zen.attrs azi = self.nc[self.datasets['solar_azimuth_angle']] aattrs = azi.attrs else: raise NotImplementedError("Don't know how to read " + key.name) x, y, z = angle2xyz(azi, zen) x, y, z = self._do_interpolate((x, y, z)) azi, zen = xyz2angle(x, y, z) azi.attrs = aattrs zen.attrs = zattrs if 'zenith' in key.name: values = zen elif 'azimuth' in key.name: values = azi else: raise NotImplementedError("Don't know how to read " + key.name) if key.name.startswith('satellite'): self.cache['satellite_zenith_angle'] = zen self.cache['satellite_azimuth_angle'] = azi elif key.name.startswith('solar'): self.cache['solar_zenith_angle'] = zen self.cache['solar_azimuth_angle'] = azi elif key.name in self.cache: values = self.cache[key.name] else: values = self.nc[self.datasets[key.name]] values.attrs['platform_name'] = self.platform_name values.attrs['sensor'] = self.sensor values.attrs.update(key.to_dict()) return values def __del__(self): """Close the NetCDF file that may still be open.""" try: self.nc.close() except (IOError, OSError, AttributeError): pass class NCOLCIMeteo(NCOLCILowResData): """File handler for the OLCI meteo data.""" datasets = ['humidity', 'sea_level_pressure', 'total_columnar_water_vapour', 'total_ozone'] # TODO: the following depends on more than columns, rows # float atmospheric_temperature_profile(tie_rows, tie_columns, tie_pressure_levels) ; # float horizontal_wind(tie_rows, tie_columns, wind_vectors) ; # float reference_pressure_level(tie_pressure_levels) ; def get_dataset(self, key, info): """Load a dataset.""" if key.name not in self.datasets: return self._open_dataset() logger.debug('Reading %s.', key.name) if self._need_interpolation() and self.cache.get(key.name) is None: data = self.nc[key.name] values, = self._do_interpolate(data) values.attrs = data.attrs self.cache[key.name] = values elif key.name in self.cache: values = self.cache[key.name] else: values = self.nc[key.name] values.attrs['platform_name'] = self.platform_name values.attrs['sensor'] = self.sensor values.attrs.update(key.to_dict()) return values satpy-0.20.0/satpy/readers/omps_edr.py000066400000000000000000000112071362525524100177060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2015 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Interface to OMPS EDR format """ from datetime import datetime, timedelta import numpy as np import logging from satpy.readers.hdf5_utils import HDF5FileHandler NO_DATE = datetime(1958, 1, 1) EPSILON_TIME = timedelta(days=2) LOG = logging.getLogger(__name__) class EDRFileHandler(HDF5FileHandler): _fill_name = "_FillValue" @property def start_orbit_number(self): return self.filename_info['orbit'] @property def end_orbit_number(self): return self.filename_info['orbit'] @property def platform_name(self): return self.filename_info['platform_shortname'] @property def sensor_name(self): return self.filename_info['instrument_shortname'] def get_shape(self, ds_id, ds_info): return self[ds_info['file_key'] + '/shape'] def adjust_scaling_factors(self, factors, file_units, output_units): if factors is None or factors[0] is None: factors = [1, 0] if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors return np.array(factors) def get_metadata(self, dataset_id, ds_info): var_path = ds_info.get('file_key', '{}'.format(dataset_id.name)) info = getattr(self[var_path], 'attrs', {}) info.update(ds_info) file_units = ds_info.get('file_units') if file_units is None: file_units = self.get(var_path + '/attr/units', self.get(var_path + '/attr/Units')) if file_units is None: raise KeyError("File variable '{}' has no units attribute".format(var_path)) elif file_units == 'deg': file_units = 'degrees' elif file_units == 'Unitless': file_units = '1' info.update({ "shape": self.get_shape(dataset_id, ds_info), "file_units": file_units, "units": ds_info.get("units", file_units), "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) info.update(dataset_id.to_dict()) if 'standard_name' not in ds_info: info['standard_name'] = self.get(var_path + '/attr/Title', dataset_id.name) return info def get_dataset(self, dataset_id, ds_info): var_path = ds_info.get('file_key', '{}'.format(dataset_id.name)) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max = self.get(var_path + '/attr/valid_range', self.get(var_path + '/attr/ValidRange', (None, None))) if valid_min is None or valid_max is None: raise KeyError("File variable '{}' has no valid range attribute".format(var_path)) fill_name = var_path + '/attr/{}'.format(self._fill_name) if fill_name in self: fill_value = self[fill_name] else: fill_value = None data = self[var_path] scale_factor_path = var_path + '/attr/ScaleFactor' if scale_factor_path in self: scale_factor = self[scale_factor_path] scale_offset = self[var_path + '/attr/Offset'] else: scale_factor = None scale_offset = None if valid_min is not None and valid_max is not None: # the original .cfg/INI based reader only checked valid_max data = data.where((data <= valid_max) & (data >= valid_min)) if fill_value is not None: data = data.where(data != fill_value) factors = (scale_factor, scale_offset) factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data = data * factors[0] + factors[1] data.attrs.update(metadata) return data class EDREOSFileHandler(EDRFileHandler): _fill_name = "MissingValue" satpy-0.20.0/satpy/readers/safe_sar_l2_ocn.py000066400000000000000000000112401362525524100211120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy.  If not, see <http://www.gnu.org/licenses/>.
"""SAFE SAR L2 OCN format reader.

The OCN data contains various parameters, but mainly the wind speed and
direction calculated from SAR data and input model data from ECMWF.

Implemented in this reader is the OWI, the ocean wind field. See more at the
ESA web page
https://sentinel.esa.int/web/sentinel/ocean-wind-field-component
"""

import logging

from satpy.readers.file_handlers import BaseFileHandler
from satpy import CHUNK_SIZE

import numpy as np
import xarray as xr

logger = logging.getLogger(__name__)


class SAFENC(BaseFileHandler):
    """Measurement file reader."""

    def __init__(self, filename, filename_info, filetype_info):
        super(SAFENC, self).__init__(filename, filename_info,
                                     filetype_info)

        self._start_time = filename_info['start_time']
        self._end_time = filename_info['end_time']
        # For some SAFE packages, fstart_time differs, but start_time is the
        # same. To avoid overwriting an existing file with the same
        # start_time, a solution is to use fstart_time instead.
        self._fstart_time = filename_info['fstart_time']
        self._fend_time = filename_info['fend_time']

        self._polarization = filename_info['polarization']

        self.lats = None
        self.lons = None
        self._shape = None
        self.area = None

        self.nc = xr.open_dataset(filename,
                                  decode_cf=True,
                                  mask_and_scale=False,
                                  chunks={'owiAzSize': CHUNK_SIZE,
                                          'owiRaSize': CHUNK_SIZE})
        self.nc = self.nc.rename({'owiAzSize': 'y'})
        self.nc = self.nc.rename({'owiRaSize': 'x'})
        self.filename = filename

    def get_dataset(self, key, info):
        """Load a dataset."""
        if key.name in ['owiLat', 'owiLon']:
            if self.lons is None or self.lats is None:
                self.lons = self.nc['owiLon']
                self.lats = self.nc['owiLat']
            if key.name == 'owiLat':
                res = self.lats
            else:
                res = self.lons
            res.attrs = info
        else:
            res = self.nc[key.name]
            if key.name in ['owiHs', 'owiWl', 'owiDirmet']:
                res = xr.DataArray(res, dims=['y', 'x', 'oswPartitions'])
            elif key.name in ['owiNrcs', 'owiNesz', 'owiNrcsNeszCorr']:
                res = xr.DataArray(res, dims=['y', 'x', 'oswPolarisation'])
            elif key.name in ['owiPolarisationName']:
                res = xr.DataArray(res, dims=['owiPolarisation'])
            elif key.name in ['owiCalConstObsi', 'owiCalConstInci']:
                res = xr.DataArray(res, dims=['owiIncSize'])
            elif key.name.startswith('owi'):
                res = xr.DataArray(res, dims=['y', 'x'])
            else:
                res = xr.DataArray(res, dims=['y', 'x'])
            res.attrs.update(info)
            if '_FillValue' in res.attrs:
                res = res.where(res != res.attrs['_FillValue'])
                res.attrs['_FillValue'] = np.nan

        if 'missionName' in self.nc.attrs:
            res.attrs.update({'platform_name': self.nc.attrs['missionName']})

        res.attrs.update({'fstart_time': self._fstart_time})
        res.attrs.update({'fend_time': self._fend_time})

        if not self._shape:
            self._shape = res.shape

        return res

    @property
    def start_time(self):
        """Product start_time, parsed from the measurement file name."""
        return self._start_time

    @property
    def end_time(self):
        """Product end_time, parsed from the measurement file name."""
        return self._end_time

    @property
    def fstart_time(self):
        """Product fstart_time, meaning the start time parsed
from the SAFE directory.""" return self._fstart_time @property def fend_time(self): """Product fend_time meaning the end time parsed from the SAFE directory.""" return self._fend_time satpy-0.20.0/satpy/readers/sar_c_safe.py000066400000000000000000000417551362525524100201760ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SAFE SAR-C reader ********************* This module implements a reader for Sentinel 1 SAR-C GRD (level1) SAFE format as provided by ESA. The format is comprised of a directory containing multiple files, most notably two measurement files in geotiff and a few xml files for calibration, noise and metadata. References: - *Level 1 Product Formatting* https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-1-sar/products-algorithms/level-1-product-formatting - J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won, *"Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel,"* in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3, pp. 1555-1565, March 2018. doi: `10.1109/TGRS.2017.2765248 `_ """ import logging import xml.etree.ElementTree as ET import numpy as np import rasterio from rasterio.windows import Window import dask.array as da from xarray import DataArray from dask.base import tokenize from threading import Lock from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE logger = logging.getLogger(__name__) def dictify(r, root=True): """Convert an ElementTree into a dict.""" if root: return {r.tag: dictify(r, False)} d = {} if r.text and r.text.strip(): try: return int(r.text) except ValueError: try: return float(r.text) except ValueError: return r.text for x in r.findall("./*"): if x.tag in d and not isinstance(d[x.tag], list): d[x.tag] = [d[x.tag]] d[x.tag].append(dictify(x, False)) else: d[x.tag] = dictify(x, False) return d class SAFEXML(BaseFileHandler): """XML file reader for the SAFE format.""" def __init__(self, filename, filename_info, filetype_info, header_file=None): super(SAFEXML, self).__init__(filename, filename_info, filetype_info) self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] self._polarization = filename_info['polarization'] self.root = ET.parse(self.filename) self.hdr = {} if header_file is not None: self.hdr = header_file.get_metadata() def get_metadata(self): """Convert the xml metadata to dict.""" return dictify(self.root.getroot()) @staticmethod def read_xml_array(elts, variable_name): """Read an array from an xml elements *elts*.""" y = [] x = [] data = [] for elt in elts: newx = elt.find('pixel').text.split() y += [int(elt.find('line').text)] * len(newx) x += [int(val) for val in newx] data += [float(val) for val in elt.find(variable_name).text.split()] return np.asarray(data), (x, y) @staticmethod def read_azimuth_noise_array(elts): """Read the azimuth 
noise vectors.

        The azimuth noise is normalized per swath to account for gain
        differences between the swaths in EW mode.

        This is based on this reference:
        J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won,
        "Efficient Thermal Noise Removal for Sentinel-1 TOPSAR
        Cross-Polarization Channel," in IEEE Transactions on Geoscience and
        Remote Sensing, vol. 56, no. 3, pp. 1555-1565, March 2018.
        doi: 10.1109/TGRS.2017.2765248
        """
        y = []
        x = []
        data = []
        for elt in elts:
            first_pixel = int(elt.find('firstRangeSample').text)
            last_pixel = int(elt.find('lastRangeSample').text)
            lines = elt.find('line').text.split()
            lut = elt.find('noiseAzimuthLut').text.split()
            pixels = [first_pixel, last_pixel]
            swath = elt.find('swath').text
            corr = 1
            if swath == 'EW1':
                corr = 1.5
            if swath == 'EW4':
                corr = 1.2
            if swath == 'EW5':
                corr = 1.5
            for pixel in pixels:
                y += [int(val) for val in lines]
                x += [pixel] * len(lines)
                data += [float(val) * corr for val in lut]
        return np.asarray(data), (x, y)

    @staticmethod
    def interpolate_xml_array(data, low_res_coords, shape, chunks):
        """Interpolate arbitrary size dataset to a full sized grid."""
        xpoints, ypoints = low_res_coords
        return interpolate_xarray_linear(xpoints, ypoints, data, shape,
                                         chunks=chunks)

    def get_dataset(self, key, info):
        """Load a dataset."""
        if self._polarization != key.polarization:
            return

        xml_items = info['xml_item']
        xml_tags = info['xml_tag']

        if not isinstance(xml_items, list):
            xml_items = [xml_items]
            xml_tags = [xml_tags]

        for xml_item, xml_tag in zip(xml_items, xml_tags):
            data_items = self.root.findall(".//" + xml_item)
            if not data_items:
                continue
            data, low_res_coords = self.read_xml_array(data_items, xml_tag)
        if key.name.endswith('squared'):
            data **= 2

        data = self.interpolate_xml_array(data, low_res_coords, data.shape)

    def get_noise_correction(self, shape, chunks=None):
        """Get the noise correction array."""
        data_items = self.root.findall(".//noiseVector")
        data, low_res_coords = self.read_xml_array(data_items, 'noiseLut')
        if not data_items:
            data_items = self.root.findall(".//noiseRangeVector")
            data, low_res_coords = self.read_xml_array(data_items, 'noiseRangeLut')
            range_noise = self.interpolate_xml_array(data, low_res_coords,
                                                     shape, chunks=chunks)
            data_items = self.root.findall(".//noiseAzimuthVector")
            data, low_res_coords = self.read_azimuth_noise_array(data_items)
            azimuth_noise = self.interpolate_xml_array(data, low_res_coords,
                                                       shape, chunks=chunks)
            noise = range_noise * azimuth_noise
        else:
            noise = self.interpolate_xml_array(data, low_res_coords, shape,
                                               chunks=chunks)
        return noise

    def get_calibration(self, name, shape, chunks=None):
        """Get the calibration array."""
        data_items = self.root.findall(".//calibrationVector")
        data, low_res_coords = self.read_xml_array(data_items, name)
        return self.interpolate_xml_array(data, low_res_coords, shape,
                                          chunks=chunks)

    def get_calibration_constant(self):
        """Load the calibration constant."""
        return float(self.root.find('.//absoluteCalibrationConstant').text)

    @property
    def start_time(self):
        return self._start_time

    @property
    def end_time(self):
        return self._end_time


def interpolate_slice(slice_rows, slice_cols, interpolator):
    """Interpolate the given slice of the larger array."""
    fine_rows = np.arange(slice_rows.start, slice_rows.stop, slice_rows.step)
    fine_cols = np.arange(slice_cols.start, slice_cols.stop, slice_cols.step)
    return interpolator(fine_cols, fine_rows)


def interpolate_xarray(xpoints, ypoints, values, shape,
                       kind='cubic', blocksize=CHUNK_SIZE):
    """Interpolate, generating a dask array."""
    vchunks = range(0,
shape[0], blocksize) hchunks = range(0, shape[1], blocksize) token = tokenize(blocksize, xpoints, ypoints, values, kind, shape) name = 'interpolate-' + token from scipy.interpolate import interp2d interpolator = interp2d(xpoints, ypoints, values, kind=kind) dskx = {(name, i, j): (interpolate_slice, slice(vcs, min(vcs + blocksize, shape[0])), slice(hcs, min(hcs + blocksize, shape[1])), interpolator) for i, vcs in enumerate(vchunks) for j, hcs in enumerate(hchunks) } res = da.Array(dskx, name, shape=list(shape), chunks=(blocksize, blocksize), dtype=values.dtype) return DataArray(res, dims=('y', 'x')) def intp(grid_x, grid_y, interpolator): return interpolator((grid_y, grid_x)) def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE): """Interpolate linearly, generating a dask array.""" from scipy.interpolate.interpnd import (LinearNDInterpolator, _ndim_coords_from_arrays) if isinstance(chunks, (list, tuple)): vchunks, hchunks = chunks else: vchunks, hchunks = chunks, chunks points = _ndim_coords_from_arrays(np.vstack((np.asarray(ypoints), np.asarray(xpoints))).T) interpolator = LinearNDInterpolator(points, values) grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks), da.arange(shape[0], chunks=vchunks)) # workaround for non-thread-safe first call of the interpolator: interpolator((0, 0)) res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator) return DataArray(res, dims=('y', 'x')) class SAFEGRD(BaseFileHandler): """Measurement file reader. The measurement files are in geotiff format and read using rasterio. For performance reasons, the reading adapts the chunk size to match the file's block size. """ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh): super(SAFEGRD, self).__init__(filename, filename_info, filetype_info) self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] self._polarization = filename_info['polarization'] self._mission_id = filename_info['mission_id'] self.lats = None self.lons = None self.alts = None self.calibration = calfh self.noise = noisefh self.read_lock = Lock() self.filehandle = rasterio.open(self.filename, 'r', sharing=False) def get_dataset(self, key, info): """Load a dataset.""" if self._polarization != key.polarization: return logger.debug('Reading %s.', key.name) if key.name in ['longitude', 'latitude']: logger.debug('Constructing coordinate arrays.') if self.lons is None or self.lats is None: self.lons, self.lats, self.alts = self.get_lonlatalts() if key.name == 'latitude': data = self.lats else: data = self.lons data.attrs.update(info) else: calibration = key.calibration or 'gamma' if calibration == 'sigma_nought': calibration = 'sigmaNought' elif calibration == 'beta_nought': calibration = 'betaNought' data = self.read_band() # chunks = data.chunks # This seems to be slower for some reason chunks = CHUNK_SIZE logger.debug('Reading noise data.') noise = self.noise.get_noise_correction(data.shape, chunks=chunks).fillna(0) logger.debug('Reading calibration data.') cal = self.calibration.get_calibration(calibration, data.shape, chunks=chunks) cal_constant = self.calibration.get_calibration_constant() logger.debug('Calibrating.') data = data.where(data > 0) data = data.astype(np.float64) dn = data * data data = ((dn - noise).clip(min=0) + cal_constant) data = (np.sqrt(data) / cal).clip(min=0) data.attrs.update(info) del noise, cal data.attrs.update({'platform_name': self._mission_id}) data.attrs['units'] = calibration return data def 
read_band_blocks(self, blocksize=CHUNK_SIZE): """Read the band in native blocks.""" # For sentinel 1 data, the blocks are one line each, and dask seems to choke on that. band = self.filehandle shape = band.shape token = tokenize(blocksize, band) name = 'read_band-' + token dskx = dict() if len(band.block_shapes) != 1: raise NotImplementedError('Bands with multiple shapes not supported.') else: chunks = band.block_shapes[0] def do_read(the_band, the_window, the_lock): with the_lock: return the_band.read(1, None, window=the_window) for ji, window in band.block_windows(1): dskx[(name, ) + ji] = (do_read, band, window, self.read_lock) res = da.Array(dskx, name, shape=list(shape), chunks=chunks, dtype=band.dtypes[0]) return DataArray(res, dims=('y', 'x')) def read_band(self, blocksize=CHUNK_SIZE): """Read the band in chunks.""" band = self.filehandle shape = band.shape if len(band.block_shapes) == 1: total_size = blocksize * blocksize * 1.0 lines, cols = band.block_shapes[0] if cols > lines: hblocks = cols vblocks = int(total_size / cols / lines) else: hblocks = int(total_size / cols / lines) vblocks = lines else: hblocks = blocksize vblocks = blocksize vchunks = range(0, shape[0], vblocks) hchunks = range(0, shape[1], hblocks) token = tokenize(hblocks, vblocks, band) name = 'read_band-' + token def do_read(the_band, the_window, the_lock): with the_lock: return the_band.read(1, None, window=the_window) dskx = {(name, i, j): (do_read, band, Window(hcs, vcs, min(hblocks, shape[1] - hcs), min(vblocks, shape[0] - vcs)), self.read_lock) for i, vcs in enumerate(vchunks) for j, hcs in enumerate(hchunks) } res = da.Array(dskx, name, shape=list(shape), chunks=(vblocks, hblocks), dtype=band.dtypes[0]) return DataArray(res, dims=('y', 'x')) def get_lonlatalts(self): """Obtain GCPs and construct latitude and longitude arrays. Args: band (gdal band): Measurement band which comes with GCPs array_shape (tuple): The size of the data array Returns: coordinates (tuple): A tuple with longitude and latitude arrays """ band = self.filehandle (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (gcps, crs) = self.get_gcps() # FIXME: do interpolation on cartesian coordinates if the area is # problematic. longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, band.shape) latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape) altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) longitudes.attrs['gcps'] = gcps longitudes.attrs['crs'] = crs latitudes.attrs['gcps'] = gcps latitudes.attrs['crs'] = crs altitudes.attrs['gcps'] = gcps altitudes.attrs['crs'] = crs return longitudes, latitudes, altitudes def get_gcps(self): """Read GCP from the GDAL band.
Args: band (gdal band): Measurement band which comes with GCP's coordinates (tuple): A tuple with longitude and latitude arrays Returns: points (tuple): Pixel and Line indices 1d arrays gcp_coords (tuple): longitude and latitude 1d arrays """ gcps = self.filehandle.gcps gcp_array = np.array([(p.row, p.col, p.x, p.y, p.z) for p in gcps[0]]) ypoints = np.unique(gcp_array[:, 0]) xpoints = np.unique(gcp_array[:, 1]) gcp_lons = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_lats = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_alts = gcp_array[:, 4].reshape(ypoints.shape[0], xpoints.shape[0]) return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), gcps @property def start_time(self): return self._start_time @property def end_time(self): return self._end_time satpy-0.20.0/satpy/readers/scatsat1_l2b.py000066400000000000000000000052701362525524100203630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """ScatSat-1 L2B Reader, distributed by Eumetsat in HDF5 format """ from datetime import datetime import h5py from satpy.dataset import Dataset from satpy.readers.file_handlers import BaseFileHandler class SCATSAT1L2BFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") h5data = self.h5f['science_data'] self.filename_info['start_time'] = datetime.strptime(h5data.attrs['Range Beginning Date'], '%Y-%jT%H:%M:%S.%f') self.filename_info['end_time'] = datetime.strptime(h5data.attrs['Range Ending Date'], '%Y-%jT%H:%M:%S.%f') self.lons = None self.lats = None self.wind_speed_scale = float(h5data.attrs['Wind Speed Selection Scale']) self.wind_direction_scale = float(h5data.attrs['Wind Direction Selection Scale']) self.latitude_scale = float(h5data.attrs['Latitude Scale']) self.longitude_scale = float(h5data.attrs['Longitude Scale']) def get_dataset(self, key, info): h5data = self.h5f['science_data'] stdname = info.get('standard_name') if stdname in ['latitude', 'longitude']: if self.lons is None or self.lats is None: self.lons = h5data['Longitude'][:]*self.longitude_scale self.lats = h5data['Latitude'][:]*self.latitude_scale if info['standard_name'] == 'longitude': return Dataset(self.lons, id=key, **info) else: return Dataset(self.lats, id=key, **info) if stdname in ['wind_speed']: windspeed = h5data['Wind_speed_selection'][:, :] * self.wind_speed_scale return Dataset(windspeed, id=key, **info) if stdname in ['wind_direction']: wind_direction = h5data['Wind_direction_selection'][:, :] * self.wind_direction_scale return Dataset(wind_direction, id=key, **info) satpy-0.20.0/satpy/readers/scmi.py000066400000000000000000000270541362525524100170400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is 
part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SCMI NetCDF4 Reader. SCMI files are typically used for data for the ABI instrument onboard the GOES-16/17 satellites. It is the primary format used for providing ABI data to the AWIPS visualization clients used by the US National Weather Service forecasters. The python code for this reader may be reused by other readers as NetCDF schemes/metadata change for different products. The initial reader using this code is the "scmi_abi" reader (see `abi_l1b_scmi.yaml` for more information). There are two forms of these files that this reader supports: 1. Official SCMI format: NetCDF4 files where the main data variable is stored in a variable called "Sectorized_CMI". This variable name can be configured in the YAML configuration file. 2. Satpy/Polar2Grid SCMI format: NetCDF4 files based on the official SCMI format created for the Polar2Grid project. This format was migrated to Satpy as part of Polar2Grid's adoption of Satpy for the majority of its features. This format is what is produced by Satpy's `scmi` writer. This format can be identified by a single variable named "data" and a global attribute named ``"awips_id"`` that is set to a string starting with ``"AWIPS_"``. """ import logging from datetime import datetime import os import numpy as np import xarray as xr from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations LOAD_CHUNK_SIZE = int(os.getenv('PYTROLL_LOAD_CHUNK_SIZE', -1)) logger = logging.getLogger(__name__) class SCMIFileHandler(BaseFileHandler): """Handle a single SCMI NetCDF4 file.""" def __init__(self, filename, filename_info, filetype_info): """Set up the SCMI file handler.""" super(SCMIFileHandler, self).__init__(filename, filename_info, filetype_info) # xarray's default netcdf4 engine self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={'x': LOAD_CHUNK_SIZE, 'y': LOAD_CHUNK_SIZE}) self.platform_name = self.nc.attrs['satellite_id'] self.sensor = self._get_sensor() self.nlines = self.nc.dims['y'] self.ncols = self.nc.dims['x'] self.coords = {} def _get_sensor(self): """Determine the sensor for this file.""" # sometimes Himawari-8 (or 9) data is stored in SCMI format is_h8 = 'H8' in self.platform_name is_h9 = 'H9' in self.platform_name is_ahi = is_h8 or is_h9 return 'ahi' if is_ahi else 'abi' @property def sensor_names(self): """Get the sensor names.""" return [self.sensor] def __getitem__(self, item): """Wrap around `self.nc[item]`. Some datasets use a 32-bit float scaling factor like the 'x' and 'y' variables which causes inaccurate unscaled data values. This method forces the scale factor to a 64-bit float first. 
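For example, this is a sketch of the promotion applied below, using the names from this method::

    data = data * float(factor) + offset

Here ``float(factor)`` is a 64-bit Python float, so the unscaling is carried out in double precision even when the stored ``scale_factor`` is a 32-bit float.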
""" data = self.nc[item] attrs = data.attrs factor = data.attrs.get('scale_factor') offset = data.attrs.get('add_offset') fill = data.attrs.get('_FillValue') if fill is not None: data = data.where(data != fill) if factor is not None: # make sure the factor is a 64-bit float # can't do this in place since data is most likely uint16 # and we are making it a 64-bit float data = data * float(factor) + offset data.attrs = attrs # handle coordinates (and recursive fun) new_coords = {} # 'time' dimension causes issues in other processing if 'time' in data.coords: data = data.drop_vars('time') if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): if coord_name not in self.coords: self.coords[coord_name] = self[coord_name] new_coords[coord_name] = self.coords[coord_name] data.coords.update(new_coords) return data def get_shape(self, key, info): """Get the shape of the data.""" return self.nlines, self.ncols def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading in get_dataset %s.', key.name) var_name = info.get('file_key', self.filetype_info.get('file_key')) if var_name: data = self[var_name] elif 'Sectorized_CMI' in self.nc: data = self['Sectorized_CMI'] elif 'data' in self.nc: data = self['data'] # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations data = data.chunk({'x': CHUNK_SIZE, 'y': CHUNK_SIZE}) # convert to satpy standard units factor = data.attrs.pop('scale_factor', 1) offset = data.attrs.pop('add_offset', 0) units = data.attrs.get('units', 1) # the '*1' unit is some weird convention added/needed by AWIPS if units in ['1', '*1'] and key.calibration == 'reflectance': data *= 100 factor *= 100 # used for valid_min/max data.attrs['units'] = '%' # set up all the attributes that might be useful to the user/satpy data.attrs.update({'platform_name': self.platform_name, 'sensor': data.attrs.get('sensor', self.sensor), }) if 'satellite_longitude' in self.nc.attrs: data.attrs['satellite_longitude'] = self.nc.attrs['satellite_longitude'] data.attrs['satellite_latitude'] = self.nc.attrs['satellite_latitude'] data.attrs['satellite_altitude'] = self.nc.attrs['satellite_altitude'] scene_id = self.nc.attrs.get('scene_id') if scene_id is not None: data.attrs['scene_id'] = scene_id data.attrs.update(key.to_dict()) data.attrs.pop('_FillValue', None) if 'valid_min' in data.attrs: vmin = data.attrs.pop('valid_min') vmax = data.attrs.pop('valid_max') vmin = vmin * factor + offset vmax = vmax * factor + offset data.attrs['valid_min'] = vmin data.attrs['valid_max'] = vmax return data def _get_cf_grid_mapping_var(self): """Figure out which grid mapping should be used.""" gmaps = ['fixedgrid_projection', 'goes_imager_projection', 'lambert_projection', 'polar_projection', 'mercator_projection'] if 'grid_mapping' in self.filename_info: gmaps = [self.filename_info.get('grid_mapping')] + gmaps for grid_mapping in gmaps: if grid_mapping in self.nc: return self.nc[grid_mapping] raise KeyError("Can't find grid mapping variable in SCMI file") def _get_proj4_name(self, projection): """Map CF projection name to PROJ.4 name.""" gmap_name = projection.attrs['grid_mapping_name'] proj = { 'geostationary': 'geos', 'lambert_conformal_conic': 'lcc', 'polar_stereographic': 'stere', 'mercator': 'merc', }.get(gmap_name, gmap_name) return proj def _get_proj_specific_params(self, projection): """Convert CF projection parameters to PROJ.4 dict.""" proj = self._get_proj4_name(projection) proj_dict = 
{ 'proj': proj, 'a': float(projection.attrs['semi_major_axis']), 'b': float(projection.attrs['semi_minor_axis']), 'units': 'm', } if proj == 'geos': proj_dict['h'] = float(projection.attrs['perspective_point_height']) proj_dict['sweep'] = projection.attrs.get('sweep_angle_axis', 'y') proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) proj_dict['lat_0'] = float(projection.attrs.get('latitude_of_projection_origin', 0.0)) elif proj == 'lcc': proj_dict['lat_0'] = float(projection.attrs['standard_parallel']) proj_dict['lon_0'] = float(projection.attrs['longitude_of_central_meridian']) proj_dict['lat_1'] = float(projection.attrs['latitude_of_projection_origin']) elif proj == 'stere': proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) proj_dict['lon_0'] = float(projection.attrs['straight_vertical_longitude_from_pole']) proj_dict['lat_0'] = float(projection.attrs['latitude_of_projection_origin']) elif proj == 'merc': proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) proj_dict['lat_0'] = proj_dict['lat_ts'] proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) else: raise ValueError("Can't handle projection '{}'".format(proj)) return proj_dict def _calc_extents(self, proj_dict): """Calculate area extents from x/y variables.""" h = float(proj_dict.get('h', 1.)) # force to 64-bit float x = self['x'] y = self['y'] x_units = x.attrs.get('units', 'rad') if x_units == 'meters': h_factor = 1. factor = 1. elif x_units == 'microradian': h_factor = h factor = 1e6 else: # radians h_factor = h factor = 1. x_l = h_factor * x[0] / factor x_r = h_factor * x[-1] / factor y_l = h_factor * y[-1] / factor y_u = h_factor * y[0] / factor x_half = (x_r - x_l) / (self.ncols - 1) / 2. y_half = (y_u - y_l) / (self.nlines - 1) / 2. return x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half def get_area_def(self, key): """Get the area definition of the data at hand.""" # FIXME: Can't we pass dataset info to the get_area_def? projection = self._get_cf_grid_mapping_var() proj_dict = self._get_proj_specific_params(projection) area_extent = self._calc_extents(proj_dict) area_name = '{}_{}'.format(self.sensor, proj_dict['proj']) return geometry.AreaDefinition( area_name, "SCMI file area", area_name, proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_date_time'], '%Y%j%H%M%S') @property def end_time(self): """Get the end time.""" return self.start_time def __del__(self): """Delete the instance.""" try: self.nc.close() except (IOError, OSError): pass satpy-0.20.0/satpy/readers/seviri_base.py000066400000000000000000000313541362525524100203760ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Utilities and eventually also base classes for MSG HRIT/Native data reading.""" import numpy as np from numpy.polynomial.chebyshev import Chebyshev import dask.array as da from satpy.readers.eum_base import (time_cds_short, issue_revision) C1 = 1.19104273e-5 C2 = 1.43877523 VISIR_NUM_COLUMNS = 3712 VISIR_NUM_LINES = 3712 HRV_NUM_COLUMNS = 11136 CHANNEL_NAMES = {1: "VIS006", 2: "VIS008", 3: "IR_016", 4: "IR_039", 5: "WV_062", 6: "WV_073", 7: "IR_087", 8: "IR_097", 9: "IR_108", 10: "IR_120", 11: "IR_134", 12: "HRV"} VIS_CHANNELS = ['HRV', 'VIS006', 'VIS008', 'IR_016'] # Polynomial coefficients for spectral-effective BT fits BTFIT = {} # [A, B, C] BTFIT['IR_039'] = [0.0, 1.011751900, -3.550400] BTFIT['WV_062'] = [0.00001805700, 1.000255533, -1.790930] BTFIT['WV_073'] = [0.00000231818, 1.000668281, -0.456166] BTFIT['IR_087'] = [-0.00002332000, 1.011803400, -1.507390] BTFIT['IR_097'] = [-0.00002055330, 1.009370670, -1.030600] BTFIT['IR_108'] = [-0.00007392770, 1.032889800, -3.296740] BTFIT['IR_120'] = [-0.00007009840, 1.031314600, -3.181090] BTFIT['IR_134'] = [-0.00007293450, 1.030424800, -2.645950] SATNUM = {321: "8", 322: "9", 323: "10", 324: "11"} CALIB = {} # Meteosat 8 CALIB[321] = {'HRV': {'F': 78.7599 / np.pi}, 'VIS006': {'F': 65.2296 / np.pi}, 'VIS008': {'F': 73.0127 / np.pi}, 'IR_016': {'F': 62.3715 / np.pi}, 'IR_039': {'VC': 2567.33, 'ALPHA': 0.9956, 'BETA': 3.41}, 'WV_062': {'VC': 1598.103, 'ALPHA': 0.9962, 'BETA': 2.218}, 'WV_073': {'VC': 1362.081, 'ALPHA': 0.9991, 'BETA': 0.478}, 'IR_087': {'VC': 1149.069, 'ALPHA': 0.9996, 'BETA': 0.179}, 'IR_097': {'VC': 1034.343, 'ALPHA': 0.9999, 'BETA': 0.06}, 'IR_108': {'VC': 930.647, 'ALPHA': 0.9983, 'BETA': 0.625}, 'IR_120': {'VC': 839.66, 'ALPHA': 0.9988, 'BETA': 0.397}, 'IR_134': {'VC': 752.387, 'ALPHA': 0.9981, 'BETA': 0.578}} # Meteosat 9 CALIB[322] = {'HRV': {'F': 79.0113 / np.pi}, 'VIS006': {'F': 65.2065 / np.pi}, 'VIS008': {'F': 73.1869 / np.pi}, 'IR_016': {'F': 61.9923 / np.pi}, 'IR_039': {'VC': 2568.832, 'ALPHA': 0.9954, 'BETA': 3.438}, 'WV_062': {'VC': 1600.548, 'ALPHA': 0.9963, 'BETA': 2.185}, 'WV_073': {'VC': 1360.330, 'ALPHA': 0.9991, 'BETA': 0.47}, 'IR_087': {'VC': 1148.620, 'ALPHA': 0.9996, 'BETA': 0.179}, 'IR_097': {'VC': 1035.289, 'ALPHA': 0.9999, 'BETA': 0.056}, 'IR_108': {'VC': 931.7, 'ALPHA': 0.9983, 'BETA': 0.64}, 'IR_120': {'VC': 836.445, 'ALPHA': 0.9988, 'BETA': 0.408}, 'IR_134': {'VC': 751.792, 'ALPHA': 0.9981, 'BETA': 0.561}} # Meteosat 10 CALIB[323] = {'HRV': {'F': 78.9416 / np.pi}, 'VIS006': {'F': 65.5148 / np.pi}, 'VIS008': {'F': 73.1807 / np.pi}, 'IR_016': {'F': 62.0208 / np.pi}, 'IR_039': {'VC': 2547.771, 'ALPHA': 0.9915, 'BETA': 2.9002}, 'WV_062': {'VC': 1595.621, 'ALPHA': 0.9960, 'BETA': 2.0337}, 'WV_073': {'VC': 1360.337, 'ALPHA': 0.9991, 'BETA': 0.4340}, 'IR_087': {'VC': 1148.130, 'ALPHA': 0.9996, 'BETA': 0.1714}, 'IR_097': {'VC': 1034.715, 'ALPHA': 0.9999, 'BETA': 0.0527}, 'IR_108': {'VC': 929.842, 'ALPHA': 0.9983, 'BETA': 0.6084}, 'IR_120': {'VC': 838.659, 'ALPHA': 0.9988, 'BETA': 0.3882}, 'IR_134': {'VC': 750.653, 'ALPHA': 0.9982, 'BETA': 0.5390}} # Meteosat 11 CALIB[324] = {'HRV': {'F': 79.0035 / np.pi}, 'VIS006': {'F': 65.2656 / np.pi}, 'VIS008': {'F': 73.1692 / np.pi}, 'IR_016': {'F': 61.9416 / np.pi}, 'IR_039': {'VC': 2555.280, 'ALPHA': 0.9916, 'BETA': 2.9438}, 'WV_062': {'VC': 1596.080, 'ALPHA': 0.9959, 'BETA': 2.0780}, 'WV_073': {'VC': 1361.748, 'ALPHA': 0.9990, 'BETA': 0.4929}, 'IR_087': {'VC': 1147.433, 'ALPHA': 0.9996, 'BETA': 0.1731}, 'IR_097': {'VC': 1034.851, 'ALPHA': 0.9998, 'BETA': 
0.0597}, 'IR_108': {'VC': 931.122, 'ALPHA': 0.9983, 'BETA': 0.6256}, 'IR_120': {'VC': 839.113, 'ALPHA': 0.9988, 'BETA': 0.4002}, 'IR_134': {'VC': 748.585, 'ALPHA': 0.9981, 'BETA': 0.5635}} def get_cds_time(days, msecs): """Compute timestamp given the days since epoch and milliseconds of the day. 1958-01-01 00:00 is interpreted as fill value and will be replaced by NaT (Not a Time). Args: days (int, either scalar or numpy.ndarray): Days since 1958-01-01 msecs (int, either scalar or numpy.ndarray): Milliseconds of the day Returns: numpy.datetime64: Timestamp(s) """ if np.isscalar(days): days = np.array([days], dtype='int64') msecs = np.array([msecs], dtype='int64') time = np.datetime64('1958-01-01').astype('datetime64[ms]') + \ days.astype('timedelta64[D]') + msecs.astype('timedelta64[ms]') time[time == np.datetime64('1958-01-01 00:00')] = np.datetime64("NaT") if len(time) == 1: return time[0] return time def dec10216(inbuf): """Decode 10 bits data into 16 bits words. :: /* * pack 4 10-bit words in 5 bytes into 4 16-bit words * * 0 1 2 3 4 5 * 01234567890123456789012345678901234567890 * 0 1 2 3 4 */ ip = &in_buffer[i]; op = &out_buffer[j]; op[0] = ip[0]*4 + ip[1]/64; op[1] = (ip[1] & 0x3F)*16 + ip[2]/16; op[2] = (ip[2] & 0x0F)*64 + ip[3]/4; op[3] = (ip[3] & 0x03)*256 +ip[4]; """ arr10 = inbuf.astype(np.uint16) arr16_len = int(len(arr10) * 4 / 5) arr10_len = int((arr16_len * 5) / 4) arr10 = arr10[:arr10_len] # adjust size # dask is slow with indexing arr10_0 = arr10[::5] arr10_1 = arr10[1::5] arr10_2 = arr10[2::5] arr10_3 = arr10[3::5] arr10_4 = arr10[4::5] arr16_0 = (arr10_0 << 2) + (arr10_1 >> 6) arr16_1 = ((arr10_1 & 63) << 4) + (arr10_2 >> 4) arr16_2 = ((arr10_2 & 15) << 6) + (arr10_3 >> 2) arr16_3 = ((arr10_3 & 3) << 8) + arr10_4 arr16 = da.stack([arr16_0, arr16_1, arr16_2, arr16_3], axis=-1).ravel() arr16 = da.rechunk(arr16, arr16.shape[0]) return arr16 class MpefProductHeader(object): """MPEF product header class.""" def get(self): """Return numpy record_array for MPEF product header.""" record = [ ('MPEF_File_Id', np.int16), ('MPEF_Header_Version', np.uint8), ('ManualDissAuthRequest', np.bool), ('ManualDisseminationAuth', np.bool), ('DisseminationAuth', np.bool), ('NominalTime', time_cds_short), ('ProductQuality', np.uint8), ('ProductCompleteness', np.uint8), ('ProductTimeliness', np.uint8), ('ProcessingInstanceId', np.int8), ('ImagesUsed', self.images_used, (4,)), ('BaseAlgorithmVersion', issue_revision), ('ProductAlgorithmVersion', issue_revision), ('InstanceServerName', 'S2'), ('SpacecraftName', 'S2'), ('Mission', 'S3'), ('RectificationLongitude', 'S5'), ('Encoding', 'S1'), ('TerminationSpace', 'S1'), ('EncodingVersion', np.uint16), ('Channel', np.uint8), ('Filler', 'S20'), ('RepeatCycle', 'S15'), ] return np.dtype(record).newbyteorder('>') @property def images_used(self): """Return structure for images_used.""" record = [ ('Padding1', 'S2'), ('ExpectedImage', time_cds_short), ('ImageReceived', np.bool), ('Padding2', 'S1'), ('UsedImageStart_Day', np.uint16), ('UsedImageStart_Millsec', np.uint32), ('Padding3', 'S2'), ('UsedImageEnd_Day', np.uint16), ('UsedImageEndt_Millsec', np.uint32), ] return record mpef_product_header = MpefProductHeader().get() class SEVIRICalibrationHandler(object): """Calibration handler for SEVIRI HRIT- and native-formats.""" def _convert_to_radiance(self, data, gain, offset): """Calibrate to radiance.""" return (data * gain + offset).clip(0.0, None) def _erads2bt(self, data, channel_name): """Convert effective radiance to brightness temperature.""" cal_info 
= CALIB[self.platform_id][channel_name] alpha = cal_info["ALPHA"] beta = cal_info["BETA"] wavenumber = CALIB[self.platform_id][channel_name]["VC"] return (self._tl15(data, wavenumber) - beta) / alpha def _ir_calibrate(self, data, channel_name, cal_type): """Calibrate to brightness temperature.""" if cal_type == 1: # spectral radiances return self._srads2bt(data, channel_name) elif cal_type == 2: # effective radiances return self._erads2bt(data, channel_name) else: raise NotImplementedError('Unknown calibration type') def _srads2bt(self, data, channel_name): """Convert spectral radiance to brightness temperature.""" a__, b__, c__ = BTFIT[channel_name] wavenumber = CALIB[self.platform_id][channel_name]["VC"] temp = self._tl15(data, wavenumber) return a__ * temp * temp + b__ * temp + c__ def _tl15(self, data, wavenumber): """Compute the L15 temperature.""" return ((C2 * wavenumber) / np.log((1.0 / data) * C1 * wavenumber ** 3 + 1.0)) def _vis_calibrate(self, data, solar_irradiance): """Calibrate to reflectance.""" return data * 100.0 / solar_irradiance def chebyshev(coefs, time, domain): """Evaluate a Chebyshev Polynomial. Args: coefs (list, np.array): Coefficients defining the polynomial time (int, float): Time where to evaluate the polynomial domain (list, tuple): Domain (or time interval) for which the polynomial is defined: [left, right] Reference: Appendix A in the MSG Level 1.5 Image Data Format Description. """ return Chebyshev(coefs, domain=domain)(time) - 0.5 * coefs[0] satpy-0.20.0/satpy/readers/seviri_l1b_hrit.py000066400000000000000000001077551362525524100212010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""SEVIRI HRIT format reader. Introduction ------------ The ``seviri_l1b_hrit`` reader reads and calibrates MSG-SEVIRI L1.5 image data in HRIT format. The format is explained in the `MSG Level 1.5 Image Format Description`_. The files are usually named as follows: .. code-block:: none H-000-MSG4__-MSG4________-_________-PRO______-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000001___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000002___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000003___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000004___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000005___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000006___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000007___-201903011200-__ H-000-MSG4__-MSG4________-IR_108___-000008___-201903011200-__ H-000-MSG4__-MSG4________-_________-EPI______-201903011200-__ Each image is decomposed into 24 segments (files) for the high-resolution-visible (HRV) channel and 8 segments for other visible (VIS) and infrared (IR) channels. Additionally there is one prologue and one epilogue file for the entire scan which contain global metadata valid for all channels. 
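For instance, to load only the ``IR_108`` channel of the time slot listed above, all eight ``IR_108`` segments have to be passed to the Scene together with the prologue (``PRO``) and epilogue (``EPI``) files. A minimal sketch (the ``data/`` paths are hypothetical; the second pattern matches the ``PRO`` and ``EPI`` files, whose channel field is all underscores):

.. code-block:: python

    import glob

    from satpy import Scene

    # segment files for IR_108 plus the prologue and epilogue of the scan
    filenames = (glob.glob('data/H-000-MSG4__-MSG4________-IR_108___-*-201903011200-__') +
                 glob.glob('data/H-000-MSG4__-MSG4________-_________-*-201903011200-__'))
    scn = Scene(filenames=filenames, reader='seviri_l1b_hrit')
    scn.load(['IR_108'])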
Reader Arguments ---------------- Some arguments can be provided to the reader to change its behaviour. These are provided through the `Scene` instantiation, e.g.:: Scene(reader="seviri_l1b_hrit", filenames=fnames, reader_kwargs={'fill_hrv': False}) To see the full list of arguments that can be provided, look into the documentation of :class:`HRITMSGFileHandler`. Example ------- Here is an example of how to read the data in satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob('data/H-000-MSG4__-MSG4________-*201903011200*') scn = Scene(filenames=filenames, reader='seviri_l1b_hrit') scn.load(['VIS006', 'IR_108']) print(scn['IR_108']) Output: .. code-block:: none dask.array Coordinates: acq_time (y) datetime64[ns] NaT NaT NaT NaT NaT NaT ... NaT NaT NaT NaT NaT * x (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 * y (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06 Attributes: satellite_longitude: 0.0 satellite_latitude: 0.0 satellite_altitude: 35785831.0 orbital_parameters: {'projection_longitude': 0.0, 'projection_latit... platform_name: Meteosat-11 georef_offset_corrected: True standard_name: brightness_temperature raw_metadata: {'file_type': 0, 'total_header_length': 6198, '... wavelength: (9.8, 10.8, 11.8) units: K sensor: seviri platform_name: Meteosat-11 start_time: 2019-03-01 12:00:09.716000 end_time: 2019-03-01 12:12:42.946000 area: Area ID: some_area_name\\nDescription: On-the-fl... name: IR_108 resolution: 3000.403165817 calibration: brightness_temperature polarization: None level: None modifiers: () ancillary_variables: [] * The ``orbital_parameters`` attribute provides the nominal and actual satellite position, as well as the projection centre. * You can choose between nominal and GSICS calibration coefficients or even specify your own coefficients, see :class:`HRITMSGFileHandler`. * The ``raw_metadata`` attribute provides raw metadata from the prologue, epilogue and segment header. By default, arrays with more than 100 elements are excluded in order to limit memory usage. This threshold can be adjusted, see :class:`HRITMSGFileHandler`. * The ``acq_time`` coordinate provides the acquisition time for each scanline. Use a ``MultiIndex`` to enable selection by acquisition time: .. code-block:: python import pandas as pd mi = pd.MultiIndex.from_arrays([scn['IR_108']['y'].data, scn['IR_108']['acq_time'].data], names=('y_coord', 'time')) scn['IR_108']['y'] = mi scn['IR_108'].sel(time=np.datetime64('2019-03-01T12:06:13.052000000')) References: - `MSG Level 1.5 Image Format Description`_ - `Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance`_ .. _MSG Level 1.5 Image Format Description: http://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName= PDF_TEN_05105_MSG_IMG_DATA&RevisionSelectionMethod=LatestReleased&Rendition=Web ..
_Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_TEN_MSG_SEVIRI_RAD_CALIB& RevisionSelectionMethod=LatestReleased&Rendition=Web """ from __future__ import division import copy import logging from datetime import datetime import dask.array as da import numpy as np import pyproj import xarray as xr import satpy.readers.utils as utils from pyresample import geometry from satpy import CHUNK_SIZE from satpy.readers.eum_base import recarray2dict, time_cds_short from satpy.readers.hrit_base import (HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function) from satpy.readers.seviri_base import (CALIB, CHANNEL_NAMES, SATNUM, VIS_CHANNELS, SEVIRICalibrationHandler, chebyshev, get_cds_time) from satpy.readers.seviri_l1b_native_hdr import (hrit_epilogue, hrit_prologue, impf_configuration) from satpy.readers._geos_area import get_area_extent, get_area_definition logger = logging.getLogger('hrit_msg') # MSG implementation: key_header = np.dtype([('key_number', 'u1'), ('seed', '>f8')]) segment_identification = np.dtype([('GP_SC_ID', '>i2'), ('spectral_channel_id', '>i1'), ('segment_sequence_number', '>u2'), ('planned_start_segment_number', '>u2'), ('planned_end_segment_number', '>u2'), ('data_field_representation', '>i1')]) image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), ('line_mean_acquisition', [('days', '>u2'), ('milliseconds', '>u4')]), ('line_validity', 'u1'), ('line_radiometric_quality', 'u1'), ('line_geometric_quality', 'u1')]) msg_variable_length_headers = { image_segment_line_quality: 'image_segment_line_quality'} msg_text_headers = {image_data_function: 'image_data_function', annotation_header: 'annotation_header', ancillary_text: 'ancillary_text'} msg_hdr_map = base_hdr_map.copy() msg_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('X', '>f8', (8, )), ('Y', '>f8', (8, )), ('Z', '>f8', (8, )), ('VX', '>f8', (8, )), ('VY', '>f8', (8, )), ('VZ', '>f8', (8, ))]) attitude_coef = np.dtype([('StartTime', time_cds_short), ('EndTime', time_cds_short), ('XofSpinAxis', '>f8', (8, )), ('YofSpinAxis', '>f8', (8, )), ('ZofSpinAxis', '>f8', (8, ))]) cuc_time = np.dtype([('coarse', 'u1', (4, )), ('fine', 'u1', (3, ))]) class NoValidOrbitParams(Exception): """Exception when validOrbitParameters are missing.""" pass class HRITMSGPrologueEpilogueBase(HRITFileHandler): """Base reader for prologue and epilogue files.""" def __init__(self, filename, filename_info, filetype_info, hdr_info): """Initialize the file handler for prologue and epilogue files.""" super(HRITMSGPrologueEpilogueBase, self).__init__(filename, filename_info, filetype_info, hdr_info) self._reduced = None def _reduce(self, mda, max_size): """Reduce the metadata.""" if self._reduced is None: self._reduced = utils.reduce_mda(mda, max_size=max_size) return self._reduced def reduce(self, max_size): """Reduce the metadata (placeholder).""" raise NotImplementedError class HRITMSGPrologueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT prologue reader.""" def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', ext_calib_coefs=None, mda_max_array_size=None, fill_hrv=None): """Initialize the reader.""" super(HRITMSGPrologueFileHandler, self).__init__(filename, filename_info, filetype_info, (msg_hdr_map, 
msg_variable_length_headers, msg_text_headers)) self.prologue = {} self.read_prologue() self.satpos = None service = filename_info['service'] if service == '': self.mda['service'] = '0DEG' else: self.mda['service'] = service def read_prologue(self): """Read the prologue metadata.""" with open(self.filename, "rb") as fp_: fp_.seek(self.mda['total_header_length']) data = np.fromfile(fp_, dtype=hrit_prologue, count=1) self.prologue.update(recarray2dict(data)) try: impf = np.fromfile(fp_, dtype=impf_configuration, count=1)[0] except IndexError: logger.info('No IMPF configuration field found in prologue.') else: self.prologue.update(recarray2dict(impf)) def get_satpos(self): """Get actual satellite position in geodetic coordinates (WGS-84). Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ if self.satpos is None: logger.debug("Computing actual satellite position") try: # Get satellite position in cartesian coordinates x, y, z = self._get_satpos_cart() # Transform to geodetic coordinates geocent = pyproj.Proj(proj='geocent') a, b = self.get_earth_radii() latlong = pyproj.Proj(proj='latlong', a=a, b=b, units='m') lon, lat, alt = pyproj.transform(geocent, latlong, x, y, z) except NoValidOrbitParams as err: logger.warning(err) lon = lat = alt = None # Cache results self.satpos = lon, lat, alt return self.satpos def _get_satpos_cart(self): """Determine satellite position in earth-centered cartesian coordinates. The coordinates as a function of time are encoded in the coefficients of an 8th-order Chebyshev polynomial. In the prologue there is one set of coefficients for each coordinate (x, y, z). The coordinates are obtained by evaluating the polynomials at the start time of the scan. Returns: x, y, z [m] """ orbit_polynomial = self.prologue['SatelliteStatus']['Orbit']['OrbitPolynomial'] # Find Chebyshev coefficients for the start time of the scan coef_idx = self._find_orbit_coefs() tstart = orbit_polynomial['StartTime'][0, coef_idx] tend = orbit_polynomial['EndTime'][0, coef_idx] # Obtain cartesian coordinates (x, y, z) of the satellite by evaluating the Chebyshev polynomial at the # start time of the scan. Express timestamps in microseconds since 1970-01-01 00:00. time = self.prologue['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] time64 = np.datetime64(time).astype('int64') domain = [np.datetime64(tstart).astype('int64'), np.datetime64(tend).astype('int64')] x = chebyshev(coefs=orbit_polynomial['X'][coef_idx], time=time64, domain=domain) y = chebyshev(coefs=orbit_polynomial['Y'][coef_idx], time=time64, domain=domain) z = chebyshev(coefs=orbit_polynomial['Z'][coef_idx], time=time64, domain=domain) return x*1000, y*1000, z*1000 # km -> m def _find_orbit_coefs(self): """Find orbit coefficients for the start time of the scan. The header entry SatelliteStatus/Orbit/OrbitPolynomial contains multiple coefficients, each of them valid for a certain time interval. Find the coefficients which are valid for the start time of the scan. A manoeuvre is a discontinuity in the orbit parameters. The flight dynamic algorithms are not made to interpolate over the time-span of the manoeuvre; hence we have elements describing the orbit before a manoeuvre and a new set of elements describing the orbit after the manoeuvre. The flight dynamic products are created so that there is an intentional gap at the time of the manoeuvre. Also the two pre-manoeuvre elements may overlap.
But the overlap is not an issue as both sets of elements describe the same pre-manoeuvre orbit (with negligible variations). Returns: Corresponding index in the coefficient list. """ time = np.datetime64(self.prologue['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart']) intervals_tstart = self.prologue['SatelliteStatus']['Orbit']['OrbitPolynomial']['StartTime'][0].astype( 'datetime64[us]') intervals_tend = self.prologue['SatelliteStatus']['Orbit']['OrbitPolynomial']['EndTime'][0].astype( 'datetime64[us]') try: # Find index of interval enclosing the nominal timestamp of the scan. If there are # multiple enclosing intervals, use the most recent one. enclosing = np.where(np.logical_and(time >= intervals_tstart, time < intervals_tend))[0] most_recent = np.argmax(intervals_tstart[enclosing]) return enclosing[most_recent] except ValueError: # No enclosing interval. Instead, find the interval whose centre is closest to the scan's timestamp # (but not more than 6 hours apart) intervals_centre = intervals_tstart + 0.5 * (intervals_tend - intervals_tstart) diffs_us = (time - intervals_centre).astype('i8') closest_match = np.argmin(np.fabs(diffs_us)) if abs(intervals_centre[closest_match] - time) < np.timedelta64(6, 'h'): logger.warning('No orbit coefficients valid for {}. Using closest match.'.format(time)) return closest_match else: raise NoValidOrbitParams('Unable to find orbit coefficients valid for {}'.format(time)) def get_earth_radii(self): """Get earth radii from prologue. Returns: Equatorial radius, polar radius [m] """ earth_model = self.prologue['GeometricProcessing']['EarthModel'] a = earth_model['EquatorialRadius'] * 1000 b = (earth_model['NorthPolarRadius'] + earth_model['SouthPolarRadius']) / 2.0 * 1000 return a, b def reduce(self, max_size): """Reduce the prologue metadata.""" return self._reduce(self.prologue, max_size=max_size) class HRITMSGEpilogueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT epilogue reader.""" def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', ext_calib_coefs=None, mda_max_array_size=None, fill_hrv=None): """Initialize the reader.""" super(HRITMSGEpilogueFileHandler, self).__init__(filename, filename_info, filetype_info, (msg_hdr_map, msg_variable_length_headers, msg_text_headers)) self.epilogue = {} self.read_epilogue() service = filename_info['service'] if service == '': self.mda['service'] = '0DEG' else: self.mda['service'] = service def read_epilogue(self): """Read the epilogue metadata.""" with open(self.filename, "rb") as fp_: fp_.seek(self.mda['total_header_length']) data = np.fromfile(fp_, dtype=hrit_epilogue, count=1) self.epilogue.update(recarray2dict(data)) def reduce(self, max_size): """Reduce the epilogue metadata.""" return self._reduce(self.epilogue, max_size=max_size) class HRITMSGFileHandler(HRITFileHandler, SEVIRICalibrationHandler): """SEVIRI HRIT format reader. **Calibration** It is possible to choose between two file-internal calibration coefficients for the conversion from counts to radiances: - Nominal for all channels (default) - GSICS for IR channels and nominal for VIS channels In order to change the default behaviour, use the ``reader_kwargs`` upon Scene creation:: import satpy import glob filenames = glob.glob('H-000-MSG3*') scene = satpy.Scene(filenames, reader='seviri_l1b_hrit', reader_kwargs={'calib_mode': 'GSICS'}) scene.load(['VIS006', 'IR_108']) Furthermore, it is possible to specify external calibration coefficients for the conversion from counts to radiances.
They must be specified in [mW m-2 sr-1 (cm-1)-1]. External coefficients take precedence over internal coefficients. If external calibration coefficients are specified for only a subset of channels, the remaining channels will be calibrated using the chosen file-internal coefficients (nominal or GSICS). In the following example we use external calibration coefficients for the ``VIS006`` & ``IR_108`` channels, and nominal coefficients for the remaining channels:: coefs = {'VIS006': {'gain': 0.0236, 'offset': -1.20}, 'IR_108': {'gain': 0.2156, 'offset': -10.4}} scene = satpy.Scene(filenames, reader='seviri_l1b_hrit', reader_kwargs={'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) In the next example we use external calibration coefficients for the ``VIS006`` & ``IR_108`` channels, nominal coefficients for the remaining VIS channels and GSICS coefficients for the remaining IR channels:: coefs = {'VIS006': {'gain': 0.0236, 'offset': -1.20}, 'IR_108': {'gain': 0.2156, 'offset': -10.4}} scene = satpy.Scene(filenames, reader='seviri_l1b_hrit', reader_kwargs={'calib_mode': 'GSICS', 'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) **Raw Metadata** By default, arrays with more than 100 elements are excluded from the raw reader metadata to limit memory usage. This threshold can be adjusted using the `mda_max_array_size` keyword argument:: scene = satpy.Scene(filenames, reader='seviri_l1b_hrit', reader_kwargs={'mda_max_array_size': 1000}) **Padding of the HRV channel** By default, the HRV channel is loaded padded with no-data, that is, it is returned as a full-disk dataset. If you want the original, unpadded, data, just set `fill_hrv` to False in the `reader_kwargs`:: scene = satpy.Scene(filenames, reader='seviri_l1b_hrit', reader_kwargs={'fill_hrv': False}) """ def __init__(self, filename, filename_info, filetype_info, prologue, epilogue, calib_mode='nominal', ext_calib_coefs=None, mda_max_array_size=100, fill_hrv=True): """Initialize the reader.""" super(HRITMSGFileHandler, self).__init__(filename, filename_info, filetype_info, (msg_hdr_map, msg_variable_length_headers, msg_text_headers)) self.prologue_ = prologue self.epilogue_ = epilogue self.prologue = prologue.prologue self.epilogue = epilogue.epilogue self._filename_info = filename_info self.ext_calib_coefs = ext_calib_coefs if ext_calib_coefs is not None else {} self.mda_max_array_size = mda_max_array_size self.fill_hrv = fill_hrv calib_mode_choices = ('NOMINAL', 'GSICS') if calib_mode.upper() not in calib_mode_choices: raise ValueError('Invalid calibration mode: {}.
Choose one of {}'.format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() self._get_header() def _get_header(self): """Read the header info, and fill the metadata dictionary.""" earth_model = self.prologue['GeometricProcessing']['EarthModel'] self.mda['offset_corrected'] = earth_model['TypeOfEarthModel'] == 2 # Projection a, b = self.prologue_.get_earth_radii() self.mda['projection_parameters']['a'] = a self.mda['projection_parameters']['b'] = b ssp = self.prologue['ImageDescription'][ 'ProjectionDescription']['LongitudeOfSSP'] self.mda['projection_parameters']['SSP_longitude'] = ssp self.mda['projection_parameters']['SSP_latitude'] = 0.0 # Orbital parameters actual_lon, actual_lat, actual_alt = self.prologue_.get_satpos() self.mda['orbital_parameters']['satellite_nominal_longitude'] = self.prologue['SatelliteStatus'][ 'SatelliteDefinition']['NominalLongitude'] self.mda['orbital_parameters']['satellite_nominal_latitude'] = 0.0 if actual_lon is not None: self.mda['orbital_parameters']['satellite_actual_longitude'] = actual_lon self.mda['orbital_parameters']['satellite_actual_latitude'] = actual_lat self.mda['orbital_parameters']['satellite_actual_altitude'] = actual_alt # Misc self.platform_id = self.prologue["SatelliteStatus"][ "SatelliteDefinition"]["SatelliteId"] self.platform_name = "Meteosat-" + SATNUM[self.platform_id] self.mda['platform_name'] = self.platform_name service = self._filename_info['service'] if service == '': self.mda['service'] = '0DEG' else: self.mda['service'] = service self.channel_name = CHANNEL_NAMES[self.mda['spectral_channel_id']] @property def start_time(self): """Get the start time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanStart'] @property def end_time(self): """Get the end time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanEnd'] def _get_area_extent(self, pdict): """Get the area extent of the file. Until December 2017, the data is shifted by 1.5km SSP North and West against the nominal GEOS projection. Since December 2017 this offset has been corrected. A flag in the data indicates if the correction has been applied. If no correction was applied, adjust the area extent to match the shifted data. For more information see Section 3.1.4.2 in the MSG Level 1.5 Image Data Format Description. The correction of the area extent is documented in a `developer's memo `_. """ aex = get_area_extent(pdict) if not self.mda['offset_corrected']: # Geo-referencing offset present. Adjust area extent to match the shifted data. Note that we have to adjust # the corners in the *opposite* direction, i.e. S-E. Think of it as if the coastlines were fixed and you # dragged the image to S-E until coastlines and data area aligned correctly. # # Although the image is flipped upside-down and left-right, the projection coordinates retain their # properties, i.e. positive x/y is East/North, respectively. 
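            # The adjustment below encodes that 1.5 km shift in metres:
            # +1500 m moves the extent east (positive x) and -1500 m moves it
            # south (negative y), applied to all four corners of the extent.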
xadj = 1500 yadj = -1500 aex = (aex[0] + xadj, aex[1] + yadj, aex[2] + xadj, aex[3] + yadj) return aex def get_area_def(self, dsid): """Get the area definition of the band.""" # Common parameters for both HRV and other channels nlines = int(self.mda['number_of_lines']) loff = np.float32(self.mda['loff']) pdict = {} pdict['cfac'] = np.int32(self.mda['cfac']) pdict['lfac'] = np.int32(self.mda['lfac']) pdict['coff'] = np.float32(self.mda['coff']) pdict['a'] = self.mda['projection_parameters']['a'] pdict['b'] = self.mda['projection_parameters']['b'] pdict['h'] = self.mda['projection_parameters']['h'] pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] pdict['nlines'] = nlines pdict['ncols'] = int(self.mda['number_of_columns']) if (self.prologue['ImageDescription']['Level15ImageProduction'] ['ImageProcDirection'] == 0): pdict['scandir'] = 'N2S' else: pdict['scandir'] = 'S2N' # Compute area definition for non-HRV channels: if dsid.name != 'HRV': pdict['loff'] = loff - nlines aex = self._get_area_extent(pdict) pdict['a_name'] = 'geosmsg' pdict['a_desc'] = 'MSG/SEVIRI low resolution channel area' pdict['p_id'] = 'msg_lowres' area = get_area_definition(pdict, aex) self.area = area return self.area segment_number = self.mda['segment_sequence_number'] current_first_line = ((segment_number - self.mda['planned_start_segment_number']) * pdict['nlines']) # Or, if we are processing HRV: pdict['a_name'] = 'geosmsg_hrv' pdict['p_id'] = 'msg_hires' bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'].copy() if self.fill_hrv: bounds['UpperEastColumnActual'] = 1 bounds['UpperWestColumnActual'] = 11136 bounds['LowerEastColumnActual'] = 1 bounds['LowerWestColumnActual'] = 11136 pdict['ncols'] = 11136 upper_south_line = bounds[ 'LowerNorthLineActual'] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), pdict['nlines']) lower_coff = (5566 - bounds['LowerEastColumnActual'] + 1) upper_coff = (5566 - bounds['UpperEastColumnActual'] + 1) # First we look at the lower window pdict['nlines'] = upper_south_line pdict['loff'] = loff - upper_south_line pdict['coff'] = lower_coff pdict['a_desc'] = 'MSG/SEVIRI high resolution channel, lower window' lower_area_extent = self._get_area_extent(pdict) lower_area = get_area_definition(pdict, lower_area_extent) # Now the upper window pdict['nlines'] = nlines - upper_south_line pdict['loff'] = loff - pdict['nlines'] - upper_south_line pdict['coff'] = upper_coff pdict['a_desc'] = 'MSG/SEVIRI high resolution channel, upper window' upper_area_extent = self._get_area_extent(pdict) upper_area = get_area_definition(pdict, upper_area_extent) area = geometry.StackedAreaDefinition(lower_area, upper_area) self.area = area.squeeze() return self.area def get_dataset(self, key, info): """Get the dataset.""" res = super(HRITMSGFileHandler, self).get_dataset(key, info) res = self.calibrate(res, key.calibration) if key.name == 'HRV' and self.fill_hrv: res = self.pad_hrv_data(res) res.attrs['units'] = info['units'] res.attrs['wavelength'] = info['wavelength'] res.attrs['standard_name'] = info['standard_name'] res.attrs['platform_name'] = self.platform_name res.attrs['sensor'] = 'seviri' res.attrs['satellite_longitude'] = self.mda[ 'projection_parameters']['SSP_longitude'] res.attrs['satellite_latitude'] = self.mda[ 'projection_parameters']['SSP_latitude'] res.attrs['satellite_altitude'] = self.mda['projection_parameters']['h'] res.attrs['orbital_parameters'] = { 'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], 
'projection_latitude': self.mda['projection_parameters']['SSP_latitude'], 'projection_altitude': self.mda['projection_parameters']['h']} res.attrs['orbital_parameters'].update(self.mda['orbital_parameters']) res.attrs['georef_offset_corrected'] = self.mda['offset_corrected'] res.attrs['raw_metadata'] = self._get_raw_mda() # Add scanline timestamps as additional y-coordinate res['acq_time'] = ('y', self._get_timestamps()) res['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' return res def pad_hrv_data(self, res): """Add empty pixels around the HRV.""" logger.debug('Padding HRV data to full disk') nlines = int(self.mda['number_of_lines']) segment_number = self.mda['segment_sequence_number'] current_first_line = (segment_number - self.mda['planned_start_segment_number']) * nlines bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'] upper_south_line = bounds[ 'LowerNorthLineActual'] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), nlines) data_list = list() if upper_south_line > 0: # we have some of the lower window data_lower = pad_data(res[:upper_south_line, :].data, (upper_south_line, 11136), bounds['LowerEastColumnActual'], bounds['LowerWestColumnActual']) data_list.append(data_lower) if upper_south_line < nlines: # we have some of the upper window data_upper = pad_data(res[upper_south_line:, :].data, (nlines - upper_south_line, 11136), bounds['UpperEastColumnActual'], bounds['UpperWestColumnActual']) data_list.append(data_upper) return xr.DataArray(da.vstack(data_list), dims=('y', 'x')) def calibrate(self, data, calibration): """Calibrate the data.""" tic = datetime.now() channel_name = self.channel_name if calibration == 'counts': res = data elif calibration in ['radiance', 'reflectance', 'brightness_temperature']: # Choose calibration coefficients # a) Internal: Nominal or GSICS? band_idx = self.mda['spectral_channel_id'] - 1 if self.calib_mode != 'GSICS' or self.channel_name in VIS_CHANNELS: # you can't apply GSICS values to the VIS channels coefs = self.prologue["RadiometricProcessing"]["Level15ImageCalibration"] int_gain = coefs['CalSlope'][band_idx] int_offset = coefs['CalOffset'][band_idx] else: coefs = self.prologue["RadiometricProcessing"]['MPEFCalFeedback'] int_gain = coefs['GSICSCalCoeff'][band_idx] int_offset = coefs['GSICSOffsetCount'][band_idx] # b) Internal or external? External takes precedence.
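            # ext_calib_coefs has the structure shown in the class docstring,
            # e.g. {'IR_108': {'gain': 0.2156, 'offset': -10.4}}; channels
            # without an entry fall back to the internal (nominal or GSICS)
            # coefficients selected above.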
gain = self.ext_calib_coefs.get(self.channel_name, {}).get('gain', int_gain) offset = self.ext_calib_coefs.get(self.channel_name, {}).get('offset', int_offset) # Convert to radiance data = data.where(data > 0) res = self._convert_to_radiance(data.astype(np.float32), gain, offset) line_mask = self.mda['image_segment_line_quality']['line_validity'] >= 2 line_mask &= self.mda['image_segment_line_quality']['line_validity'] <= 3 line_mask &= self.mda['image_segment_line_quality']['line_radiometric_quality'] == 4 line_mask &= self.mda['image_segment_line_quality']['line_geometric_quality'] == 4 res *= np.choose(line_mask, [1, np.nan])[:, np.newaxis].astype(np.float32) if calibration == 'reflectance': solar_irradiance = CALIB[self.platform_id][channel_name]["F"] res = self._vis_calibrate(res, solar_irradiance) elif calibration == 'brightness_temperature': cal_type = self.prologue['ImageDescription'][ 'Level15ImageProduction']['PlannedChanProcessing'][self.mda['spectral_channel_id']] res = self._ir_calibrate(res, channel_name, cal_type) logger.debug("Calibration time " + str(datetime.now() - tic)) return res def _get_raw_mda(self): """Compile raw metadata to be included in the dataset attributes.""" # Metadata from segment header (excluding items which vary among the different segments) raw_mda = copy.deepcopy(self.mda) for key in ('image_segment_line_quality', 'segment_sequence_number', 'annotation_header', 'loff'): raw_mda.pop(key, None) # Metadata from prologue and epilogue (large arrays removed) raw_mda.update(self.prologue_.reduce(self.mda_max_array_size)) raw_mda.update(self.epilogue_.reduce(self.mda_max_array_size)) return raw_mda def _get_timestamps(self): """Read scanline timestamps from the segment header.""" tline = self.mda['image_segment_line_quality']['line_mean_acquisition'] return get_cds_time(days=tline['days'], msecs=tline['milliseconds']) def pad_data(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: raise IndexError('East and west bounds do not match data shape') padding_east = da.zeros((nlines, east_bound - 1), dtype=data.dtype, chunks=CHUNK_SIZE) padding_west = da.zeros((nlines, (final_size[1] - west_bound)), dtype=data.dtype, chunks=CHUNK_SIZE) if np.issubdtype(data.dtype, np.floating): padding_east = padding_east * np.nan padding_west = padding_west * np.nan return np.hstack((padding_east, data, padding_west)) satpy-0.20.0/satpy/readers/seviri_l1b_icare.py000066400000000000000000000236541362525524100213110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""Interface to SEVIRI L1B data from ICARE (Lille). 
Introduction ------------ The ``seviri_l1b_icare`` reader reads MSG-SEVIRI L1.5 image data in HDF format that has been produced by the ICARE Data and Services Center. Data can be accessed via: http://www.icare.univ-lille1.fr Each SEVIRI timeslot comes as 12 HDF files, one per band. Only those bands that are of interest need to be passed to the reader. Others can be ignored. Filenames follow the format: GEO_L1B-MSG1_YYYY-MM-DDTHH-MM-SS_G_CHANN_VX-XX.hdf Where: YYYY, MM, DD, HH, MM, SS specify the timeslot starting time. CHANN is the channel (e.g. HRV, IR016, WV073) VX-XX is the processing version number Example ------- Here is an example of how to read the data in satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob('data/*2019-03-01T12-00-00*.hdf') scn = Scene(filenames=filenames, reader='seviri_l1b_icare') scn.load(['VIS006', 'IR_108']) print(scn['IR_108']) Output: .. code-block:: none dask.array Coordinates: crs object +proj=geos +a=6378169.0 +b=6356583.8 +lon_0=0.0 +h=35785831.0 +units=m +type=crs * y (y) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 * x (x) float64 -5.566e+06 -5.563e+06 -5.56e+06 ... 5.566e+06 5.569e+06 Attributes: start_time: 2004-12-29 12:15:00 end_time: 2004-12-29 12:27:44 area: Area ID: geosmsg\nDescription: MSG/SEVIRI low resol... name: IR_108 resolution: 3000.403165817 calibration: brightness_temperature polarization: None level: None modifiers: () ancillary_variables: [] """ from satpy.readers._geos_area import get_area_extent, get_area_definition from satpy.readers.hdf4_utils import HDF4FileHandler from datetime import datetime import numpy as np class SEVIRI_ICARE(HDF4FileHandler): """SEVIRI L1B handler for HDF4 files.""" def __init__(self, filename, filename_info, filetype_info): super(SEVIRI_ICARE, self).__init__(filename, filename_info, filetype_info) # These are VIS bands self.ref_bands = ['HRV', 'VIS006', 'VIS008', 'IR_016'] # And these are IR bands self.bt_bands = ['IR_039', 'IR_062', 'IR_073', 'IR_087', 'IR_097', 'IR_108', 'IR_120', 'IR_134', 'WV_062', 'WV_073'] @property def sensor_name(self): # the sensor and platform names are stored together, eg: MSG1/SEVIRI attr = self['/attr/Sensors'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() else: attr = attr.lower() plat = attr[0:4] sens = attr[5:] # icare uses non-standard platform names if plat == 'msg1': plat = 'Meteosat-08' elif plat == 'msg2': plat = 'Meteosat-09' elif plat == 'msg3': plat = 'Meteosat-10' elif plat == 'msg4': plat = 'Meteosat-11' else: raise NameError("Unsupported satellite platform: "+plat) return [plat, sens] @property def satlon(self): attr = self['/attr/Sub_Satellite_Longitude'] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @property def projlon(self): attr = self['/attr/Projection_Longitude'] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @property def projection(self): attr = self['/attr/Geographic_Projection'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) attr = attr.lower() if attr != 'geos': raise NotImplementedError("Only the GEOS projection is supported.\ This is:", attr) return attr @property def zone(self): attr = self['/attr/Zone'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return attr @property def res(self): attr = self['/attr/Nadir_Pixel_Size'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return float(attr) @property def end_time(self): attr =
@property def end_time(self): attr = self['/attr/End_Acquisition_Date'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some file versions milliseconds are present, in others they are not. try: endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return endacq @property def start_time(self): attr = self['/attr/Beginning_Acquisition_Date'] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some file versions milliseconds are present, in others they are not. try: stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return stacq @property def alt(self): attr = self['/attr/Altitude'] if isinstance(attr, np.ndarray): attr = attr.astype(str) attr = float(attr) # This is stored in km, convert to m attr = attr * 1000. return attr @property def geoloc(self): attr = self['/attr/Geolocation'] if isinstance(attr, np.ndarray): attr = attr.astype(str) cfac = float(attr[0]) coff = float(attr[1]) lfac = float(attr[2]) loff = float(attr[3]) return [cfac, lfac, coff, loff] def get_metadata(self, data, ds_info): mda = {} mda.update(data.attrs) mda.update(ds_info) geoloc = self.geoloc mda.update({ 'start_time': self.start_time, 'end_time': self.end_time, 'platform_name': self.sensor_name[0], 'sensor': self.sensor_name[1], 'zone': self.zone, 'projection_altitude': self.alt, 'cfac': geoloc[0], 'lfac': geoloc[1], 'coff': geoloc[2], 'loff': geoloc[3], 'resolution': self.res, 'satellite_actual_longitude': self.satlon, 'projection_longitude': self.projlon, 'projection_type': self.projection }) return mda def _get_dsname(self, ds_id): """Return the correct dataset name based on the requested band.""" if ds_id.name in self.ref_bands: ds_get_name = 'Normalized_Radiance' elif ds_id.name in self.bt_bands: ds_get_name = 'Brightness_Temperature' else: raise NameError("Dataset type " + ds_id.name + " is not supported.") return ds_get_name def get_dataset(self, ds_id, ds_info): ds_get_name = self._get_dsname(ds_id) data = self[ds_get_name] data.attrs = self.get_metadata(data, ds_info) fill = data.attrs.pop('_FillValue') offset = data.attrs.get('add_offset') scale_factor = data.attrs.get('scale_factor') data = data.where(data != fill) data.values = data.values.astype(np.float32) if scale_factor is not None and offset is not None: data.values *= scale_factor data.values += offset # Now we correct the range from 0-1 to 0-100 for VIS: if ds_id.name in self.ref_bands: data.values *= 100. return data def get_area_def(self, ds_id): ds_get_name = self._get_dsname(ds_id) ds_shape = self[ds_get_name + '/shape'] geoloc = self.geoloc pdict = {} pdict['cfac'] = np.int32(geoloc[0]) pdict['lfac'] = np.int32(geoloc[1]) pdict['coff'] = np.float32(geoloc[2]) pdict['loff'] = -np.float32(geoloc[3]) # Unfortunately this dataset does not store a, b or h.
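        # A worked example of the height calculation below, assuming the file
        # reports the nominal MSG altitude of 42164.0 km (self.alt == 42164000.0 m):
        #     h = 42164000.0 - 6378169 = 35785831.0 m
        # which matches the nominal geostationary height used by the other
        # SEVIRI readers.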
# We assume a and b here, and calculate h from altitude # a and b are from SEVIRI data HRIT header (201912101300) pdict['a'] = 6378169 pdict['b'] = 6356583.8 pdict['h'] = self.alt - pdict['a'] pdict['ssp_lon'] = self.projlon pdict['ncols'] = int(ds_shape[0]) pdict['nlines'] = int(ds_shape[1]) # Force scandir to SEVIRI default, not known from file pdict['scandir'] = 'S2N' pdict['a_name'] = 'geosmsg' if ds_id.name == 'HRV': pdict['a_desc'] = 'MSG/SEVIRI HRV channel area' pdict['p_id'] = 'msg_hires' else: pdict['a_desc'] = 'MSG/SEVIRI low resolution channel area' pdict['p_id'] = 'msg_lowres' aex = get_area_extent(pdict) area = get_area_definition(pdict, aex) return area satpy-0.20.0/satpy/readers/seviri_l1b_native.py000066400000000000000000000545501362525524100215130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SEVIRI native format reader. References: MSG Level 1.5 Native Format File Definition https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_FG15_MSG-NATIVE-FORMAT-15&RevisionSelectionMethod=LatestReleased&Rendition=Web MSG Level 1.5 Image Data Format Description https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_TEN_05105_MSG_IMG_DATA&RevisionSelectionMethod=LatestReleased&Rendition=Web """ import logging from datetime import datetime import numpy as np import xarray as xr import dask.array as da from satpy import CHUNK_SIZE from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.eum_base import recarray2dict from satpy.readers.seviri_base import (SEVIRICalibrationHandler, CHANNEL_NAMES, CALIB, SATNUM, dec10216, VISIR_NUM_COLUMNS, VISIR_NUM_LINES, HRV_NUM_COLUMNS, VIS_CHANNELS) from satpy.readers.seviri_l1b_native_hdr import (GSDTRecords, native_header, native_trailer) from satpy.readers._geos_area import get_area_definition logger = logging.getLogger('native_msg') class NativeMSGFileHandler(BaseFileHandler, SEVIRICalibrationHandler): """SEVIRI native format reader. The Level1.5 Image data calibration method can be changed by adding the required mode to the Scene object instantiation kwargs eg kwargs = {"calib_mode": "gsics",} """ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal'): """Initialize the reader.""" super(NativeMSGFileHandler, self).__init__(filename, filename_info, filetype_info) self.platform_name = None self.calib_mode = calib_mode # Declare required variables. # Assume a full disk file, reset in _read_header if otherwise. 
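        # The calib_mode keyword above is normally forwarded from user code via
        # the Scene's reader_kwargs. A hedged usage sketch (the file pattern is
        # hypothetical, not taken from this module):
        #
        #     import glob
        #     from satpy import Scene
        #     filenames = glob.glob('/data/MSG4-SEVI-MSG15-*.nat')
        #     scn = Scene(filenames=filenames, reader='seviri_l1b_native',
        #                 reader_kwargs={'calib_mode': 'GSICS'})
        #     scn.load(['IR_108'])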
self.header = {} self.mda = {} self.mda['is_full_disk'] = True self.trailer = {} # Read header, prepare dask-array, read trailer # Available channels are known only after the header has been read self._read_header() self.dask_array = da.from_array(self._get_memmap(), chunks=(CHUNK_SIZE,)) self._read_trailer() @property def start_time(self): """Read the repeat cycle start time from metadata.""" return self.header['15_DATA_HEADER']['ImageAcquisition'][ 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] @property def end_time(self): """Read the repeat cycle end time from metadata.""" return self.header['15_DATA_HEADER']['ImageAcquisition'][ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] @staticmethod def _calculate_area_extent(center_point, north, east, south, west, we_offset, ns_offset, column_step, line_step): # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be 1856.5 . # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels. ll_c = (center_point - west - 0.5 + we_offset) * column_step ll_l = (south - center_point - 0.5 + ns_offset) * line_step ur_c = (center_point - east + 0.5 + we_offset) * column_step ur_l = (north - center_point + 0.5 + ns_offset) * line_step return (ll_c, ll_l, ur_c, ur_l) def _get_data_dtype(self): """Get the dtype of the file based on the actual available channels.""" pkhrec = [ ('GP_PK_HEADER', GSDTRecords.gp_pk_header), ('GP_PK_SH1', GSDTRecords.gp_pk_sh1) ] pk_head_dtype = np.dtype(pkhrec) def get_lrec(cols): lrec = [ ("gp_pk", pk_head_dtype), ("version", np.uint8), ("satid", np.uint16), ("time", (np.uint16, 5)), ("lineno", np.uint32), ("chan_id", np.uint8), ("acq_time", (np.uint16, 3)), ("line_validity", np.uint8), ("line_rquality", np.uint8), ("line_gquality", np.uint8), ("line_data", (np.uint8, cols)) ] return lrec # each pixel is 10-bits -> one line of data has 25% more bytes # than the number of columns suggest (10/8 = 1.25) visir_rec = get_lrec(int(self.mda['number_of_columns'] * 1.25)) number_of_visir_channels = len( [s for s in self.mda['channel_list'] if not s == 'HRV']) drec = [('visir', (visir_rec, number_of_visir_channels))] if self.mda['available_channels']['HRV']: hrv_rec = get_lrec(int(self.mda['hrv_number_of_columns'] * 1.25)) drec.append(('hrv', (hrv_rec, 3))) return np.dtype(drec) def _get_memmap(self): """Get the memory map for the SEVIRI data.""" with open(self.filename) as fp: data_dtype = self._get_data_dtype() hdr_size = native_header.itemsize return np.memmap(fp, dtype=data_dtype, shape=(self.mda['number_of_lines'],), offset=hdr_size, mode="r") def _read_header(self): """Read the header info.""" data = np.fromfile(self.filename, dtype=native_header, count=1) self.header.update(recarray2dict(data)) data15hd = self.header['15_DATA_HEADER'] sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER'] # Set the list of available channels: self.mda['available_channels'] = get_available_channels(self.header) self.mda['channel_list'] = [i for i in CHANNEL_NAMES.values() if self.mda['available_channels'][i]] self.platform_id = data15hd[ 'SatelliteStatus']['SatelliteDefinition']['SatelliteId'] self.mda['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] equator_radius = data15hd['GeometricProcessing'][ 'EarthModel']['EquatorialRadius'] * 1000. north_polar_radius = data15hd[ 'GeometricProcessing']['EarthModel']['NorthPolarRadius'] * 1000. south_polar_radius = data15hd[ 'GeometricProcessing']['EarthModel']['SouthPolarRadius'] * 1000. 
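        # For reference (not read from this file): typical MSG Earth model
        # values are an equatorial radius of ~6378.169 km and polar radii of
        # ~6356.5838 km, so the metres computed above and the average taken
        # below reproduce the +a=6378169.0 +b=6356583.8 proj parameters quoted
        # in the ICARE reader documentation.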
polar_radius = (north_polar_radius + south_polar_radius) * 0.5 ssp_lon = data15hd['ImageDescription'][ 'ProjectionDescription']['LongitudeOfSSP'] self.mda['projection_parameters'] = {'a': equator_radius, 'b': polar_radius, 'h': 35785831.00, 'ssp_longitude': ssp_lon} north = int(sec15hd['NorthLineSelectedRectangle']['Value']) east = int(sec15hd['EastColumnSelectedRectangle']['Value']) south = int(sec15hd['SouthLineSelectedRectangle']['Value']) west = int(sec15hd['WestColumnSelectedRectangle']['Value']) ncolumns = west - east + 1 nrows = north - south + 1 # check if the file has less rows or columns than # the maximum, if so it is an area of interest file if (nrows < VISIR_NUM_LINES) or (ncolumns < VISIR_NUM_COLUMNS): self.mda['is_full_disk'] = False # If the number of columns in the file is not divisible by 4, # UMARF will add extra columns to the file modulo = ncolumns % 4 padding = 0 if modulo > 0: padding = 4 - modulo cols_visir = ncolumns + padding # Check the VISIR calculated column dimension against # the header information cols_visir_hdr = int(sec15hd['NumberColumnsVISIR']['Value']) if cols_visir_hdr != cols_visir: logger.warning( "Number of VISIR columns from the header is incorrect!") logger.warning("Header: %d", cols_visir_hdr) logger.warning("Calculated: = %d", cols_visir) # HRV Channel - check if the area is reduced in east west # direction as this affects the number of columns in the file cols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value']) if ncolumns < VISIR_NUM_COLUMNS: cols_hrv = cols_hrv_hdr else: cols_hrv = int(cols_hrv_hdr / 2) # self.mda represents the 16bit dimensions not 10bit self.mda['number_of_lines'] = int(sec15hd['NumberLinesVISIR']['Value']) self.mda['number_of_columns'] = cols_visir self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value']) self.mda['hrv_number_of_columns'] = cols_hrv def _read_trailer(self): hdr_size = native_header.itemsize data_size = (self._get_data_dtype().itemsize * self.mda['number_of_lines']) with open(self.filename) as fp: fp.seek(hdr_size + data_size) data = np.fromfile(fp, dtype=native_trailer, count=1) self.trailer.update(recarray2dict(data)) def get_area_def(self, dataset_id): """Get the area definition of the band.""" pdict = {} pdict['a'] = self.mda['projection_parameters']['a'] pdict['b'] = self.mda['projection_parameters']['b'] pdict['h'] = self.mda['projection_parameters']['h'] pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] if dataset_id.name == 'HRV': pdict['nlines'] = self.mda['hrv_number_of_lines'] pdict['ncols'] = self.mda['hrv_number_of_columns'] pdict['a_name'] = 'geos_seviri_hrv' pdict['a_desc'] = 'SEVIRI high resolution channel area' pdict['p_id'] = 'seviri_hrv' if self.mda['is_full_disk']: # handle full disk HRV data with two separated area definitions [upper_area_extent, lower_area_extent, upper_nlines, upper_ncols, lower_nlines, lower_ncols] = self.get_area_extent(dataset_id) # upper area pdict['a_desc'] = 'SEVIRI high resolution channel, upper window' pdict['nlines'] = upper_nlines pdict['ncols'] = upper_ncols upper_area = get_area_definition(pdict, upper_area_extent) # lower area pdict['a_desc'] = 'SEVIRI high resolution channel, lower window' pdict['nlines'] = lower_nlines pdict['ncols'] = lower_ncols lower_area = get_area_definition(pdict, lower_area_extent) # order of areas is flipped w.r.t. 
the hrit reader due to the flipping of the data in get_dataset area = geometry.StackedAreaDefinition(upper_area, lower_area) area = area.squeeze() else: # if the HRV data is in a ROI, the HRV channel is delivered in one area area = get_area_definition(pdict, self.get_area_extent(dataset_id)) else: pdict['nlines'] = self.mda['number_of_lines'] pdict['ncols'] = self.mda['number_of_columns'] pdict['a_name'] = 'geos_seviri_visir' pdict['a_desc'] = 'SEVIRI low resolution channel area' pdict['p_id'] = 'seviri_visir' area = get_area_definition(pdict, self.get_area_extent(dataset_id)) return area def get_area_extent(self, dataset_id): """Get the area extent of the file. Until December 2017, the data is shifted by 1.5km SSP North and West against the nominal GEOS projection. Since December 2017 this offset has been corrected. A flag in the data indicates if the correction has been applied. If no correction was applied, adjust the area extent to match the shifted data. For more information see Section 3.1.4.2 in the MSG Level 1.5 Image Data Format Description. The correction of the area extent is documented in a `developer's memo `_. """ data15hd = self.header['15_DATA_HEADER'] sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER'] # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description earth_model = data15hd['GeometricProcessing']['EarthModel'][ 'TypeOfEarthModel'] if earth_model == 2: ns_offset = 0 we_offset = 0 elif earth_model == 1: ns_offset = -0.5 we_offset = 0.5 if dataset_id.name == 'HRV': ns_offset = -1.5 we_offset = 1.5 else: raise NotImplementedError( 'Unrecognised Earth model: {}'.format(earth_model) ) if dataset_id.name == 'HRV': grid_origin = data15hd['ImageDescription']['ReferenceGridHRV']['GridOrigin'] center_point = HRV_NUM_COLUMNS / 2 coeff = 3 column_step = data15hd['ImageDescription']['ReferenceGridHRV']['ColumnDirGridStep'] * 1000.0 line_step = data15hd['ImageDescription']['ReferenceGridHRV']['LineDirGridStep'] * 1000.0 else: grid_origin = data15hd['ImageDescription']['ReferenceGridVIS_IR']['GridOrigin'] center_point = VISIR_NUM_COLUMNS / 2 coeff = 1 column_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['ColumnDirGridStep'] * 1000.0 line_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['LineDirGridStep'] * 1000.0 # Calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} if grid_origin != 2: msg = 'Grid origin not supported number: {}, {} corner'.format( grid_origin, origins[grid_origin] ) raise NotImplementedError(msg) # When dealing with HRV channel and full disk, area extent is # in two pieces if (dataset_id.name == 'HRV') and self.mda['is_full_disk']: # get actual navigation parameters from trailer data data15tr = self.trailer['15TRAILER'] HRV_bounds = data15tr['ImageProductionStats']['ActualL15CoverageHRV'] # upper window upper_north_line = HRV_bounds['UpperNorthLineActual'] upper_west_column = HRV_bounds['UpperWestColumnActual'] upper_south_line = HRV_bounds['UpperSouthLineActual'] upper_east_column = HRV_bounds['UpperEastColumnActual'] upper_area_extent = self._calculate_area_extent( center_point, upper_north_line, upper_east_column, upper_south_line, upper_west_column, we_offset, ns_offset, column_step, line_step ) upper_nlines = upper_north_line - upper_south_line + 1 upper_ncols = upper_west_column - upper_east_column + 1 # lower window lower_north_line = 
HRV_bounds['LowerNorthLineActual'] lower_west_column = HRV_bounds['LowerWestColumnActual'] lower_south_line = HRV_bounds['LowerSouthLineActual'] lower_east_column = HRV_bounds['LowerEastColumnActual'] lower_area_extent = self._calculate_area_extent( center_point, lower_north_line, lower_east_column, lower_south_line, lower_west_column, we_offset, ns_offset, column_step, line_step ) lower_nlines = lower_north_line - lower_south_line + 1 lower_ncols = lower_west_column - lower_east_column + 1 return [upper_area_extent, lower_area_extent, upper_nlines, upper_ncols, lower_nlines, lower_ncols] # Otherwise the area extent is in one piece; its corner points are # the same as for the VISIR channels, but the HRV channel has three # times the number of columns and rows else: north = coeff * int(sec15hd['NorthLineSelectedRectangle']['Value']) east = coeff * int(sec15hd['EastColumnSelectedRectangle']['Value']) west = coeff * int(sec15hd['WestColumnSelectedRectangle']['Value']) south = coeff * int(sec15hd['SouthLineSelectedRectangle']['Value']) area_extent = self._calculate_area_extent( center_point, north, east, south, west, we_offset, ns_offset, column_step, line_step ) return area_extent def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" if dataset_id.name not in self.mda['channel_list']: raise KeyError('Channel %s not available in the file' % dataset_id.name) elif dataset_id.name != 'HRV': shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) # Check if there is only 1 channel in the list, as the array # assignment changes in that case (the channel id dimension # is not present) if len(self.mda['channel_list']) == 1: raw = self.dask_array['visir']['line_data'] else: i = self.mda['channel_list'].index(dataset_id.name) raw = self.dask_array['visir']['line_data'][:, i, :] data = dec10216(raw.flatten()) data = da.flipud(da.fliplr((data.reshape(shape)))) else: shape = (self.mda['hrv_number_of_lines'], self.mda['hrv_number_of_columns']) raw2 = self.dask_array['hrv']['line_data'][:, 2, :] raw1 = self.dask_array['hrv']['line_data'][:, 1, :] raw0 = self.dask_array['hrv']['line_data'][:, 0, :] shape_layer = (self.mda['number_of_lines'], self.mda['hrv_number_of_columns']) data2 = dec10216(raw2.flatten()) data2 = da.flipud(da.fliplr((data2.reshape(shape_layer)))) data1 = dec10216(raw1.flatten()) data1 = da.flipud(da.fliplr((data1.reshape(shape_layer)))) data0 = dec10216(raw0.flatten()) data0 = da.flipud(da.fliplr((data0.reshape(shape_layer)))) data = np.zeros(shape) idx = range(0, shape[0], 3) data[idx, :] = data2 idx = range(1, shape[0], 3) data[idx, :] = data1 idx = range(2, shape[0], 3) data[idx, :] = data0 xarr = xr.DataArray(data, dims=['y', 'x']).where(data != 0).astype(np.float32) dataset = self.calibrate(xarr, dataset_id) dataset.attrs['units'] = dataset_info['units'] dataset.attrs['wavelength'] = dataset_info['wavelength'] dataset.attrs['standard_name'] = dataset_info['standard_name'] dataset.attrs['platform_name'] = self.mda['platform_name'] dataset.attrs['sensor'] = 'seviri' dataset.attrs['orbital_parameters'] = { 'projection_longitude': self.mda['projection_parameters']['ssp_longitude'], 'projection_latitude': 0., 'projection_altitude': self.mda['projection_parameters']['h']} return dataset def calibrate(self, data, dataset_id): """Calibrate the data.""" tic = datetime.now() data15hdr = self.header['15_DATA_HEADER'] calibration = dataset_id.calibration channel = dataset_id.name # even though all the channels may not be present in the file, # the
header does have calibration coefficients for all the channels # hence, this channel index needs to refer to full channel list i = list(CHANNEL_NAMES.values()).index(channel) if calibration == 'counts': return data if calibration in ['radiance', 'reflectance', 'brightness_temperature']: # determine the required calibration coefficients to use # for the Level 1.5 Header if (self.calib_mode.upper() != 'GSICS' and self.calib_mode.upper() != 'NOMINAL'): raise NotImplementedError( 'Unknown Calibration mode : Please check') # NB GSICS doesn't have calibration coeffs for VIS channels if (self.calib_mode.upper() != 'GSICS' or channel in VIS_CHANNELS): coeffs = data15hdr[ 'RadiometricProcessing']['Level15ImageCalibration'] gain = coeffs['CalSlope'][i] offset = coeffs['CalOffset'][i] else: coeffs = data15hdr[ 'RadiometricProcessing']['MPEFCalFeedback'] gain = coeffs['GSICSCalCoeff'][i] offset = coeffs['GSICSOffsetCount'][i] offset = offset * gain res = self._convert_to_radiance(data, gain, offset) if calibration == 'reflectance': solar_irradiance = CALIB[self.platform_id][channel]["F"] res = self._vis_calibrate(res, solar_irradiance) elif calibration == 'brightness_temperature': cal_type = data15hdr['ImageDescription'][ 'Level15ImageProduction']['PlannedChanProcessing'][i] res = self._ir_calibrate(res, channel, cal_type) logger.debug("Calibration time " + str(datetime.now() - tic)) return res def get_available_channels(header): """Get the available channels from the header information.""" chlist_str = header['15_SECONDARY_PRODUCT_HEADER'][ 'SelectedBandIDs']['Value'] retv = {} for idx, char in zip(range(12), chlist_str): retv[CHANNEL_NAMES[idx + 1]] = (char == 'X') return retv satpy-0.20.0/satpy/readers/seviri_l1b_native_hdr.py000066400000000000000000001066121362525524100223450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Header and trailer records of SEVIRI native format. """ import numpy as np from satpy.readers.eum_base import (time_cds_short, time_cds, time_cds_expanded) class GSDTRecords(object): """MSG Ground Segment Data Type records. 
Reference Document (EUM/MSG/SPE/055): MSG Ground Segment Design Specification (GSDS) """ gp_fac_env = np.uint8 gp_fac_id = np.uint8 gp_sc_id = np.uint16 gp_su_id = np.uint32 gp_svce_type = np.uint8 # 4 bytes gp_cpu_address = [ ('Qualifier_1', np.uint8), ('Qualifier_2', np.uint8), ('Qualifier_3', np.uint8), ('Qualifier_4', np.uint8) ] # 22 bytes gp_pk_header = [ ('HeaderVersionNo', np.uint8), ('PacketType', np.uint8), ('SubHeaderType', np.uint8), ('SourceFacilityId', gp_fac_id), ('SourceEnvId', gp_fac_env), ('SourceInstanceId', np.uint8), ('SourceSUId', gp_su_id), ('SourceCPUId', gp_cpu_address), ('DestFacilityId', gp_fac_id), ('DestEnvId', gp_fac_env), ('SequenceCount', np.uint16), ('PacketLength', np.int32) ] # 16 bytes gp_pk_sh1 = [ ('SubHeaderVersionNo', np.uint8), ('ChecksumFlag', np.bool), ('Acknowledgement', (np.uint8, 4)), ('ServiceType', gp_svce_type), ('ServiceSubtype', np.uint8), ('PacketTime', time_cds_short), ('SpacecraftId', gp_sc_id) ] class Msg15NativeHeaderRecord(object): """ SEVIRI Level 1.5 header for native-format """ def get(self): # 450400 bytes record = [ ('15_MAIN_PRODUCT_HEADER', L15MainProductHeaderRecord().get()), ('15_SECONDARY_PRODUCT_HEADER', L15SecondaryProductHeaderRecord().get()), ('GP_PK_HEADER', GSDTRecords.gp_pk_header), ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), ('15_DATA_HEADER', L15DataHeaderRecord().get()) ] return np.dtype(record).newbyteorder('>') class L15PhData(object): # 80 bytes l15_ph_data = [ ('Name', 'S30'), ('Value', 'S50') ] class L15MainProductHeaderRecord(object): """ Reference Document: MSG Level 1.5 Native Format File Definition """ def get(self): l15_ph_data = L15PhData.l15_ph_data l15_ph_data_identification = [ ('Name', 'S30'), ('Size', 'S16'), ('Address', 'S16') ] # 3674 bytes record = [ ('FormatName', l15_ph_data), ('FormatDocumentName', l15_ph_data), ('FormatDocumentMajorVersion', l15_ph_data), ('FormatDocumentMinorVersion', l15_ph_data), ('CreationDateTime', l15_ph_data), ('CreatingCentre', l15_ph_data), ('DataSetIdentification', (l15_ph_data_identification, 27)), ('TotalFileSize', l15_ph_data), ('GORT', l15_ph_data), ('ASTI', l15_ph_data), ('LLOS', l15_ph_data), ('SNIT', l15_ph_data), ('AIID', l15_ph_data), ('SSBT', l15_ph_data), ('SSST', l15_ph_data), ('RRCC', l15_ph_data), ('RRBT', l15_ph_data), ('RRST', l15_ph_data), ('PPRC', l15_ph_data), ('PPDT', l15_ph_data), ('GPLV', l15_ph_data), ('APNM', l15_ph_data), ('AARF', l15_ph_data), ('UUDT', l15_ph_data), ('QQOV', l15_ph_data), ('UDSP', l15_ph_data) ] return record class L15SecondaryProductHeaderRecord(object): """ Reference Document: MSG Level 1.5 Native Format File Definition """ def get(self): l15_ph_data = L15PhData.l15_ph_data # 1440 bytes record = [ ('ABID', l15_ph_data), ('SMOD', l15_ph_data), ('APXS', l15_ph_data), ('AVPA', l15_ph_data), ('LSCD', l15_ph_data), ('LMAP', l15_ph_data), ('QDLC', l15_ph_data), ('QDLP', l15_ph_data), ('QQAI', l15_ph_data), ('SelectedBandIDs', l15_ph_data), ('SouthLineSelectedRectangle', l15_ph_data), ('NorthLineSelectedRectangle', l15_ph_data), ('EastColumnSelectedRectangle', l15_ph_data), ('WestColumnSelectedRectangle', l15_ph_data), ('NumberLinesVISIR', l15_ph_data), ('NumberColumnsVISIR', l15_ph_data), ('NumberLinesHRV', l15_ph_data), ('NumberColumnsHRV', l15_ph_data) ] return record class L15DataHeaderRecord(object): """ Reference Document (EUM/MSG/ICD/105): MSG Level 1.5 Image Data Format Description """ def get(self): # 445248 bytes record = [ ('15HeaderVersion', np.uint8), ('SatelliteStatus', self.satellite_status), ('ImageAcquisition', 
self.image_acquisition), ('CelestialEvents', self.celestial_events), ('ImageDescription', self.image_description), ('RadiometricProcessing', self.radiometric_processing), ('GeometricProcessing', self.geometric_processing), ('IMPFConfiguration', self.impf_configuration)] return record @property def satellite_status(self): # 7 bytes satellite_definition = [ ('SatelliteId', np.uint16), ('NominalLongitude', np.float32), ('SatelliteStatus', np.uint8)] # 28 bytes satellite_operations = [ ('LastManoeuvreFlag', np.bool), ('LastManoeuvreStartTime', time_cds_short), ('LastManoeuvreEndTime', time_cds_short), ('LastManoeuvreType', np.uint8), ('NextManoeuvreFlag', np.bool), ('NextManoeuvreStartTime', time_cds_short), ('NextManoeuvreEndTime', time_cds_short), ('NextManoeuvreType', np.uint8)] # 396 bytes orbit_coeff = [ ('StartTime', time_cds_short), ('EndTime', time_cds_short), ('X', (np.float64, 8)), ('Y', (np.float64, 8)), ('Z', (np.float64, 8)), ('VX', (np.float64, 8)), ('VY', (np.float64, 8)), ('VZ', (np.float64, 8))] # 39612 bytes orbit = [ ('PeriodStartTime', time_cds_short), ('PeriodEndTime', time_cds_short), ('OrbitPolynomial', (orbit_coeff, 100))] # 204 bytes attitude_coeff = [ ('StartTime', time_cds_short), ('EndTime', time_cds_short), ('XofSpinAxis', (np.float64, 8)), ('YofSpinAxis', (np.float64, 8)), ('ZofSpinAxis', (np.float64, 8))] # 20420 bytes attitude = [ ('PeriodStartTime', time_cds_short), ('PeriodEndTime', time_cds_short), ('PrincipleAxisOffsetAngle', np.float64), ('AttitudePolynomial', (attitude_coeff, 100))] # 59 bytes utc_correlation = [ ('PeriodStartTime', time_cds_short), ('PeriodEndTime', time_cds_short), ('OnBoardTimeStart', (np.uint8, 7)), ('VarOnBoardTimeStart', np.float64), ('A1', np.float64), ('VarA1', np.float64), ('A2', np.float64), ('VarA2', np.float64)] # 60134 bytes record = [ ('SatelliteDefinition', satellite_definition), ('SatelliteOperations', satellite_operations), ('Orbit', orbit), ('Attitude', attitude), ('SpinRetreatRCStart', np.float64), ('UTCCorrelation', utc_correlation)] return record @property def image_acquisition(self): planned_acquisition_time = [ ('TrueRepeatCycleStart', time_cds_expanded), ('PlanForwardScanEnd', time_cds_expanded), ('PlannedRepeatCycleEnd', time_cds_expanded)] radiometer_status = [ ('ChannelStatus', (np.uint8, 12)), ('DetectorStatus', (np.uint8, 42))] hrv_frame_offsets = [ ('MDUNomHRVDelay1', np.uint16), ('MDUNomHRVDelay2', np.uint16), ('Spare', np.uint16), ('MDUNomHRVBreakLine', np.uint16)] operation_parameters = [ ('L0_LineCounter', np.uint16), ('K1_RetraceLines', np.uint16), ('K2_PauseDeciseconds', np.uint16), ('K3_RetraceLines', np.uint16), ('K4_PauseDeciseconds', np.uint16), ('K5_RetraceLines', np.uint16), ('XDeepSpaceWindowPosition', np.uint8)] radiometer_settings = [ ('MDUSamplingDelays', (np.uint16, 42)), ('HRVFrameOffsets', hrv_frame_offsets), ('DHSSSynchSelection', np.uint8), ('MDUOutGain', (np.uint16, 42)), ('MDUCoarseGain', (np.uint8, 42)), ('MDUFineGain', (np.uint16, 42)), ('MDUNumericalOffset', (np.uint16, 42)), ('PUGain', (np.uint16, 42)), ('PUOffset', (np.uint16, 27)), ('PUBias', (np.uint16, 15)), ('OperationParameters', operation_parameters), ('RefocusingLines', np.uint16), ('RefocusingDirection', np.uint8), ('RefocusingPosition', np.uint16), ('ScanRefPosFlag', np.bool), ('ScanRefPosNumber', np.uint16), ('ScanRefPosVal', np.float32), ('ScanFirstLine', np.uint16), ('ScanLastLine', np.uint16), ('RetraceStartLine', np.uint16)] decontamination = [ ('DecontaminationNow', np.bool), ('DecontaminationStart', time_cds_short), 
('DecontaminationEnd', time_cds_short)] radiometer_operations = [ ('LastGainChangeFlag', np.bool), ('LastGainChangeTime', time_cds_short), ('Decontamination', decontamination), ('BBCalScheduled', np.bool), ('BBCalibrationType', np.uint8), ('BBFirstLine', np.uint16), ('BBLastLine', np.uint16), ('ColdFocalPlaneOpTemp', np.uint16), ('WarmFocalPlaneOpTemp', np.uint16)] record = [ ('PlannedAcquisitionTime', planned_acquisition_time), ('RadiometerStatus', radiometer_status), ('RadiometerSettings', radiometer_settings), ('RadiometerOperations', radiometer_operations)] return record @property def celestial_events(self): earth_moon_sun_coeff = [ ('StartTime', time_cds_short), ('EndTime', time_cds_short), ('AlphaCoef', (np.float64, 8)), ('BetaCoef', (np.float64, 8))] star_coeff = [ ('StarId', np.uint16), ('StartTime', time_cds_short), ('EndTime', time_cds_short), ('AlphaCoef', (np.float64, 8)), ('BetaCoef', (np.float64, 8))] ephemeris = [ ('PeriodTimeStart', time_cds_short), ('PeriodTimeEnd', time_cds_short), ('RelatedOrbitFileTime', 'S15'), ('RelatedAttitudeFileTime', 'S15'), ('EarthEphemeris', (earth_moon_sun_coeff, 100)), ('MoonEphemeris', (earth_moon_sun_coeff, 100)), ('SunEphemeris', (earth_moon_sun_coeff, 100)), ('StarEphemeris', (star_coeff, (20, 100)))] relation_to_image = [ ('TypeOfEclipse', np.uint8), ('EclipseStartTime', time_cds_short), ('EclipseEndTime', time_cds_short), ('VisibleBodiesInImage', np.uint8), ('BodiesCloseToFOV', np.uint8), ('ImpactOnImageQuality', np.uint8)] record = [ ('CelestialBodiesPosition', ephemeris), ('RelationToImage', relation_to_image)] return record @property def image_description(self): projection_description = [ ('TypeOfProjection', np.uint8), ('LongitudeOfSSP', np.float32)] reference_grid = [ ('NumberOfLines', np.int32), ('NumberOfColumns', np.int32), ('LineDirGridStep', np.float32), ('ColumnDirGridStep', np.float32), ('GridOrigin', np.uint8)] planned_coverage_vis_ir = [ ('SouthernLinePlanned', np.int32), ('NorthernLinePlanned', np.int32), ('EasternColumnPlanned', np.int32), ('WesternColumnPlanned', np.int32)] planned_coverage_hrv = [ ('LowerSouthLinePlanned', np.int32), ('LowerNorthLinePlanned', np.int32), ('LowerEastColumnPlanned', np.int32), ('LowerWestColumnPlanned', np.int32), ('UpperSouthLinePlanned', np.int32), ('UpperNorthLinePlanned', np.int32), ('UpperEastColumnPlanned', np.int32), ('UpperWestColumnPlanned', np.int32)] level_15_image_production = [ ('ImageProcDirection', np.uint8), ('PixelGenDirection', np.uint8), ('PlannedChanProcessing', (np.uint8, 12))] record = [ ('ProjectionDescription', projection_description), ('ReferenceGridVIS_IR', reference_grid), ('ReferenceGridHRV', reference_grid), ('PlannedCoverageVIS_IR', planned_coverage_vis_ir), ('PlannedCoverageHRV', planned_coverage_hrv), ('Level15ImageProduction', level_15_image_production)] return record @property def radiometric_processing(self): rp_summary = [ ('RadianceLinearization', (np.bool, 12)), ('DetectorEqualization', (np.bool, 12)), ('OnboardCalibrationResult', (np.bool, 12)), ('MPEFCalFeedback', (np.bool, 12)), ('MTFAdaptation', (np.bool, 12)), ('StrayLightCorrection', (np.bool, 12))] level_15_image_calibration = [ ('CalSlope', np.float64), ('CalOffset', np.float64)] time_cuc_size = [ ('CT1', np.uint8), ('CT2', np.uint8), ('CT3', np.uint8), ('CT4', np.uint8), ('FT1', np.uint8), ('FT2', np.uint8), ('FT3', np.uint8)] cold_fp_temperature = [ ('FCUNominalColdFocalPlaneTemp', np.uint16), ('FCURedundantColdFocalPlaneTemp', np.uint16)] warm_fp_temperature = [ 
('FCUNominalWarmFocalPlaneVHROTemp', np.uint16), ('FCURedundantWarmFocalPlaneVHROTemp', np.uint16)] scan_mirror_temperature = [ ('FCUNominalScanMirrorSensor1Temp', np.uint16), ('FCURedundantScanMirrorSensor1Temp', np.uint16), ('FCUNominalScanMirrorSensor2Temp', np.uint16), ('FCURedundantScanMirrorSensor2Temp', np.uint16)] m1m2m3_temperature = [ ('FCUNominalM1MirrorSensor1Temp', np.uint16), ('FCURedundantM1MirrorSensor1Temp', np.uint16), ('FCUNominalM1MirrorSensor2Temp', np.uint16), ('FCURedundantM1MirrorSensor2Temp', np.uint16), ('FCUNominalM23AssemblySensor1Temp', np.uint8), ('FCURedundantM23AssemblySensor1Temp', np.uint8), ('FCUNominalM23AssemblySensor2Temp', np.uint8), ('FCURedundantM23AssemblySensor2Temp', np.uint8)] baffle_temperature = [ ('FCUNominalM1BaffleTemp', np.uint16), ('FCURedundantM1BaffleTemp', np.uint16)] blackbody_temperature = [ ('FCUNominalBlackBodySensorTemp', np.uint16), ('FCURedundantBlackBodySensorTemp', np.uint16)] fcu_mode = [ ('FCUNominalSMMStatus', 'S2'), ('FCURedundantSMMStatus', 'S2')] extracted_bb_data = [ ('NumberOfPixelsUsed', np.uint32), ('MeanCount', np.float32), ('RMS', np.float32), ('MaxCount', np.uint16), ('MinCount', np.uint16), ('BB_Processing_Slope', np.float64), ('BB_Processing_Offset', np.float64)] bb_related_data = [ ('OnBoardBBTime', time_cuc_size), ('MDUOutGain', (np.uint16, 42)), ('MDUCoarseGain', (np.uint8, 42)), ('MDUFineGain', (np.uint16, 42)), ('MDUNumericalOffset', (np.uint16, 42)), ('PUGain', (np.uint16, 42)), ('PUOffset', (np.uint16, 27)), ('PUBias', (np.uint16, 15)), ('DCRValues', (np.uint8, 63)), ('X_DeepSpaceWindowPosition', np.int8), ('ColdFPTemperature', cold_fp_temperature), ('WarmFPTemperature', warm_fp_temperature), ('ScanMirrorTemperature', scan_mirror_temperature), ('M1M2M3Temperature', m1m2m3_temperature), ('BaffleTemperature', baffle_temperature), ('BlackBodyTemperature', blackbody_temperature), ('FCUMode', fcu_mode), ('ExtractedBBData', (extracted_bb_data, 12))] black_body_data_used = [ ('BBObservationUTC', time_cds_expanded), ('BBRelatedData', bb_related_data)] impf_cal_data = [ ('ImageQualityFlag', np.uint8), ('ReferenceDataFlag', np.uint8), ('AbsCalMethod', np.uint8), ('Pad1', 'S1'), ('AbsCalWeightVic', np.float32), ('AbsCalWeightXsat', np.float32), ('AbsCalCoeff', np.float32), ('AbsCalError', np.float32), ('GSICSCalCoeff', np.float32), ('GSICSCalError', np.float32), ('GSICSOffsetCount', np.float32)] rad_proc_mtf_adaptation = [ ('VIS_IRMTFCorrectionE_W', (np.float32, (33, 16))), ('VIS_IRMTFCorrectionN_S', (np.float32, (33, 16))), ('HRVMTFCorrectionE_W', (np.float32, (9, 16))), ('HRVMTFCorrectionN_S', (np.float32, (9, 16))), ('StraylightCorrection', (np.float32, (12, 8, 8)))] record = [ ('RPSummary', rp_summary), ('Level15ImageCalibration', (level_15_image_calibration, 12)), ('BlackBodyDataUsed', black_body_data_used), ('MPEFCalFeedback', (impf_cal_data, 12)), ('RadTransform', (np.float32, (42, 64))), ('RadProcMTFAdaptation', rad_proc_mtf_adaptation)] return record @property def geometric_processing(self): opt_axis_distances = [ ('E-WFocalPlane', (np.float32, 42)), ('N_SFocalPlane', (np.float32, 42))] earth_model = [ ('TypeOfEarthModel', np.uint8), ('EquatorialRadius', np.float64), ('NorthPolarRadius', np.float64), ('SouthPolarRadius', np.float64)] record = [ ('OptAxisDistances', opt_axis_distances), ('EarthModel', earth_model), ('AtmosphericModel', (np.float32, (12, 360))), ('ResamplingFunctions', (np.uint8, 12))] return record @property def impf_configuration(self): overall_configuration = [ ('Issue', np.uint16), 
('Revision', np.uint16) ] sw_version = overall_configuration info_base_versions = sw_version su_configuration = [ ('SWVersion', sw_version), ('InfoBaseVersions', (info_base_versions, 10)) ] su_details = [ ('SUId', GSDTRecords.gp_su_id), ('SUIdInstance', np.int8), ('SUMode', np.uint8), ('SUState', np.uint8), ('SUConfiguration', su_configuration) ] equalisation_params = [ ('ConstCoeff', np.float32), ('LinearCoeff', np.float32), ('QuadraticCoeff', np.float32) ] black_body_data_for_warm_start = [ ('GTotalForMethod1', (np.float64, 12)), ('GTotalForMethod2', (np.float64, 12)), ('GTotalForMethod3', (np.float64, 12)), ('GBackForMethod1', (np.float64, 12)), ('GBackForMethod2', (np.float64, 12)), ('GBackForMethod3', (np.float64, 12)), ('RatioGTotalToGBack', (np.float64, 12)), ('GainInFrontOpticsCont', (np.float64, 12)), ('CalibrationConstants', (np.float32, 12)), ('maxIncidentRadiance', (np.float64, 12)), ('TimeOfColdObsSeconds', np.float64), ('TimeOfColdObsNanoSecs', np.float64), ('IncidenceRadiance', (np.float64, 12)), ('TempCal', np.float64), ('TempM1', np.float64), ('TempScan', np.float64), ('TempM1Baf', np.float64), ('TempCalSurround', np.float64) ] mirror_parameters = [ ('MaxFeedbackVoltage', np.float64), ('MinFeedbackVoltage', np.float64), ('MirrorSlipEstimate', np.float64) ] hktm_parameters = [ ('TimeS0Packet', time_cds_short), ('TimeS1Packet', time_cds_short), ('TimeS2Packet', time_cds_short), ('TimeS3Packet', time_cds_short), ('TimeS4Packet', time_cds_short), ('TimeS5Packet', time_cds_short), ('TimeS6Packet', time_cds_short), ('TimeS7Packet', time_cds_short), ('TimeS8Packet', time_cds_short), ('TimeS9Packet', time_cds_short), ('TimeSYPacket', time_cds_short), ('TimePSPacket', time_cds_short) ] warm_start_params = [ ('ScanningLaw', (np.float64, 1527)), ('RadFramesAlignment', (np.float64, 3)), ('ScanningLawVariation', (np.float32, 2)), ('EqualisationParams', (equalisation_params, 42)), ('BlackBodyDataForWarmStart', black_body_data_for_warm_start), ('MirrorParameters', mirror_parameters), ('LastSpinPeriod', np.float64), ('HKTMParameters', hktm_parameters), ('WSPReserved', (np.uint8, 3312)) ] record = [ ('OverallConfiguration', overall_configuration), ('SUDetails', (su_details, 50)), ('WarmStartParams', warm_start_params) ] return record class Msg15NativeTrailerRecord(object): """ SEVIRI Level 1.5 trailer for native-format Reference Document (EUM/MSG/ICD/105): MSG Level 1.5 Image Data Format Description """ def get(self): # 380363 bytes record = [ ('GP_PK_HEADER', GSDTRecords.gp_pk_header), ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), ('15TRAILER', self.seviri_l15_trailer) ] return np.dtype(record).newbyteorder('>') @property def seviri_l15_trailer(self): record = [ ('15TrailerVersion', np.uint8), ('ImageProductionStats', self.image_production_stats), ('NavigationExtractionResults', self.navigation_extraction_results), ('RadiometricQuality', self.radiometric_quality), ('GeometricQuality', self.geometric_quality), ('TimelinessAndCompleteness', self.timeliness_and_completeness) ] return record @property def image_production_stats(self): gp_sc_id = GSDTRecords.gp_sc_id actual_scanning_summary = [ ('NominalImageScanning', np.uint8), ('ReducedScan', np.uint8), ('ForwardScanStart', time_cds_short), ('ForwardScanEnd', time_cds_short) ] radiometric_behaviour = [ ('NominalBehaviour', np.uint8), ('RadScanIrregularity', np.uint8), ('RadStoppage', np.uint8), ('RepeatCycleNotCompleted', np.uint8), ('GainChangeTookPlace', np.uint8), ('DecontaminationTookPlace', np.uint8), ('NoBBCalibrationAchieved', np.uint8), 
('IncorrectTemperature', np.uint8), ('InvalidBBData', np.uint8), ('InvalidAuxOrHKTMData', np.uint8), ('RefocusingMechanismActuated', np.uint8), ('MirrorBackToReferencePos', np.uint8) ] reception_summary_stats = [ ('PlannedNumberOfL10Lines', (np.uint32, 12)), ('NumberOfMissingL10Lines', (np.uint32, 12)), ('NumberOfCorruptedL10Lines', (np.uint32, 12)), ('NumberOfReplacedL10Lines', (np.uint32, 12)) ] l15_image_validity = [ ('NominalImage', np.uint8), ('NonNominalBecauseIncomplete', np.uint8), ('NonNominalRadiometricQuality', np.uint8), ('NonNominalGeometricQuality', np.uint8), ('NonNominalTimeliness', np.uint8), ('IncompleteL15', np.uint8), ] actual_l15_coverage_vis_ir = [ ('SouthernLineActual', np.int32), ('NorthernLineActual', np.int32), ('EasternColumnActual', np.int32), ('WesternColumnActual', np.int32) ] actual_l15_coverage_hrv = [ ('LowerSouthLineActual', np.int32), ('LowerNorthLineActual', np.int32), ('LowerEastColumnActual', np.int32), ('LowerWestColumnActual', np.int32), ('UpperSouthLineActual', np.int32), ('UpperNorthLineActual', np.int32), ('UpperEastColumnActual', np.int32), ('UpperWestColumnActual', np.int32), ] record = [ ('SatelliteId', gp_sc_id), ('ActualScanningSummary', actual_scanning_summary), ('RadiometricBehaviour', radiometric_behaviour), ('ReceptionSummaryStats', reception_summary_stats), ('L15ImageValidity', (l15_image_validity, 12)), ('ActualL15CoverageVIS_IR', actual_l15_coverage_vis_ir), ('ActualL15CoverageHRV', actual_l15_coverage_hrv) ] return record @property def navigation_extraction_results(self): horizon_observation = [ ('HorizonId', np.uint8), ('Alpha', np.float64), ('AlphaConfidence', np.float64), ('Beta', np.float64), ('BetaConfidence', np.float64), ('ObservationTime', time_cds), ('SpinRate', np.float64), ('AlphaDeviation', np.float64), ('BetaDeviation', np.float64) ] star_observation = [ ('StarId', np.uint16), ('Alpha', np.float64), ('AlphaConfidence', np.float64), ('Beta', np.float64), ('BetaConfidence', np.float64), ('ObservationTime', time_cds), ('SpinRate', np.float64), ('AlphaDeviation', np.float64), ('BetaDeviation', np.float64) ] landmark_observation = [ ('LandmarkId', np.uint16), ('LandmarkLongitude', np.float64), ('LandmarkLatitude', np.float64), ('Alpha', np.float64), ('AlphaConfidence', np.float64), ('Beta', np.float64), ('BetaConfidence', np.float64), ('ObservationTime', time_cds), ('SpinRate', np.float64), ('AlphaDeviation', np.float64), ('BetaDeviation', np.float64) ] record = [ ('ExtractedHorizons', (horizon_observation, 4)), ('ExtractedStars', (star_observation, 20)), ('ExtractedLandmarks', (landmark_observation, 50)) ] return record @property def radiometric_quality(self): l10_rad_quality = [ ('FullImageMinimumCount', np.uint16), ('FullImageMaximumCount', np.uint16), ('EarthDiskMinimumCount', np.uint16), ('EarthDiskMaximumCount', np.uint16), ('MoonMinimumCount', np.uint16), ('MoonMaximumCount', np.uint16), ('FullImageMeanCount', np.float32), ('FullImageStandardDeviation', np.float32), ('EarthDiskMeanCount', np.float32), ('EarthDiskStandardDeviation', np.float32), ('MoonMeanCount', np.float32), ('MoonStandardDeviation', np.float32), ('SpaceMeanCount', np.float32), ('SpaceStandardDeviation', np.float32), ('SESpaceCornerMeanCount', np.float32), ('SESpaceCornerStandardDeviation', np.float32), ('SWSpaceCornerMeanCount', np.float32), ('SWSpaceCornerStandardDeviation', np.float32), ('NESpaceCornerMeanCount', np.float32), ('NESpaceCornerStandardDeviation', np.float32), ('NWSpaceCornerMeanCount', np.float32), ('NWSpaceCornerStandardDeviation', 
np.float32), ('4SpaceCornersMeanCount', np.float32), ('4SpaceCornersStandardDeviation', np.float32), ('FullImageHistogram', (np.uint32, 256)), ('EarthDiskHistogram', (np.uint32, 256)), ('ImageCentreSquareHistogram', (np.uint32, 256)), ('SESpaceCornerHistogram', (np.uint32, 128)), ('SWSpaceCornerHistogram', (np.uint32, 128)), ('NESpaceCornerHistogram', (np.uint32, 128)), ('NWSpaceCornerHistogram', (np.uint32, 128)), ('FullImageEntropy', (np.float32, 3)), ('EarthDiskEntropy', (np.float32, 3)), ('ImageCentreSquareEntropy', (np.float32, 3)), ('SESpaceCornerEntropy', (np.float32, 3)), ('SWSpaceCornerEntropy', (np.float32, 3)), ('NESpaceCornerEntropy', (np.float32, 3)), ('NWSpaceCornerEntropy', (np.float32, 3)), ('4SpaceCornersEntropy', (np.float32, 3)), ('ImageCentreSquarePSD_EW', (np.float32, 128)), ('FullImagePSD_EW', (np.float32, 128)), ('ImageCentreSquarePSD_NS', (np.float32, 128)), ('FullImagePSD_NS', (np.float32, 128)) ] l15_rad_quality = [ ('FullImageMinimumCount', np.uint16), ('FullImageMaximumCount', np.uint16), ('EarthDiskMinimumCount', np.uint16), ('EarthDiskMaximumCount', np.uint16), ('FullImageMeanCount', np.float32), ('FullImageStandardDeviation', np.float32), ('EarthDiskMeanCount', np.float32), ('EarthDiskStandardDeviation', np.float32), ('SpaceMeanCount', np.float32), ('SpaceStandardDeviation', np.float32), ('FullImageHistogram', (np.uint32, 256)), ('EarthDiskHistogram', (np.uint32, 256)), ('ImageCentreSquareHistogram', (np.uint32, 256)), ('FullImageEntropy', (np.float32, 3)), ('EarthDiskEntropy', (np.float32, 3)), ('ImageCentreSquareEntropy', (np.float32, 3)), ('ImageCentreSquarePSD_EW', (np.float32, 128)), ('FullImagePSD_EW', (np.float32, 128)), ('ImageCentreSquarePSD_NS', (np.float32, 128)), ('FullImagePSD_NS', (np.float32, 128)), ('SESpaceCornerL15_RMS', np.float32), ('SESpaceCornerL15_Mean', np.float32), ('SWSpaceCornerL15_RMS', np.float32), ('SWSpaceCornerL15_Mean', np.float32), ('NESpaceCornerL15_RMS', np.float32), ('NESpaceCornerL15_Mean', np.float32), ('NWSpaceCornerL15_RMS', np.float32), ('NWSpaceCornerL15_Mean', np.float32) ] record = [ ('L10RadQuality', (l10_rad_quality, 42)), ('L15RadQuality', (l15_rad_quality, 12)) ] return record @property def geometric_quality(self): absolute_accuracy = [ ('QualityInfoValidity', np.uint8), ('EastWestAccuracyRMS', np.float32), ('NorthSouthAccuracyRMS', np.float32), ('MagnitudeRMS', np.float32), ('EastWestUncertaintyRMS', np.float32), ('NorthSouthUncertaintyRMS', np.float32), ('MagnitudeUncertaintyRMS', np.float32), ('EastWestMaxDeviation', np.float32), ('NorthSouthMaxDeviation', np.float32), ('MagnitudeMaxDeviation', np.float32), ('EastWestUncertaintyMax', np.float32), ('NorthSouthUncertaintyMax', np.float32), ('MagnitudeUncertaintyMax', np.float32) ] relative_accuracy = absolute_accuracy pixels_500_relative_accuracy = absolute_accuracy pixels_16_relative_accuracy = absolute_accuracy misregistration_residuals = [ ('QualityInfoValidity', np.uint8), ('EastWestResidual', np.float32), ('NorthSouthResidual', np.float32), ('EastWestUncertainty', np.float32), ('NorthSouthUncertainty', np.float32), ('EastWestRMS', np.float32), ('NorthSouthRMS', np.float32), ('EastWestMagnitude', np.float32), ('NorthSouthMagnitude', np.float32), ('EastWestMagnitudeUncertainty', np.float32), ('NorthSouthMagnitudeUncertainty', np.float32) ] geometric_quality_status = [ ('QualityNominal', np.uint8), ('NominalAbsolute', np.uint8), ('NominalRelativeToPreviousImage', np.uint8), ('NominalForREL500', np.uint8), ('NominalForREL16', np.uint8), 
('NominalForResMisreg', np.uint8) ] record = [ ('AbsoluteAccuracy', (absolute_accuracy, 12)), ('RelativeAccuracy', (relative_accuracy, 12)), ('500PixelsRelativeAccuracy', (pixels_500_relative_accuracy, 12)), ('16PixelsRelativeAccuracy', (pixels_16_relative_accuracy, 12)), ('MisregistrationResiduals', (misregistration_residuals, 12)), ('GeometricQualityStatus', (geometric_quality_status, 12)) ] return record @property def timeliness_and_completeness(self): timeliness = [ ('MaxDelay', np.float32), ('MinDelay', np.float32), ('MeanDelay', np.float32) ] completeness = [ ('PlannedL15ImageLines', np.uint16), ('GeneratedL15ImageLines', np.uint16), ('ValidL15ImageLines', np.uint16), ('DummyL15ImageLines', np.uint16), ('CorruptedL15ImageLines', np.uint16) ] record = [ ('Timeliness', timeliness), ('Completeness', (completeness, 12)) ] return record class HritPrologue(L15DataHeaderRecord): def get(self): # X bytes record = [ ('SatelliteStatus', self.satellite_status), ('ImageAcquisition', self.image_acquisition), ('CelestialEvents', self.celestial_events), ('ImageDescription', self.image_description), ('RadiometricProcessing', self.radiometric_processing), ('GeometricProcessing', self.geometric_processing) ] return np.dtype(record).newbyteorder('>') hrit_epilogue = np.dtype( Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder('>') hrit_prologue = HritPrologue().get() impf_configuration = np.dtype( L15DataHeaderRecord().impf_configuration).newbyteorder('>') native_header = Msg15NativeHeaderRecord().get() native_trailer = Msg15NativeTrailerRecord().get() satpy-0.20.0/satpy/readers/seviri_l1b_nc.py000066400000000000000000000211351362525524100206160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SEVIRI netcdf format reader. References: MSG Level 1.5 Image Data Format Description https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_TEN_05105_MSG_IMG_DATA&RevisionSelectionMethod=LatestReleased&Rendition=Web """ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import (SEVIRICalibrationHandler, CHANNEL_NAMES, CALIB, SATNUM) import xarray as xr from satpy.readers._geos_area import get_area_definition import datetime class NCSEVIRIFileHandler(BaseFileHandler, SEVIRICalibrationHandler): def __init__(self, filename, filename_info, filetype_info): super(NCSEVIRIFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = None self.mda = {} self.reference = datetime.datetime(1958, 1, 1) self._read_file() @property def start_time(self): return self.deltaSt @property def end_time(self): return self.deltaEnd def _read_file(self): if self.nc is None: self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={}) # Obtain some area definition attributes equatorial_radius = (self.nc.attrs['equatorial_radius'] * 1000.) 
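        # A worked example of the repeat cycle start-time decoding done further
        # below in this method, with assumed (hypothetical) attribute values:
        #
        #     from datetime import datetime, timedelta
        #     datetime(1958, 1, 1) + timedelta(days=22278, milliseconds=45000000)
        #     # -> datetime(2018, 12, 30, 12, 30); the day and millisecond
        #     # counters are offsets from the 1958-01-01 reference epoch.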
polar_radius = (self.nc.attrs['north_polar_radius'] * 1000 + self.nc.attrs['south_polar_radius'] * 1000) * 0.5 ssp_lon = self.nc.attrs['longitude_of_SSP'] self.mda['projection_parameters'] = {'a': equatorial_radius, 'b': polar_radius, 'h': 35785831.00, 'ssp_longitude': ssp_lon} self.mda['number_of_lines'] = int(self.nc.dims['num_rows_vis_ir']) self.mda['number_of_columns'] = int(self.nc.dims['num_columns_vis_ir']) self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv']) self.deltaSt = self.reference + datetime.timedelta( days=int(self.nc.attrs['true_repeat_cycle_start_day']), milliseconds=int(self.nc.attrs['true_repeat_cycle_start_mi_sec'])) self.deltaEnd = self.reference + datetime.timedelta( days=int(self.nc.attrs['planned_repeat_cycle_end_day']), milliseconds=int(self.nc.attrs['planned_repeat_cycle_end_mi_sec'])) self.north = int(self.nc.attrs['north_most_line']) self.east = int(self.nc.attrs['east_most_pixel']) self.west = int(self.nc.attrs['west_most_pixel']) self.south = int(self.nc.attrs['south_most_line']) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" channel = dataset_id.name i = list(CHANNEL_NAMES.values()).index(channel) if (channel == 'HRV'): self.nc = self.nc.rename({'num_columns_hrv': 'x', 'num_rows_hrv': 'y'}) else: # the first channel of a composite will rename the dimension variable, # but later channels will raise a ValueError as it has already been # renamed; we can safely ignore these exceptions try: self.nc = self.nc.rename({'num_columns_vis_ir': 'x', 'num_rows_vis_ir': 'y'}) except ValueError: pass dataset = self.nc[dataset_info['nc_key']] dataset.attrs.update(dataset_info) # Calibrate the data as needed # MPEF MSG calibration coefficients (gain and offset) offset = dataset.attrs['add_offset'].astype('float32') gain = dataset.attrs['scale_factor'].astype('float32') self.platform_id = int(self.nc.attrs['satellite_id']) cal_type = self.nc['planned_chan_processing'].values[i] # Correct for the scan line order dataset = dataset.sel(y=slice(None, None, -1)) if dataset_id.calibration == 'counts': dataset.attrs['_FillValue'] = 0 if dataset_id.calibration in ['radiance', 'reflectance', 'brightness_temperature']: dataset = dataset.where(dataset != 0).astype('float32') dataset = self._convert_to_radiance(dataset, gain, offset) if dataset_id.calibration == 'reflectance': solar_irradiance = CALIB[int(self.platform_id)][channel]["F"] dataset = self._vis_calibrate(dataset, solar_irradiance) elif dataset_id.calibration == 'brightness_temperature': dataset = self._ir_calibrate(dataset, channel, cal_type) dataset.attrs.update(self.nc[dataset_info['nc_key']].attrs) dataset.attrs.update(dataset_info) dataset.attrs['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] dataset.attrs['sensor'] = 'seviri' dataset.attrs['orbital_parameters'] = { 'projection_longitude': self.mda['projection_parameters']['ssp_longitude'], 'projection_latitude': 0., 'projection_altitude': self.mda['projection_parameters']['h']} return dataset def get_area_def(self, dataset_id): """Get the area definition of the band.""" pdict = {} pdict['a'] = self.mda['projection_parameters']['a'] pdict['b'] = self.mda['projection_parameters']['b'] pdict['h'] = self.mda['projection_parameters']['h'] pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] if dataset_id.name == 'HRV': pdict['nlines'] = self.mda['hrv_number_of_lines'] pdict['ncols'] = self.mda['hrv_number_of_columns'] pdict['a_name'] = 'geosmsg_hrv' pdict['a_desc'] = 'MSG/SEVIRI high resolution channel
area' pdict['p_id'] = 'msg_hires' else: pdict['nlines'] = self.mda['number_of_lines'] pdict['ncols'] = self.mda['number_of_columns'] pdict['a_name'] = 'geosmsg' pdict['a_desc'] = 'MSG/SEVIRI low resolution channel area' pdict['p_id'] = 'msg_lowres' area = get_area_definition(pdict, self.get_area_extent(dataset_id)) return area def get_area_extent(self, dsid): # following calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} grid_origin = self.nc.attrs['vis_ir_grid_origin'] grid_origin = int(grid_origin, 16) if grid_origin != 2: raise NotImplementedError( 'Grid origin not supported number: {}, {} corner' .format(grid_origin, origins[grid_origin]) ) center_point = 3712/2 column_step = self.nc.attrs['vis_ir_column_dir_grid_step'] * 1000.0 line_step = self.nc.attrs['vis_ir_line_dir_grid_step'] * 1000.0 # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description earth_model = int(self.nc.attrs['type_of_earth_model'], 16) if earth_model == 2: ns_offset = 0 # north +ve we_offset = 0 # west +ve elif earth_model == 1: ns_offset = -0.5 # north +ve we_offset = 0.5 # west +ve else: raise NotImplementedError( 'unrecognised earth model: {}'.format(earth_model) ) # section 3.1.5 of MSG Level 1.5 Image Data Format Description ll_c = (center_point - self.west - 0.5 + we_offset) * column_step ll_l = (self.south - center_point - 0.5 + ns_offset) * line_step ur_c = (center_point - self.east + 0.5 + we_offset) * column_step ur_l = (self.north - center_point + 0.5 + ns_offset) * line_step area_extent = (ll_c, ll_l, ur_c, ur_l) return area_extent class NCSEVIRIHRVFileHandler(BaseFileHandler, SEVIRICalibrationHandler): pass satpy-0.20.0/satpy/readers/seviri_l2_bufr.py000066400000000000000000000132751362525524100210210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SEVIRI L2 BUFR format reader.""" import logging from datetime import timedelta, datetime import numpy as np import xarray as xr import dask.array as da from satpy.readers.seviri_base import mpef_product_header from satpy.readers.eum_base import recarray2dict try: import eccodes as ec except ImportError: raise ImportError( "Missing eccodes-python and/or eccodes C-library installation. 
Use conda to install eccodes") from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE logger = logging.getLogger('SeviriL2Bufr') data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0000', 'name': '09'}, 57: {'ssp': 'E0095', 'name': '10'}, 70: {'ssp': 'E0000', 'name': '11'}} seg_size_dict = {'seviri_l2_bufr_asr': 16, 'seviri_l2_bufr_cla': 16, 'seviri_l2_bufr_csr': 16, 'seviri_l2_bufr_gii': 3, 'seviri_l2_bufr_thu': 16, 'seviri_l2_bufr_toz': 3} class SeviriL2BufrFileHandler(BaseFileHandler): """File handler for SEVIRI L2 BUFR products.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialise the file handler for SEVIRI L2 BUFR data.""" super(SeviriL2BufrFileHandler, self).__init__(filename, filename_info, filetype_info) if ('server' in filename_info): # EUMETSAT Offline Bufr product self.mpef_header = self._read_mpef_header() else: # Product was retrieved from the EUMETSAT Data Center timeStr = self.get_attribute('typicalDate')+self.get_attribute('typicalTime') buf_start_time = datetime.strptime(timeStr, "%Y%m%d%H%M%S") sc_id = self.get_attribute('satelliteIdentifier') self.mpef_header = {} self.mpef_header['NominalTime'] = buf_start_time self.mpef_header['SpacecraftName'] = data_center_dict[sc_id]['name'] self.mpef_header['RectificationLongitude'] = data_center_dict[sc_id]['ssp'] self.seg_size = seg_size_dict[filetype_info['file_type']] @property def start_time(self): """Return the repeat cycle start time.""" return self.mpef_header['NominalTime'] @property def end_time(self): """Return the repeat cycle end time.""" return self.start_time + timedelta(minutes=15) @property def platform_name(self): """Return spacecraft name.""" return 'MET{}'.format(self.mpef_header['SpacecraftName']) @property def ssp_lon(self): """Return subsatellite point longitude.""" # e.g. E0415 ssp_lon = self.mpef_header['RectificationLongitude'] return float(ssp_lon[1:])/10. def _read_mpef_header(self): """Read MPEF header.""" hdr = np.fromfile(self.filename, mpef_product_header, 1) return recarray2dict(hdr) def get_attribute(self, key): ''' Get BUFR attributes ''' # This function is inefficient as it is looping through the entire # file to get 1 attribute. 
It causes a problem though if you break # from the file early - dont know why but investigating - fix later fh = open(self.filename, "rb") while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break ec.codes_set(bufr, 'unpack', 1) attr = ec.codes_get(bufr, key) ec.codes_release(bufr) fh.close() return attr def get_array(self, key): """Get all data from file for the given BUFR key.""" with open(self.filename, "rb") as fh: msgCount = 0 while True: bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break ec.codes_set(bufr, 'unpack', 1) # if is the first message initialise our final array if (msgCount == 0): arr = da.from_array(ec.codes_get_array( bufr, key, float), chunks=CHUNK_SIZE) else: tmpArr = da.from_array(ec.codes_get_array( bufr, key, float), chunks=CHUNK_SIZE) arr = da.concatenate((arr, tmpArr)) msgCount = msgCount+1 ec.codes_release(bufr) if arr.size == 1: arr = arr[0] return arr def get_dataset(self, dataset_id, dataset_info): """Get dataset using the BUFR key in dataset_info.""" arr = self.get_array(dataset_info['key']) arr[arr == dataset_info['fill_value']] = np.nan xarr = xr.DataArray(arr, dims=["y"]) xarr.attrs['sensor'] = 'SEVIRI' xarr.attrs['platform_name'] = self.platform_name xarr.attrs['ssp_lon'] = self.ssp_lon xarr.attrs['seg_size'] = self.seg_size xarr.attrs.update(dataset_info) return xarr satpy-0.20.0/satpy/readers/slstr_l1b.py000066400000000000000000000257331362525524100200140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
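# ---------------------------------------------------------------------------
# Illustrative sketch (not part of satpy): the SeviriL2BufrFileHandler above
# collects one BUFR key per call by looping over every message in the file
# with eccodes and concatenating the per-message values into a single dask
# array.  A minimal standalone version of that pattern, assuming
# 'sample.bufr' is any local BUFR file and 'latitude' is a key present in
# its messages:
import dask.array as da
import eccodes as ec

def read_bufr_key(path, key, chunk_size=4096):
    """Collect ``key`` from every message of a BUFR file into one dask array."""
    pieces = []
    with open(path, "rb") as fh:
        while True:
            bufr = ec.codes_bufr_new_from_file(fh)  # one message handle per call
            if bufr is None:  # no messages left in the file
                break
            ec.codes_set(bufr, 'unpack', 1)  # unpack the data section
            pieces.append(da.from_array(ec.codes_get_array(bufr, key, float),
                                        chunks=chunk_size))
            ec.codes_release(bufr)  # free the message handle
    return da.concatenate(pieces)  # assumes the file held at least one message

# values = read_bufr_key('sample.bufr', 'latitude')
# ---------------------------------------------------------------------------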
"""SLSTR L1b reader.""" import logging import os import re from datetime import datetime import numpy as np import xarray as xr import dask.array as da from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE logger = logging.getLogger(__name__) PLATFORM_NAMES = {'S3A': 'Sentinel-3A', 'S3B': 'Sentinel-3B'} class NCSLSTRGeo(BaseFileHandler): """Filehandler for geo info.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(NCSLSTRGeo, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) self.cache = {} def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading %s.', key.name) try: variable = self.nc[info['file_key']] except KeyError: return info.update(variable.attrs) variable.attrs = info return variable @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') class NCSLSTR1B(BaseFileHandler): """Filehandler for l1 SLSTR data.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the SLSTR l1 data filehandler.""" super(NCSLSTR1B, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) self.channel = filename_info['dataset_name'] self.stripe = self.filename[-5] self.view = self.filename[-4] cal_file = os.path.join(os.path.dirname(self.filename), 'viscal.nc') self.cal = xr.open_dataset(cal_file, decode_cf=True, mask_and_scale=True, chunks={'views': CHUNK_SIZE}) indices_file = os.path.join(os.path.dirname(self.filename), 'indices_{}{}.nc'.format(self.stripe, self.view)) self.indices = xr.open_dataset(indices_file, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.indices = self.indices.rename({'columns': 'x', 'rows': 'y'}) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'slstr' @staticmethod def _cal_rad(rad, didx, solar_flux=None): """Calibrate.""" indices = np.isfinite(didx) rad[indices] /= solar_flux[didx[indices].astype(int)] return rad def get_dataset(self, key, info): """Load a dataset.""" if self.channel not in key.name: return logger.debug('Reading %s.', key.name) if key.calibration == 'brightness_temperature': variable = self.nc['{}_BT_{}{}'.format(self.channel, self.stripe, self.view)] else: variable = self.nc['{}_radiance_{}{}'.format(self.channel, self.stripe, self.view)] radiances = variable units = variable.attrs['units'] if key.calibration == 'reflectance': # TODO take into account sun-earth distance solar_flux = self.cal[re.sub('_[^_]*$', '', key.name) + '_solar_irradiances'] d_index = self.indices['detector_{}{}'.format(self.stripe, self.view)] idx = 0 if self.view == 'n' else 1 # 0: Nadir view, 1: oblique (check). 
radiances.data = da.map_blocks( self._cal_rad, radiances.data, d_index.data, solar_flux=solar_flux[:, idx].values) radiances *= np.pi * 100 units = '%' info.update(radiances.attrs) info.update(key.to_dict()) info.update(dict(units=units, platform_name=self.platform_name, sensor=self.sensor, view=self.view)) radiances.attrs = info return radiances @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') class NCSLSTRAngles(BaseFileHandler): """Filehandler for angles.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the angles reader.""" super(NCSLSTRAngles, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'slstr' self.view = filename_info['view'] self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] cart_file = os.path.join( os.path.dirname(self.filename), 'cartesian_i{}.nc'.format(self.view)) self.cart = xr.open_dataset(cart_file, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) cartx_file = os.path.join( os.path.dirname(self.filename), 'cartesian_tx.nc') self.cartx = xr.open_dataset(cartx_file, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) def get_dataset(self, key, info): """Load a dataset.""" if not info['view'].startswith(self.view): return logger.debug('Reading %s.', key.name) # Check if file_key is specified in the yaml file_key = info.get('file_key', key.name) variable = self.nc[file_key] l_step = self.nc.attrs.get('al_subsampling_factor', 1) c_step = self.nc.attrs.get('ac_subsampling_factor', 16) if c_step != 1 or l_step != 1: logger.debug('Interpolating %s.', key.name) # TODO: do it in cartesian coordinates ! 
problems at the date line are # possible tie_x = self.cartx['x_tx'].data[0, :][::-1] tie_y = self.cartx['y_tx'].data[:, 0] full_x = self.cart['x_i' + self.view].data full_y = self.cart['y_i' + self.view].data variable = variable.fillna(0) from scipy.interpolate import RectBivariateSpline spl = RectBivariateSpline( tie_y, tie_x, variable.data[:, ::-1]) values = spl.ev(full_y, full_x) variable = xr.DataArray(da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=['y', 'x'], attrs=variable.attrs) variable.attrs['platform_name'] = self.platform_name variable.attrs['sensor'] = self.sensor if 'units' not in variable.attrs: variable.attrs['units'] = 'degrees' variable.attrs.update(key.to_dict()) return variable @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') class NCSLSTRFlag(BaseFileHandler): """File handler for flags.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the flag reader.""" super(NCSLSTRFlag, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'columns': CHUNK_SIZE, 'rows': CHUNK_SIZE}) self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) self.stripe = self.filename[-5] self.view = self.filename[-4] # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] self.sensor = 'slstr' def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading %s.', key.name) variable = self.nc[key.name] info.update(variable.attrs) info.update(key.to_dict()) info.update(dict(platform_name=self.platform_name, sensor=self.sensor)) variable.attrs = info return variable @property def start_time(self): """Get the start time.""" return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get the end time.""" return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') satpy-0.20.0/satpy/readers/slstr_l2.py000066400000000000000000000061631362525524100176470ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see .
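# ---------------------------------------------------------------------------
# Illustrative sketch (not part of satpy): NCSLSTRAngles above expands angle
# datasets from the coarse tie-point grid to the full instrument grid with a
# bivariate spline evaluated at the cartesian coordinates of every pixel
# (the reader reverses tie_x because it is stored descending in the file).
# The same pattern on synthetic data, assuming a 10x10 tie-point grid
# expanded to a 100x100 target grid:
import numpy as np
from scipy.interpolate import RectBivariateSpline

tie_y = np.linspace(0., 1., 10)      # tie-point coordinates, strictly ascending
tie_x = np.linspace(0., 1., 10)
tie_values = np.outer(tie_y, tie_x)  # synthetic field sampled at the tie points

# coordinates of every target pixel
full_y, full_x = np.meshgrid(np.linspace(0., 1., 100),
                             np.linspace(0., 1., 100), indexing='ij')

spl = RectBivariateSpline(tie_y, tie_x, tie_values)
full_values = spl.ev(full_y, full_x)  # pointwise evaluation on the target grid
# ---------------------------------------------------------------------------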
"""Reader for Sentinel-3 SLSTR SST data.""" from datetime import datetime from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE import xarray as xr class SLSTRL2FileHandler(BaseFileHandler): """File handler for Sentinel-3 SSL L2 netCDF files.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Initialize the file handler for Sentinel-3 SSL L2 netCDF data.""" super(SLSTRL2FileHandler, self).__init__(filename, filename_info, filetype_info) if filename.endswith('tar'): import tarfile import os import tempfile with tempfile.TemporaryDirectory() as tempdir: with tarfile.open(name=filename, mode='r') as tf: sst_filename = next((name for name in tf.getnames() if name.endswith('nc') and 'GHRSST-SSTskin' in name)) tf.extract(sst_filename, tempdir) fullpath = os.path.join(tempdir, sst_filename) self.nc = xr.open_dataset(fullpath, decode_cf=True, mask_and_scale=True, engine=engine, chunks={'ni': CHUNK_SIZE, 'nj': CHUNK_SIZE}) else: self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=True, engine=engine, chunks={'ni': CHUNK_SIZE, 'nj': CHUNK_SIZE}) self.nc = self.nc.rename({'ni': 'x', 'nj': 'y'}) self.filename_info['start_time'] = datetime.strptime( self.nc.start_time, '%Y%m%dT%H%M%SZ') self.filename_info['end_time'] = datetime.strptime( self.nc.stop_time, '%Y%m%dT%H%M%SZ') def get_dataset(self, key, info): """Get any available dataset.""" stdname = info.get('standard_name') return self.nc[stdname].squeeze() @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info['end_time'] satpy-0.20.0/satpy/readers/tropomi_l2.py000066400000000000000000000152221362525524100201650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to TROPOMI L2 Reader. The TROPOspheric Monitoring Instrument (TROPOMI) is the satellite instrument on board the Copernicus Sentinel-5 Precursor satellite. It measures key atmospheric trace gasses, such as ozone, nitrogen oxides, sulfur dioxide, carbon monoxide, methane, and formaldehyde. Level 2 data products are available via the Copernicus Open Access Hub. 
For more information visit the following URL: http://www.tropomi.eu/data-products/level-2-products """ from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 import logging import numpy as np logger = logging.getLogger(__name__) class TROPOMIL2FileHandler(NetCDF4FileHandler): """File handler for TROPOMI L2 netCDF files.""" @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def platform_shortname(self): """Get start time.""" return self.filename_info['platform_shortname'] @property def sensor(self): """Get sensor.""" res = self['/attr/sensor'] if isinstance(res, np.ndarray): return str(res.astype(str)) return res def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" logger.debug("Available_datasets begin...") # Determine shape of the geolocation data (lat/lon) lat_shape = None for var_name, _val in self.file_content.items(): # Could probably avoid this hardcoding, will think on it if (var_name == 'PRODUCT/latitude'): lat_shape = self[var_name + "/shape"] break handled_variables = set() # update previously configured datasets logger.debug("Starting previously configured variables loop...") for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get('file_key', ds_info['name']) # logger.debug("Evaluating previously configured variable: %s", var_name) matches = self.file_type_matches(ds_info['file_type']) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self: logger.debug("Handling previously configured variable: %s", var_name) handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # This is where we dynamically add new datasets # We will sift through all groups and variables, looking for data matching # the geolocation bounds # Iterate over dataset contents for var_name, val in self.file_content.items(): # Only evaluate variables if isinstance(val, netCDF4.Variable): logger.debug("Evaluating new variable: %s", var_name) var_shape = self[var_name + "/shape"] logger.debug("Dims:{}".format(var_shape)) if (lat_shape == var_shape[:len(lat_shape)]): logger.debug("Found valid additional dataset: %s", var_name) # Skip anything we have already configured if (var_name in handled_variables): logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) last_index_separator = var_name.rindex('/') last_index_separator = last_index_separator + 1 var_name_no_path = var_name[last_index_separator:] logger.debug("Using short name of: %s", var_name_no_path) # Create new ds_info object if var_name_no_path in ['latitude_bounds', 'longitude_bounds']: coordinates = [] else: coordinates = ['longitude', 'latitude'] new_info = { 'name': var_name_no_path, 'file_key': var_name, 'coordinates': coordinates, 'file_type': self.filetype_info['file_type'], 'resolution': None, } yield True, new_info def get_metadata(self, data, ds_info): """Get metadata.""" metadata = {} metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ 'platform_shortname': 
self.platform_shortname, 'sensor': self.sensor, 'start_time': self.start_time, 'end_time': self.end_time, }) return metadata def _rename_dims(self, data_arr): """Normalize dimension names with the rest of Satpy.""" dims_dict = {} if 'ground_pixel' in data_arr.dims: dims_dict['ground_pixel'] = 'x' if 'scanline' in data_arr.dims: dims_dict['scanline'] = 'y' return data_arr.rename(dims_dict) def get_dataset(self, ds_id, ds_info): """Get dataset.""" logger.debug("Getting data for: %s", ds_id.name) file_key = ds_info.get('file_key', ds_id.name) data = self[file_key] data.attrs = self.get_metadata(data, ds_info) fill = data.attrs.pop('_FillValue') data = data.squeeze() data = data.where(data != fill) data = self._rename_dims(data) return data satpy-0.20.0/satpy/readers/utils.py000066400000000000000000000251321362525524100172400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helper functions for area extent calculations.""" import logging from contextlib import closing import tempfile import bz2 import sys import os import shutil import numpy as np import pyproj from six import BytesIO from subprocess import Popen, PIPE from pyresample.geometry import AreaDefinition from pyresample.boundary import AreaDefBoundary, Boundary from satpy import CHUNK_SIZE try: from shutil import which except ImportError: # python 2 - won't be used, but needed for mocking in tests which = None LOGGER = logging.getLogger(__name__) def np2str(value): """Convert a `numpy.string_` to str. Args: value (ndarray): scalar or 1-element numpy array to convert Raises: ValueError: if value is an array larger than one element, or it is not of type `numpy.string_`, or it is not a numpy array """ if hasattr(value, 'dtype') and \ issubclass(value.dtype.type, (np.string_, np.object_)) \ and value.size == 1: value = value.item() if not isinstance(value, str): # python 3 - was scalar numpy array of bytes # otherwise python 2 - scalar numpy array of 'str' value = value.decode() return value else: raise ValueError("Array is not a string type or is larger than 1") def get_geostationary_angle_extent(geos_area): """Get the max earth (vs space) viewing angles in x and y.""" # TODO: take into account sweep_axis_angle parameter # get some projection parameters req = float(geos_area.proj_dict['a']) / 1000 rp = float(geos_area.proj_dict['b']) / 1000 h = float(geos_area.proj_dict['h']) / 1000 + req # compute some constants aeq = 1 - req**2 / (h ** 2) ap_ = 1 - rp**2 / (h ** 2) # generate points around the north hemisphere in satellite projection # make it a bit smaller so that we stay inside the valid area xmax = np.arccos(np.sqrt(aeq)) ymax = np.arccos(np.sqrt(ap_)) return xmax, ymax def get_geostationary_mask(area): """Compute a mask of the earth's shape as seen by a geostationary satellite.
Args: area (pyresample.geometry.AreaDefinition) : Corresponding area definition Returns: Boolean mask, True inside the earth's shape, False outside. """ # Compute projection coordinates at the earth's limb h = area.proj_dict['h'] xmax, ymax = get_geostationary_angle_extent(area) xmax *= h ymax *= h # Compute projection coordinates at the centre of each pixel x, y = area.get_proj_coords(chunks=CHUNK_SIZE) # Compute mask of the earth's elliptical shape return ((x / xmax) ** 2 + (y / ymax) ** 2) <= 1 def _lonlat_from_geos_angle(x, y, geos_area): """Get lons and lats from x, y in projection coordinates.""" h = float(geos_area.proj_dict['h'] + geos_area.proj_dict['a']) / 1000 b__ = (geos_area.proj_dict['a'] / float(geos_area.proj_dict['b'])) ** 2 sd = np.sqrt((h * np.cos(x) * np.cos(y)) ** 2 - (np.cos(y)**2 + b__ * np.sin(y)**2) * (h**2 - (float(geos_area.proj_dict['a']) / 1000)**2)) # sd = 0 sn = (h * np.cos(x) * np.cos(y) - sd) / (np.cos(y)**2 + b__ * np.sin(y)**2) s1 = h - sn * np.cos(x) * np.cos(y) s2 = sn * np.sin(x) * np.cos(y) s3 = -sn * np.sin(y) sxy = np.sqrt(s1**2 + s2**2) lons = np.rad2deg(np.arctan2(s2, s1)) + geos_area.proj_dict.get('lon_0', 0) lats = np.rad2deg(-np.arctan2(b__ * s3, sxy)) return lons, lats def get_geostationary_bounding_box(geos_area, nb_points=50): """Get the bbox in lon/lats of the valid pixels inside *geos_area*. Args: nb_points: Number of points on the polygon """ xmax, ymax = get_geostationary_angle_extent(geos_area) # generate points around the north hemisphere in satellite projection # make it a bit smaller so that we stay inside the valid area x = np.cos(np.linspace(-np.pi, 0, nb_points // 2)) * (xmax - 0.001) y = -np.sin(np.linspace(-np.pi, 0, nb_points // 2)) * (ymax - 0.001) # clip the projection coordinates to fit the area extent of geos_area ll_x, ll_y, ur_x, ur_y = (np.array(geos_area.area_extent) / float(geos_area.proj_dict['h'])) x = np.clip(np.concatenate([x, x[::-1]]), min(ll_x, ur_x), max(ll_x, ur_x)) y = np.clip(np.concatenate([y, -y]), min(ll_y, ur_y), max(ll_y, ur_y)) return _lonlat_from_geos_angle(x, y, geos_area) def get_area_slices(data_area, area_to_cover): """Compute the slice to read from an *area* based on an *area_to_cover*.""" if data_area.proj_dict['proj'] != 'geos': raise NotImplementedError('Only geos supported') # Intersection only required for two different projections if area_to_cover.proj_dict['proj'] == data_area.proj_dict['proj']: LOGGER.debug('Projections for data and slice areas are' ' identical: {}'.format(area_to_cover.proj_dict['proj'])) # Get xy coordinates llx, lly, urx, ury = area_to_cover.area_extent x, y = data_area.get_xy_from_proj_coords([llx, urx], [lly, ury]) return slice(x[0], x[1] + 1), slice(y[1], y[0] + 1) data_boundary = Boundary(*get_geostationary_bounding_box(data_area)) area_boundary = AreaDefBoundary(area_to_cover, 100) intersection = data_boundary.contour_poly.intersection( area_boundary.contour_poly) x, y = data_area.get_xy_from_lonlat(np.rad2deg(intersection.lon), np.rad2deg(intersection.lat)) return slice(min(x), max(x) + 1), slice(min(y), max(y) + 1) def get_sub_area(area, xslice, yslice): """Apply slices to the area_extent and size of the area.""" new_area_extent = ((area.pixel_upper_left[0] + (xslice.start - 0.5) * area.pixel_size_x), (area.pixel_upper_left[1] - (yslice.stop - 0.5) * area.pixel_size_y), (area.pixel_upper_left[0] + (xslice.stop - 0.5) * area.pixel_size_x), (area.pixel_upper_left[1] - (yslice.start - 0.5) * area.pixel_size_y)) return AreaDefinition(area.area_id, area.name, 
area.proj_id, area.proj_dict, xslice.stop - xslice.start, yslice.stop - yslice.start, new_area_extent) def unzip_file(filename): """Unzip the file if file is bzipped = ending with 'bz2'.""" if filename.endswith('bz2'): fdn, tmpfilepath = tempfile.mkstemp() LOGGER.info("Using temp file for BZ2 decompression: %s", tmpfilepath) # If in python 3, try pbzip2 if sys.version_info.major >= 3: pbzip = which('pbzip2') # Run external pbzip2 if pbzip is not None: n_thr = os.environ.get('OMP_NUM_THREADS') if n_thr: runner = [pbzip, '-dc', '-p'+str(n_thr), filename] else: runner = [pbzip, '-dc', filename] p = Popen(runner, stdout=PIPE, stderr=PIPE) stdout = BytesIO(p.communicate()[0]) status = p.returncode if status != 0: raise IOError("pbzip2 error '%s', failed, status=%d" % (filename, status)) with closing(os.fdopen(fdn, 'wb')) as ofpt: try: stdout.seek(0) shutil.copyfileobj(stdout, ofpt) except IOError: import traceback traceback.print_exc() LOGGER.info("Failed to read bzipped file %s", str(filename)) os.remove(tmpfilepath) raise return tmpfilepath # Otherwise, fall back to the original method bz2file = bz2.BZ2File(filename) with closing(os.fdopen(fdn, 'wb')) as ofpt: try: ofpt.write(bz2file.read()) except IOError: import traceback traceback.print_exc() LOGGER.info("Failed to read bzipped file %s", str(filename)) os.remove(tmpfilepath) return None return tmpfilepath return None def bbox(img): """Find the bounding box around nonzero elements in the given array. Copied from https://stackoverflow.com/a/31402351/5703449 . Returns: rowmin, rowmax, colmin, colmax """ rows = np.any(img, axis=1) cols = np.any(img, axis=0) rmin, rmax = np.where(rows)[0][[0, -1]] cmin, cmax = np.where(cols)[0][[0, -1]] return rmin, rmax, cmin, cmax def get_earth_radius(lon, lat, a, b): """Compute radius of the earth ellipsoid at the given longitude and latitude. Args: lon: Geodetic longitude (degrees) lat: Geodetic latitude (degrees) a: Semi-major axis of the ellipsoid (meters) b: Semi-minor axis of the ellipsoid (meters) Returns: Earth Radius (meters) """ geocent = pyproj.Proj(proj='geocent', a=a, b=b, units='m') latlong = pyproj.Proj(proj='latlong', a=a, b=b, units='m') x, y, z = pyproj.transform(latlong, geocent, lon, lat, 0.) return np.sqrt(x**2 + y**2 + z**2) def reduce_mda(mda, max_size=100): """Recursively remove arrays with more than `max_size` elements from the given metadata dictionary.""" reduced = {} for key, val in mda.items(): if isinstance(val, dict): reduced[key] = reduce_mda(val, max_size) elif not (isinstance(val, np.ndarray) and val.size > max_size): reduced[key] = val return reduced satpy-0.20.0/satpy/readers/vaisala_gld360.py000066400000000000000000000060111362525524100205720ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
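# ---------------------------------------------------------------------------
# Illustrative sketch (not part of satpy): get_earth_radius above converts a
# geodetic (lon, lat) on the ellipsoid surface to geocentric cartesian
# coordinates and takes the euclidean norm.  A worked example for the WGS84
# ellipsoid, assuming pyproj is installed:
import numpy as np
import pyproj

a, b = 6378137.0, 6356752.3142  # WGS84 semi-major / semi-minor axes (meters)
geocent = pyproj.Proj(proj='geocent', a=a, b=b, units='m')
latlong = pyproj.Proj(proj='latlong', a=a, b=b, units='m')

# At the equator the radius equals the semi-major axis ...
x, y, z = pyproj.transform(latlong, geocent, 0., 0., 0.)
assert abs(np.sqrt(x**2 + y**2 + z**2) - a) < 1e-3
# ... and at the poles it equals the semi-minor axis.
x, y, z = pyproj.transform(latlong, geocent, 0., 90., 0.)
assert abs(np.sqrt(x**2 + y**2 + z**2) - b) < 1e-3
# ---------------------------------------------------------------------------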
"""Vaisala Global Lightning Dataset 360 reader Vaisala Global Lightning Dataset GLD360 is data as a service that provides real-time lightning data for accurate and early detection and tracking of severe weather. The data provided is generated by a Vaisala owned and operated world-wide lightning detection sensor network. References: - [GLD360] https://www.vaisala.com/en/products/data-subscriptions-and-reports/data-sets/gld360 """ import logging import pandas as pd import dask.array as da import xarray as xr from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class VaisalaGLD360TextFileHandler(BaseFileHandler): """ASCII reader for Vaisala GDL360 data.""" def __init__(self, filename, filename_info, filetype_info): super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info) names = ['date', 'time', 'latitude', 'longitude', 'power', 'unit'] types = ['str', 'str', 'float', 'float', 'float', 'str'] dtypes = dict(zip(names, types)) # Combine 'date' and 'time' into a datetime object parse_dates = {'datetime': ['date', 'time']} self.data = pd.read_csv(filename, delim_whitespace=True, header=None, names=names, dtype=dtypes, parse_dates=parse_dates) @property def start_time(self): return self.data['datetime'].iloc[0] @property def end_time(self): return self.data['datetime'].iloc[-1] def get_dataset(self, dataset_id, dataset_info): """Load a dataset.""" xarr = xr.DataArray(da.from_array(self.data[dataset_id.name], chunks=CHUNK_SIZE), dims=["y"]) # Add time, longitude, and latitude as non-dimensional y-coordinates xarr['time'] = ('y', self.data['datetime']) xarr['longitude'] = ('y', self.data['longitude']) xarr['latitude'] = ('y', self.data['latitude']) if dataset_id.name == 'power': # Check that units in the file match the unit specified in the # reader yaml-file if not (self.data.unit == dataset_info['units']).all(): raise ValueError('Inconsistent units found in file!') xarr.attrs.update(dataset_info) return xarr satpy-0.20.0/satpy/readers/viirs_compact.py000066400000000000000000000460551362525524100207510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Compact viirs format. This is a reader for the Compact VIIRS format shipped on Eumetcast for the VIIRS SDR. The format is compressed in multiple ways, notably by shipping only tie-points for geographical data. The interpolation of this data is done using dask operations, so it should be relatively performant. For more information on this format, the reader can refer to the `Compact VIIRS SDR Product Format User Guide` that can be found on this EARS_ page. .. 
_EARS: https://www.eumetsat.int/website/home/Data/RegionalDataServiceEARS/EARSVIIRS/index.html """ import logging from datetime import datetime, timedelta import h5py import numpy as np import xarray as xr import dask.array as da from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy.utils import angle2xyz, lonlat2xyz, xyz2angle, xyz2lonlat from satpy import CHUNK_SIZE try: import tables except ImportError: tables = None chans_dict = {"M01": "M1", "M02": "M2", "M03": "M3", "M04": "M4", "M05": "M5", "M06": "M6", "M07": "M7", "M08": "M8", "M09": "M9", "M10": "M10", "M11": "M11", "M12": "M12", "M13": "M13", "M14": "M14", "M15": "M15", "M16": "M16", "DNB": "DNB"} logger = logging.getLogger(__name__) c = 299792458 # m.s-1 h = 6.6260755e-34 # m2kg.s-1 k = 1.380658e-23 # m2kg.s-2.K-1 short_names = {'NPP': 'Suomi-NPP', 'J01': 'NOAA-20', 'J02': 'NOAA-21'} class VIIRSCompactFileHandler(BaseFileHandler): """A file handler class for VIIRS compact format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(VIIRSCompactFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") self.finfo = filename_info self.lons = None self.lats = None if filetype_info['file_type'] == 'compact_m': self.ch_type = 'MOD' elif filetype_info['file_type'] == 'compact_dnb': self.ch_type = 'DNB' else: raise IOError('Compact Viirs file type not recognized.') geo_data = self.h5f["Data_Products"]["VIIRS-%s-GEO" % self.ch_type]["VIIRS-%s-GEO_Gran_0" % self.ch_type] self.min_lat = geo_data.attrs['South_Bounding_Coordinate'].item() self.max_lat = geo_data.attrs['North_Bounding_Coordinate'].item() self.min_lon = geo_data.attrs['West_Bounding_Coordinate'].item() self.max_lon = geo_data.attrs['East_Bounding_Coordinate'].item() self.switch_to_cart = ((abs(self.max_lon - self.min_lon) > 90) or (max(abs(self.min_lat), abs(self.max_lat)) > 60)) self.scans = self.h5f["All_Data"]["NumberOfScans"][0] self.geostuff = self.h5f["All_Data"]['VIIRS-%s-GEO_All' % self.ch_type] for key in self.h5f["All_Data"].keys(): if key.startswith("VIIRS") and key.endswith("SDR_All"): channel = key.split('-')[1] break # FIXME: this supposes there is only one tiepoint zone in the # track direction self.scan_size = self.h5f["All_Data/VIIRS-%s-SDR_All" % channel].attrs["TiePointZoneSizeTrack"].item() self.track_offset = self.h5f["All_Data/VIIRS-%s-SDR_All" % channel].attrs["PixelOffsetTrack"] self.scan_offset = self.h5f["All_Data/VIIRS-%s-SDR_All" % channel].attrs["PixelOffsetScan"] try: self.group_locations = self.geostuff[ "TiePointZoneGroupLocationScanCompact"][()] except KeyError: self.group_locations = [0] self.tpz_sizes = da.from_array(self.h5f["All_Data/VIIRS-%s-SDR_All" % channel].attrs["TiePointZoneSizeScan"], chunks=1) if len(self.tpz_sizes.shape) == 2: if self.tpz_sizes.shape[1] != 1: raise NotImplementedError("Can't handle 2 dimensional tiepoint zones.") self.tpz_sizes = self.tpz_sizes.squeeze(1) self.nb_tpzs = self.geostuff["NumberOfTiePointZonesScan"] self.c_align = da.from_array(self.geostuff["AlignmentCoefficient"], chunks=tuple(self.nb_tpzs)) self.c_exp = da.from_array(self.geostuff["ExpansionCoefficient"], chunks=tuple(self.nb_tpzs)) self.nb_tpzs = da.from_array(self.nb_tpzs, chunks=1) self._expansion_coefs = None self.cache = {} self.mda = {} short_name = np2str(self.h5f.attrs['Platform_Short_Name']) self.mda['platform_name'] = short_names.get(short_name, short_name) self.mda['sensor'] = 'viirs' def 
get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading %s.', key.name) if key.name in chans_dict: m_data = self.read_dataset(key, info) else: m_data = self.read_geo(key, info) m_data.attrs.update(info) return m_data def get_bounding_box(self): """Get the bounding box of the data.""" for key in self.h5f["Data_Products"].keys(): if key.startswith("VIIRS") and key.endswith("GEO"): lats = self.h5f["Data_Products"][key][ key + '_Gran_0'].attrs['G-Ring_Latitude'] lons = self.h5f["Data_Products"][key][ key + '_Gran_0'].attrs['G-Ring_Longitude'] break else: raise KeyError('Cannot find bounding coordinates!') return lons.ravel(), lats.ravel() @property def start_time(self): """Get the start time.""" return self.finfo['start_time'] @property def end_time(self): """Get the end time.""" end_time = datetime.combine(self.start_time.date(), self.finfo['end_time'].time()) if end_time < self.start_time: end_time += timedelta(days=1) return end_time def read_geo(self, key, info): """Read angles.""" pairs = {('satellite_azimuth_angle', 'satellite_zenith_angle'): ("SatelliteAzimuthAngle", "SatelliteZenithAngle"), ('solar_azimuth_angle', 'solar_zenith_angle'): ("SolarAzimuthAngle", "SolarZenithAngle"), ('dnb_solar_azimuth_angle', 'dnb_solar_zenith_angle'): ("SolarAzimuthAngle", "SolarZenithAngle"), ('dnb_lunar_azimuth_angle', 'dnb_lunar_zenith_angle'): ("LunarAzimuthAngle", "LunarZenithAngle"), } if self.lons is None or self.lats is None: self.lons, self.lats = self.navigate() for pair, fkeys in pairs.items(): if key.name in pair: if (self.cache.get(pair[0]) is None or self.cache.get(pair[1]) is None): angles = self.angles(*fkeys) self.cache[pair[0]], self.cache[pair[1]] = angles if key.name == pair[0]: return xr.DataArray(self.cache[pair[0]], name=key.name, attrs=self.mda, dims=('y', 'x')) else: return xr.DataArray(self.cache[pair[1]], name=key.name, attrs=self.mda, dims=('y', 'x')) if info.get('standard_name') in ['latitude', 'longitude']: if self.lons is None or self.lats is None: self.lons, self.lats = self.navigate() mda = self.mda.copy() mda.update(info) if info['standard_name'] == 'longitude': return xr.DataArray(self.lons, attrs=mda, dims=('y', 'x')) else: return xr.DataArray(self.lats, attrs=mda, dims=('y', 'x')) if key.name == 'dnb_moon_illumination_fraction': mda = self.mda.copy() mda.update(info) return xr.DataArray(da.from_array(self.geostuff["MoonIllumFraction"]), attrs=info) def read_dataset(self, dataset_key, info): """Read a dataset.""" h5f = self.h5f channel = chans_dict[dataset_key.name] chan_dict = dict([(key.split("-")[1], key) for key in h5f["All_Data"].keys() if key.startswith("VIIRS")]) h5rads = h5f["All_Data"][chan_dict[channel]]["Radiance"] chunks = h5rads.chunks or CHUNK_SIZE rads = xr.DataArray(da.from_array(h5rads, chunks=chunks), name=dataset_key.name, dims=['y', 'x']).astype(np.float32) h5attrs = h5rads.attrs scans = h5f["All_Data"]["NumberOfScans"][0] rads = rads[:scans * 16, :] # if channel in ("M9", ): # arr = rads[:scans * 16, :].astype(np.float32) # arr[arr > 65526] = np.nan # arr = np.ma.masked_array(arr, mask=arr_mask) # else: # arr = np.ma.masked_greater(rads[:scans * 16, :].astype(np.float32), # 65526) rads = rads.where(rads <= 65526) try: rads = xr.where(rads <= h5attrs['Threshold'], rads * h5attrs['RadianceScaleLow'] + h5attrs['RadianceOffsetLow'], rads * h5attrs['RadianceScaleHigh'] + h5attrs['RadianceOffsetHigh']) except (KeyError, AttributeError): logger.info("Missing attribute for scaling of %s.", channel) pass unit = "W m-2 sr-1 μm-1" if 
dataset_key.calibration == 'counts': raise NotImplementedError("Can't get counts from this data") if dataset_key.calibration in ['reflectance', 'brightness_temperature']: # do calibrate try: # First guess: VIS or NIR data a_vis = h5attrs['EquivalentWidth'] b_vis = h5attrs['IntegratedSolarIrradiance'] dse = h5attrs['EarthSunDistanceNormalised'] rads *= 100 * np.pi * a_vis / b_vis * (dse**2) unit = "%" except KeyError: # Maybe it's IR data? try: a_ir = h5attrs['BandCorrectionCoefficientA'] b_ir = h5attrs['BandCorrectionCoefficientB'] lambda_c = h5attrs['CentralWaveLength'] rads *= 1e6 rads = (h * c) / (k * lambda_c * np.log(1 + (2 * h * c ** 2) / ((lambda_c ** 5) * rads))) rads *= a_ir rads += b_ir unit = "K" except KeyError: logger.warning("Calibration failed.") elif dataset_key.calibration != 'radiance': raise ValueError("Calibration parameter should be radiance, " "reflectance or brightness_temperature") rads = rads.clip(min=0) rads.attrs = self.mda rads.attrs['units'] = unit return rads def expand(self, data, coefs): """Perform the expansion in numpy domain.""" data = data.reshape(data.shape[:-1]) coefs = coefs.reshape(self.scans, self.scan_size, data.shape[1] - 1, -1, 4) coef_a = coefs[:, :, :, :, 0] coef_b = coefs[:, :, :, :, 1] coef_c = coefs[:, :, :, :, 2] coef_d = coefs[:, :, :, :, 3] data_a = data[:self.scans * 2:2, np.newaxis, :-1, np.newaxis] data_b = data[:self.scans * 2:2, np.newaxis, 1:, np.newaxis] data_c = data[1:self.scans * 2:2, np.newaxis, 1:, np.newaxis] data_d = data[1:self.scans * 2:2, np.newaxis, :-1, np.newaxis] fdata = (coef_a * data_a + coef_b * data_b + coef_d * data_d + coef_c * data_c) return fdata.reshape(self.scans * self.scan_size, -1) def expand_angle_and_nav(self, arrays): """Expand angle and navigation datasets.""" res = [] for array in arrays: res.append(da.map_blocks(self.expand, array[:, :, np.newaxis], self.expansion_coefs, dtype=array.dtype, drop_axis=2, chunks=self.expansion_coefs.chunks[:-1])) return res def get_coefs(self, c_align, c_exp, tpz_size, nb_tpz, v_track): """Compute the coeffs in numpy domain.""" nties = nb_tpz.item() tpz_size = tpz_size.item() v_scan = (np.arange(nties * tpz_size) % tpz_size + self.scan_offset) / tpz_size s_scan, s_track = np.meshgrid(v_scan, v_track) s_track = s_track.reshape(self.scans, self.scan_size, nties, tpz_size) s_scan = s_scan.reshape(self.scans, self.scan_size, nties, tpz_size) c_align = c_align[np.newaxis, np.newaxis, :, np.newaxis] c_exp = c_exp[np.newaxis, np.newaxis, :, np.newaxis] a_scan = s_scan + s_scan * (1 - s_scan) * c_exp + s_track * ( 1 - s_track) * c_align a_track = s_track coef_a = (1 - a_track) * (1 - a_scan) coef_b = (1 - a_track) * a_scan coef_d = a_track * (1 - a_scan) coef_c = a_track * a_scan res = np.stack([coef_a, coef_b, coef_c, coef_d], axis=4).reshape(self.scans * self.scan_size, -1, 4) return res @property def expansion_coefs(self): """Compute the expansion coefficients.""" if self._expansion_coefs is not None: return self._expansion_coefs v_track = (np.arange(self.scans * self.scan_size) % self.scan_size + self.track_offset) / self.scan_size self.tpz_sizes = self.tpz_sizes.persist() self.nb_tpzs = self.nb_tpzs.persist() col_chunks = (self.tpz_sizes * self.nb_tpzs).compute() self._expansion_coefs = da.map_blocks(self.get_coefs, self.c_align, self.c_exp, self.tpz_sizes, self.nb_tpzs, dtype=np.float64, v_track=v_track, new_axis=[0, 2], chunks=(self.scans * self.scan_size, tuple(col_chunks), 4)) return self._expansion_coefs def navigate(self): """Generate the navigation datasets.""" 
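# The tie-point longitudes and latitudes read below are expanded to the
# full pixel grid with the alignment/expansion coefficients; when the
# granule spans the date line or reaches high latitudes (the
# ``switch_to_cart`` flag computed in ``__init__``), the interpolation is
# done on cartesian (x, y, z) vectors and converted back to lon/lat
# afterwards to avoid wrap-around artefacts.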
shape = self.geostuff['Longitude'].shape hchunks = (self.nb_tpzs + 1).compute() chunks = (shape[0], tuple(hchunks)) lon = da.from_array(self.geostuff["Longitude"], chunks=chunks) lat = da.from_array(self.geostuff["Latitude"], chunks=chunks) if self.switch_to_cart: arrays = lonlat2xyz(lon, lat) else: arrays = (lon, lat) expanded = self.expand_angle_and_nav(arrays) if self.switch_to_cart: return xyz2lonlat(*expanded) return expanded def angles(self, azi_name, zen_name): """Generate the angle datasets.""" shape = self.geostuff['Longitude'].shape hchunks = (self.nb_tpzs + 1).compute() chunks = (shape[0], tuple(hchunks)) azi = self.geostuff[azi_name] zen = self.geostuff[zen_name] switch_to_cart = ((np.max(azi) - np.min(azi) > 5) or (np.min(zen) < 10) or (max(abs(self.min_lat), abs(self.max_lat)) > 80)) azi = da.from_array(azi, chunks=chunks) zen = da.from_array(zen, chunks=chunks) if switch_to_cart: arrays = convert_from_angles(azi, zen) else: arrays = (azi, zen) expanded = self.expand_angle_and_nav(arrays) if switch_to_cart: return convert_to_angles(*expanded) return expanded def convert_from_angles(azi, zen): """Convert the angles to cartesian coordinates.""" x, y, z, = angle2xyz(azi, zen) # Conversion to ECEF is recommended by the provider, but no significant # difference has been seen. # x, y, z = (-np.sin(lon) * x + np.cos(lon) * y, # -np.sin(lat) * np.cos(lon) * x - np.sin(lat) * np.sin(lon) * y + np.cos(lat) * z, # np.cos(lat) * np.cos(lon) * x + np.cos(lat) * np.sin(lon) * y + np.sin(lat) * z) return x, y, z def convert_to_angles(x, y, z): """Convert the cartesian coordinates to angles.""" # Conversion to ECEF is recommended by the provider, but no significant # difference has been seen. # x, y, z = (-np.sin(lon) * x - np.sin(lat) * np.cos(lon) * y + np.cos(lat) * np.cos(lon) * z, # np.cos(lon) * x - np.sin(lat) * np.sin(lon) * y + np.cos(lat) * np.sin(lon) * z, # np.cos(lat) * y + np.sin(lat) * z) azi, zen = xyz2angle(x, y, z, acos=True) return azi, zen def expand_arrays(arrays, scans, c_align, c_exp, scan_size=16, tpz_size=16, nties=200, track_offset=0.5, scan_offset=0.5): """Expand *data* according to alignment and expansion.""" nties = nties.item() tpz_size = tpz_size.item() s_scan, s_track = da.meshgrid(da.arange(nties * tpz_size), da.arange(scans * scan_size)) s_track = (s_track.reshape(scans, scan_size, nties, tpz_size) % scan_size + track_offset) / scan_size s_scan = (s_scan.reshape(scans, scan_size, nties, tpz_size) % tpz_size + scan_offset) / tpz_size a_scan = s_scan + s_scan * (1 - s_scan) * c_exp + s_track * ( 1 - s_track) * c_align a_track = s_track expanded = [] coef_a = (1 - a_track) * (1 - a_scan) coef_b = (1 - a_track) * a_scan coef_d = a_track * (1 - a_scan) coef_c = a_track * a_scan for data in arrays: data_a = data[:scans * 2:2, np.newaxis, :-1, np.newaxis] data_b = data[:scans * 2:2, np.newaxis, 1:, np.newaxis] data_c = data[1:scans * 2:2, np.newaxis, 1:, np.newaxis] data_d = data[1:scans * 2:2, np.newaxis, :-1, np.newaxis] fdata = (coef_a * data_a + coef_b * data_b + coef_d * data_d + coef_c * data_c) expanded.append(fdata.reshape(scans * scan_size, nties * tpz_size)) return expanded satpy-0.20.0/satpy/readers/viirs_edr_active_fires.py000066400000000000000000000126231362525524100226120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """VIIRS Active Fires reader. This module implements readers for VIIRS Active Fires NetCDF and ASCII files. """ from satpy.readers.netcdf_utils import NetCDF4FileHandler from satpy.readers.file_handlers import BaseFileHandler import dask.dataframe as dd import xarray as xr # map platform attributes to Oscar standard name PLATFORM_MAP = { "NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21" } class VIIRSActiveFiresFileHandler(NetCDF4FileHandler): """NetCDF4 reader for VIIRS Active Fires.""" def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None): """Open and perform initial investigation of NetCDF file.""" super(VIIRSActiveFiresFileHandler, self).__init__( filename, filename_info, filetype_info, auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs) self.prefix = filetype_info.get('variable_prefix') def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray. Args: dsid: Dataset ID dsinfo: Dataset information Returns: Dask DataArray: Data """ key = dsinfo.get('file_key', dsid.name).format(variable_prefix=self.prefix) data = self[key] # rename "phoney dims" data = data.rename(dict(zip(data.dims, ['y', 'x']))) # handle attributes from YAML for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] if isinstance(data.attrs.get('flag_meanings'), str): data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') # use more common CF standard units if data.attrs.get('units') == 'kelvins': data.attrs['units'] = 'K' data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") data.attrs["sensor"] = "VIIRS" return data @property def start_time(self): """Get first date/time when observations were recorded.""" return self.filename_info['start_time'] @property def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info.get('end_time', self.start_time) @property def sensor_name(self): """Name of sensor for this file.""" return self["sensor"] @property def platform_name(self): """Name of platform/satellite for this file.""" return self["platform_name"] class VIIRSActiveFiresTextFileHandler(BaseFileHandler): """ASCII reader for VIIRS Active Fires.""" def __init__(self, filename, filename_info, filetype_info): """Make sure the file path is valid and then read the data into a Dask DataFrame.
Args: filename: Filename filename_info: Filename information filetype_info: Filetype information """ skip_rows = filetype_info.get('skip_rows', 15) columns = filetype_info['columns'] self.file_content = dd.read_csv(filename, skiprows=skip_rows, header=None, names=columns) super(VIIRSActiveFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.platform_name = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray.""" ds = self[dsid.name].to_dask_array(lengths=True) data = xr.DataArray(ds, dims=("y",), attrs={"platform_name": self.platform_name, "sensor": "VIIRS"}) for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] if isinstance(data.attrs.get('flag_meanings'), str): data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') return data @property def start_time(self): """Get first date/time when observations were recorded.""" return self.filename_info['start_time'] @property def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info.get('end_time', self.start_time) def __getitem__(self, key): """Get file content for 'key'.""" return self.file_content[key] def __contains__(self, item): """Check if variable is in current file.""" return item in self.file_content satpy-0.20.0/satpy/readers/viirs_edr_flood.py000066400000000000000000000065121362525524100212520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
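# ---------------------------------------------------------------------------
# Illustrative sketch (not part of satpy): VIIRSActiveFiresTextFileHandler
# above delegates parsing to dask.dataframe - the first rows of the EDR ASCII
# product are header lines to skip, and the rest is plain CSV whose column
# names come from the reader YAML.  A minimal standalone version with
# hypothetical column names and a hypothetical input file name:
import dask.dataframe as dd
import xarray as xr

columns = ['latitude', 'longitude', 'T13', 'confidence_pct', 'power']  # hypothetical
df = dd.read_csv('AFEDR_npp_sample.txt',  # hypothetical file name
                 skiprows=15, header=None, names=columns)

# One column becomes a one-dimensional DataArray along 'y', as in get_dataset:
power = xr.DataArray(df['power'].to_dask_array(lengths=True), dims=('y',))
# ---------------------------------------------------------------------------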
"""Interface to VIIRS flood product.""" from satpy.readers.hdf4_utils import HDF4FileHandler from pyresample import geometry import numpy as np class VIIRSEDRFlood(HDF4FileHandler): """VIIRS EDR Flood-product handler for HDF4 files.""" @property def start_time(self): """Get start time.""" return self.filename_info['start_time'] @property def end_time(self): """Get end time.""" return self.filename_info.get('end_time', self.start_time) @property def sensor_name(self): """Get sensor name.""" sensor = self['/attr/SensorIdentifyCode'] if isinstance(sensor, np.ndarray): return str(sensor.astype(str)).lower() return sensor.lower() @property def platform_name(self): """Get platform name.""" platform_name = self['/attr/Satellitename'] if isinstance(platform_name, np.ndarray): return str(platform_name.astype(str)).lower() return platform_name.lower() def get_metadata(self, data, ds_info): """Get metadata.""" metadata = {} metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ 'sensor': self.sensor_name, 'platform_name': self.platform_name, 'start_time': self.start_time, 'end_time': self.end_time, }) return metadata def get_dataset(self, ds_id, ds_info): """Get dataset.""" data = self[ds_id.name] data.attrs = self.get_metadata(data, ds_info) fill = data.attrs.pop('_Fillvalue') offset = data.attrs.get('add_offset') scale_factor = data.attrs.get('scale_factor') data = data.where(data != fill) if scale_factor is not None and offset is not None: data *= scale_factor data += offset return data def get_area_def(self, ds_id): """Get area definition.""" data = self[ds_id.name] proj_dict = { 'proj': 'latlong', 'datum': 'WGS84', 'ellps': 'WGS84', 'no_defs': True } area_extent = [data.attrs.get('ProjectionMinLongitude'), data.attrs.get('ProjectionMinLatitude'), data.attrs.get('ProjectionMaxLongitude'), data.attrs.get('ProjectionMaxLatitude')] area = geometry.AreaDefinition( 'viirs_flood_area', 'name_of_proj', 'id_of_proj', proj_dict, int(self.filename_info['dim0']), int(self.filename_info['dim1']), np.asarray(area_extent) ) return area satpy-0.20.0/satpy/readers/viirs_l1b.py000066400000000000000000000241321362525524100177710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Interface to VIIRS L1B format.""" import logging from datetime import datetime import numpy as np from satpy.readers.netcdf_utils import NetCDF4FileHandler LOG = logging.getLogger(__name__) class VIIRSL1BFileHandler(NetCDF4FileHandler): """VIIRS L1B File Reader.""" def _parse_datetime(self, datestr): """Parse datetime.""" return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") @property def start_orbit_number(self): """Get start orbit number.""" try: return int(self['/attr/orbit_number']) except KeyError: return int(self['/attr/OrbitNumber']) @property def end_orbit_number(self): """Get end orbit number.""" try: return int(self['/attr/orbit_number']) except KeyError: return int(self['/attr/OrbitNumber']) @property def platform_name(self): """Get platform name.""" try: res = self.get('/attr/platform', self.filename_info['platform_shortname']) except KeyError: res = 'Unknown' return { 'JPSS-1': 'NOAA-20', 'NP': 'Suomi-NPP', 'J1': 'NOAA-20', 'J2': 'NOAA-21', 'JPSS-2': 'NOAA-21', }.get(res, res) @property def sensor_name(self): """Get sensor name.""" res = self['/attr/instrument'] if isinstance(res, np.ndarray): return str(res.astype(str)) else: return res def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" if factors is None or factors[0] is None: factors = [1, 0] if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors factors = np.array(factors) if file_units == "W cm-2 sr-1" and output_units == "W m-2 sr-1": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors[::2] = np.where(factors[::2] != -999, factors[::2] * 10000.0, -999) factors[1::2] = np.where(factors[1::2] != -999, factors[1::2] * 10000.0, -999) return factors elif file_units == "1" and output_units == "%": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors[::2] = np.where(factors[::2] != -999, factors[::2] * 100.0, -999) factors[1::2] = np.where(factors[1::2] != -999, factors[1::2] * 100.0, -999) return factors else: return factors def get_shape(self, ds_id, ds_info): """Get shape.""" var_path = ds_info.get('file_key', 'observation_data/{}'.format(ds_id.name)) return self.get(var_path + '/shape', 1) @property def start_time(self): """Get start time.""" return self._parse_datetime(self['/attr/time_coverage_start']) @property def end_time(self): """Get end time.""" return self._parse_datetime(self['/attr/time_coverage_end']) def _get_dataset_file_units(self, dataset_id, ds_info, var_path): file_units = ds_info.get('file_units') if file_units is None: file_units = self.get(var_path + '/attr/units') # they were almost completely CF compliant... 
if file_units == "none": file_units = "1" if dataset_id.calibration == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': rad_units_path = var_path + '/attr/radiance_units' if rad_units_path in self: if file_units is None: file_units = self[var_path + '/attr/radiance_units'] if file_units == 'Watts/meter^2/steradian/micrometer': file_units = 'W m-2 um-1 sr-1' elif ds_info.get('units') == '%' and file_units is None: # v1.1 and above of level 1 processing removed 'units' attribute # for all reflectance channels file_units = "1" return file_units def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): if dataset_id.calibration == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': rad_units_path = var_path + '/attr/radiance_units' if rad_units_path in self: # we are getting a reflectance band but we want the radiance values # special scaling parameters scale_factor = self[var_path + '/attr/radiance_scale_factor'] scale_offset = self[var_path + '/attr/radiance_add_offset'] else: # we are getting a btemp band but we want the radiance values # these are stored directly in the primary variable scale_factor = self[var_path + '/attr/scale_factor'] scale_offset = self[var_path + '/attr/add_offset'] valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] elif ds_info.get('units') == '%': # normal reflectance valid_min = self[var_path + '/attr/valid_min'] valid_max = self[var_path + '/attr/valid_max'] scale_factor = self[var_path + '/attr/scale_factor'] scale_offset = self[var_path + '/attr/add_offset'] elif ds_info.get('units') == 'K': # normal brightness temperature # use a special LUT to get the actual values lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') # we get the BT values from a look up table using the scaled radiance integers valid_min = self[lut_var_path + '/attr/valid_min'] valid_max = self[lut_var_path + '/attr/valid_max'] scale_factor = scale_offset = None else: valid_min = self.get(var_path + '/attr/valid_min') valid_max = self.get(var_path + '/attr/valid_max') scale_factor = self.get(var_path + '/attr/scale_factor') scale_offset = self.get(var_path + '/attr/add_offset') return valid_min, valid_max, scale_factor, scale_offset def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id.name)) shape = self.get_shape(dataset_id, ds_info) file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) # Get extra metadata if '/dimension/number_of_scans' in self: rows_per_scan = int(shape[0] / self['/dimension/number_of_scans']) ds_info.setdefault('rows_per_scan', rows_per_scan) i = getattr(self[var_path], 'attrs', {}) i.update(ds_info) i.update(dataset_id.to_dict()) i.update({ "shape": shape, "units": ds_info.get("units", file_units), "file_units": file_units, "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) i.update(dataset_id.to_dict()) return i def get_dataset(self, dataset_id, ds_info): """Get dataset.""" var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id.name)) metadata = self.get_metadata(dataset_id, ds_info) shape = metadata['shape'] valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path) if dataset_id.calibration == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': data = self[var_path] elif ds_info.get('units') == '%': data = 
self[var_path]
        elif ds_info.get('units') == 'K':
            # normal brightness temperature
            # use a special LUT to get the actual values
            lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut')
            data = self[var_path]
            # we get the BT values from a look up table using the scaled radiance integers
            # (use an explicit integer dtype; ``np.int`` is a deprecated alias of the builtin ``int``)
            index_arr = data.data.astype(np.int64)
            coords = data.coords
            data.data = self[lut_var_path].data[index_arr.ravel()].reshape(data.shape)
            data = data.assign_coords(**coords)
        elif shape == 1:
            data = self[var_path]
        else:
            data = self[var_path]
        data.attrs.update(metadata)
        if valid_min is not None and valid_max is not None:
            data = data.where((data >= valid_min) & (data <= valid_max))
        if data.attrs.get('units') in ['%', 'K', '1', 'W m-2 um-1 sr-1'] and \
                'flag_meanings' in data.attrs:
            # flag meanings don't mean anything anymore for these variables
            # these aren't category products
            data.attrs.pop('flag_meanings', None)
            data.attrs.pop('flag_values', None)
        factors = (scale_factor, scale_offset)
        factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units"))
        if factors[0] != 1 or factors[1] != 0:
            data *= factors[0]
            data += factors[1]
        # rename dimensions to correspond to satpy's 'y' and 'x' standard
        if 'number_of_lines' in data.dims:
            data = data.rename({'number_of_lines': 'y', 'number_of_pixels': 'x'})
        return data
satpy-0.20.0/satpy/readers/viirs_sdr.py000066400000000000000000000615711362525524100201130ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Interface to VIIRS SDR format.

This reader implements the support of VIIRS SDR files as produced by CSPP and CLASS.
It comprises two parts:

- A subclass of the YAMLFileReader class to allow handling all the files
- A filehandler class to implement the actual reading

Format documentation:

- http://npp.gsfc.nasa.gov/science/sciencedocuments/082012/474-00001-03_CDFCBVolIII_RevC.pdf

"""
import logging
from datetime import datetime, timedelta
from glob import glob
import os.path

import numpy as np
import dask.array as da
import xarray as xr

from satpy.readers.hdf5_utils import HDF5FileHandler
from satpy.readers.yaml_reader import FileYAMLReader

NO_DATE = datetime(1958, 1, 1)
EPSILON_TIME = timedelta(days=2)
LOG = logging.getLogger(__name__)


def _get_invalid_info(granule_data):
    """Get a detailed report of the missing data.
N/A: not applicable MISS: required value missing at time of processing OBPT: onboard pixel trim (overlapping/bow-tie pixel removed during SDR processing) OGPT: on-ground pixel trim (overlapping/bow-tie pixel removed during EDR processing) ERR: error occurred during processing / non-convergence ELINT: ellipsoid intersect failed / instrument line-of-sight does not intersect the Earth’s surface VDNE: value does not exist / processing algorithm did not execute SOUB: scaled out-of-bounds / solution not within allowed range """ if issubclass(granule_data.dtype.type, np.integer): msg = ("na:" + str((granule_data == 65535).sum()) + " miss:" + str((granule_data == 65534).sum()) + " obpt:" + str((granule_data == 65533).sum()) + " ogpt:" + str((granule_data == 65532).sum()) + " err:" + str((granule_data == 65531).sum()) + " elint:" + str((granule_data == 65530).sum()) + " vdne:" + str((granule_data == 65529).sum()) + " soub:" + str((granule_data == 65528).sum())) elif issubclass(granule_data.dtype.type, np.floating): msg = ("na:" + str((granule_data == -999.9).sum()) + " miss:" + str((granule_data == -999.8).sum()) + " obpt:" + str((granule_data == -999.7).sum()) + " ogpt:" + str((granule_data == -999.6).sum()) + " err:" + str((granule_data == -999.5).sum()) + " elint:" + str((granule_data == -999.4).sum()) + " vdne:" + str((granule_data == -999.3).sum()) + " soub:" + str((granule_data == -999.2).sum())) return msg DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO', 'SVDNB': 'VIIRS-DNB-SDR', 'GITCO': 'VIIRS-IMG-GEO-TC', 'GIMGO': 'VIIRS-IMG-GEO', 'SVI01': 'VIIRS-I1-SDR', 'SVI02': 'VIIRS-I2-SDR', 'SVI03': 'VIIRS-I3-SDR', 'SVI04': 'VIIRS-I4-SDR', 'SVI05': 'VIIRS-I5-SDR', 'GMTCO': 'VIIRS-MOD-GEO-TC', 'GMODO': 'VIIRS-MOD-GEO', 'SVM01': 'VIIRS-M1-SDR', 'SVM02': 'VIIRS-M2-SDR', 'SVM03': 'VIIRS-M3-SDR', 'SVM04': 'VIIRS-M4-SDR', 'SVM05': 'VIIRS-M5-SDR', 'SVM06': 'VIIRS-M6-SDR', 'SVM07': 'VIIRS-M7-SDR', 'SVM08': 'VIIRS-M8-SDR', 'SVM09': 'VIIRS-M9-SDR', 'SVM10': 'VIIRS-M10-SDR', 'SVM11': 'VIIRS-M11-SDR', 'SVM12': 'VIIRS-M12-SDR', 'SVM13': 'VIIRS-M13-SDR', 'SVM14': 'VIIRS-M14-SDR', 'SVM15': 'VIIRS-M15-SDR', 'SVM16': 'VIIRS-M16-SDR', 'IVCDB': 'VIIRS-DualGain-Cal-IP' } class VIIRSSDRFileHandler(HDF5FileHandler): """VIIRS HDF5 File Reader.""" def __init__(self, filename, filename_info, filetype_info, use_tc=None, **kwargs): """Initialize file handler.""" self.datasets = filename_info['datasets'].split('-') self.use_tc = use_tc super(VIIRSSDRFileHandler, self).__init__(filename, filename_info, filetype_info) def __getitem__(self, item): """Get item.""" if '*' in item: # this is an aggregated field that can't easily be loaded, need to # join things together idx = 0 base_item = item item = base_item.replace('*', str(idx)) result = [] while True: try: res = super(VIIRSSDRFileHandler, self).__getitem__(item) result.append(res) except KeyError: # no more granule keys LOG.debug("Aggregated granule stopping on '%s'", item) break idx += 1 item = base_item.replace('*', str(idx)) return result else: return super(VIIRSSDRFileHandler, self).__getitem__(item) def _parse_datetime(self, datestr, timestr): try: datetime_str = datestr + timestr except TypeError: datetime_str = str(datestr.astype(str)) + str(timestr.astype(str)) time_val = datetime.strptime(datetime_str, '%Y%m%d%H%M%S.%fZ') if abs(time_val - NO_DATE) < EPSILON_TIME: # catch rare case when SDR files have incorrect date raise ValueError("Datetime invalid {}".format(time_val)) return time_val @property def start_time(self): """Get start time.""" dataset_group = 
DATASET_KEYS[self.datasets[0]] default_start_date = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningDate' default_start_time = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningTime' date_var_path = self.filetype_info.get('start_date', default_start_date).format(dataset_group=dataset_group) time_var_path = self.filetype_info.get('start_time', default_start_time).format(dataset_group=dataset_group) return self._parse_datetime(self[date_var_path], self[time_var_path]) @property def end_time(self): """Get end time.""" dataset_group = DATASET_KEYS[self.datasets[0]] default_end_date = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingDate' default_end_time = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingTime' date_var_path = self.filetype_info.get('end_date', default_end_date).format(dataset_group=dataset_group) time_var_path = self.filetype_info.get('end_time', default_end_time).format(dataset_group=dataset_group) return self._parse_datetime(self[date_var_path], self[time_var_path]) @property def start_orbit_number(self): """Get start orbit number.""" dataset_group = DATASET_KEYS[self.datasets[0]] default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningOrbitNumber' start_orbit_path = self.filetype_info.get('start_orbit', default).format(dataset_group=dataset_group) return int(self[start_orbit_path]) @property def end_orbit_number(self): """Get end orbit number.""" dataset_group = DATASET_KEYS[self.datasets[0]] default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingOrbitNumber' end_orbit_path = self.filetype_info.get('end_orbit', default).format(dataset_group=dataset_group) return int(self[end_orbit_path]) @property def platform_name(self): """Get platform name.""" default = '/attr/Platform_Short_Name' platform_path = self.filetype_info.get( 'platform_name', default).format(**self.filetype_info) platform_dict = {'NPP': 'Suomi-NPP', 'JPSS-1': 'NOAA-20', 'J01': 'NOAA-20', 'JPSS-2': 'NOAA-21', 'J02': 'NOAA-21'} return platform_dict.get(self[platform_path], self[platform_path]) @property def sensor_name(self): """Get sensor name.""" dataset_group = DATASET_KEYS[self.datasets[0]] default = 'Data_Products/{dataset_group}/attr/Instrument_Short_Name' sensor_path = self.filetype_info.get( 'sensor_name', default).format(dataset_group=dataset_group) return self[sensor_path].lower() def get_file_units(self, dataset_id, ds_info): """Get file units.""" file_units = ds_info.get("file_units") # Guess the file units if we need to (normally we would get this from # the file) if file_units is None: if dataset_id.calibration == 'radiance': if "dnb" in dataset_id.name.lower(): return 'W m-2 sr-1' else: return 'W cm-2 sr-1' elif dataset_id.calibration == 'reflectance': # CF compliant unit for dimensionless file_units = "1" elif dataset_id.calibration == 'brightness_temperature': file_units = "K" else: LOG.debug("Unknown units for file key '%s'", dataset_id) return file_units def scale_swath_data(self, data, scaling_factors): """Scale swath data using scaling factors and offsets. Multi-granule (a.k.a. aggregated) files will have more than the usual two values. 
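
        Example (illustrative): an aggregate of two granules stores the
        factors as ``[m0, b0, m1, b1]``; each ``(m, b)`` pair is repeated
        over the rows of its granule and applied as ``data * m + b``.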
""" num_grans = len(scaling_factors) // 2 gran_size = data.shape[0] // num_grans factors = scaling_factors.where(scaling_factors > -999) factors = factors.data.reshape((-1, 2)) factors = xr.DataArray(da.repeat(factors, gran_size, axis=0), dims=(data.dims[0], 'factors')) data = data * factors[:, 0] + factors[:, 1] return data def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors if factors is None: factors = xr.DataArray(da.from_array([1, 0], chunks=1)) factors = factors.where(factors != -999.) if file_units == "W cm-2 sr-1" and output_units == "W m-2 sr-1": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors = factors * 10000. return factors elif file_units == "1" and output_units == "%": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors = factors * 100. return factors else: return factors def _generate_file_key(self, ds_id, ds_info, factors=False): var_path = ds_info.get('file_key', 'All_Data/{dataset_group}_All/{calibration}') calibration = { 'radiance': 'Radiance', 'reflectance': 'Reflectance', 'brightness_temperature': 'BrightnessTemperature', }.get(ds_id.calibration) var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info['dataset_group']]) if ds_id.name in ['dnb_longitude', 'dnb_latitude']: if self.use_tc is True: return var_path + '_TC' elif self.use_tc is None and var_path + '_TC' in self.file_content: return var_path + '_TC' return var_path @staticmethod def expand_single_values(var, scans): """Expand single valued variable to full scan lengths.""" if scans.size == 1: return var else: expanded = np.repeat(var, scans) expanded.attrs = var.attrs expanded.rename({expanded.dims[0]: 'y'}) return expanded def concatenate_dataset(self, dataset_group, var_path): """Concatenate dataset.""" if 'I' in dataset_group: scan_size = 32 else: scan_size = 16 scans_path = 'All_Data/{dataset_group}_All/NumberOfScans' number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules' nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans' scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) start_scan = 0 data_chunks = [] scans = xr.DataArray(scans) variable = self[var_path] # check if these are single per-granule value if variable.size != scans.size: for gscans in scans.values: data_chunks.append(self[var_path].isel(y=slice(start_scan, start_scan + gscans * scan_size))) start_scan += scan_size * 48 return xr.concat(data_chunks, 'y') else: return self.expand_single_values(variable, scans) def mask_fill_values(self, data, ds_info): """Mask fill values.""" is_floating = np.issubdtype(data.dtype, np.floating) if is_floating: # If the data is a float then we mask everything <= -999.0 fill_max = float(ds_info.pop("fill_max_float", -999.0)) return data.where(data > fill_max) else: # If the data is an integer then we mask everything >= fill_min_int fill_min = int(ds_info.pop("fill_min_int", 65528)) return data.where(data < fill_min) def get_dataset(self, dataset_id, ds_info): """Get the dataset corresponding to *dataset_id*. 
The size of the return DataArray will be dependent on the number of scans actually sensed, and not necessarily the regular 768 scanlines that the file contains for each granule. To that end, the number of scans for each granule is read from: ``Data_Products/...Gran_x/N_Number_Of_Scans``. """ dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] if not dataset_group: return else: dataset_group = dataset_group[0] ds_info['dataset_group'] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) factor_var_path = ds_info.get("factors_key", var_path + "Factors") data = self.concatenate_dataset(dataset_group, var_path) data = self.mask_fill_values(data, ds_info) factors = self.get(factor_var_path) if factors is None: LOG.debug("No scaling factors found for %s", dataset_id) file_units = self.get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) factors = self.adjust_scaling_factors(factors, file_units, output_units) if factors is not None: data = self.scale_swath_data(data, factors) i = getattr(data, 'attrs', {}) i.update(ds_info) i.update({ "units": ds_info.get("units", file_units), "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) i.update(dataset_id.to_dict()) data.attrs.update(i) return data def get_bounding_box(self): """Get the bounding box of this file.""" from pyproj import Geod geod = Geod(ellps='WGS84') dataset_group = DATASET_KEYS[self.datasets[0]] idx = 0 lons_ring = None lats_ring = None while True: path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/' prefix = path.format(dataset_group=dataset_group, idx=idx) try: lats = self.file_content[prefix + 'G-Ring_Latitude'] lons = self.file_content[prefix + 'G-Ring_Longitude'] if lons_ring is None: lons_ring = lons lats_ring = lats else: prev_lon = lons_ring[0] prev_lat = lats_ring[0] dists = list(geod.inv(lon, lat, prev_lon, prev_lat)[2] for lon, lat in zip(lons, lats)) first_idx = np.argmin(dists) if first_idx == 2 and len(lons) == 8: lons_ring = np.hstack((lons[:3], lons_ring[:-2], lons[4:])) lats_ring = np.hstack((lats[:3], lats_ring[:-2], lats[4:])) else: raise NotImplementedError("Don't know how to handle G-Rings of length %d" % len(lons)) except KeyError: break idx += 1 return lons_ring, lats_ring def available_datasets(self, configured_datasets=None): """Generate dataset info and their availablity. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. """ for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info continue dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] if dataset_group: yield True, ds_info elif is_avail is None: yield is_avail, ds_info def split_desired_other(fhs, req_geo, rem_geo): """Split the provided filehandlers *fhs* into desired filehandlers and others.""" desired = [] other = [] for fh in fhs: if req_geo in fh.datasets: desired.append(fh) elif rem_geo in fh.datasets: other.append(fh) return desired, other class VIIRSSDRReader(FileYAMLReader): """Custom file reader for finding VIIRS SDR geolocation at runtime.""" def __init__(self, config_files, use_tc=None, **kwargs): """Initialize file reader and adjust geolocation preferences. Args: config_files (iterable): yaml config files passed to base class use_tc (boolean): If `True` use the terrain corrected files. 
If `False`, switch to non-TC files. If `None` (default), use TC if available, non-TC otherwise. """ super(VIIRSSDRReader, self).__init__(config_files, **kwargs) self.use_tc = use_tc def filter_filenames_by_info(self, filename_items): """Filter out file using metadata from the filenames. This sorts out the different lon and lat datasets depending on TC is desired or not. """ filename_items = list(filename_items) geo_keep = [] geo_del = [] for filename, filename_info in filename_items: filename_info['datasets'] = datasets = filename_info['datasets'].split('-') if ('GITCO' in datasets) or ('GMTCO' in datasets): if self.use_tc is False: geo_del.append(filename) else: geo_keep.append(filename) elif ('GIMGO' in datasets) or ('GMODO' in datasets): if self.use_tc is True: geo_del.append(filename) else: geo_keep.append(filename) if geo_keep: fdict = dict(filename_items) for to_del in geo_del: for dataset in ['GITCO', 'GMTCO', 'GIMGO', 'GMODO']: try: fdict[to_del]['datasets'].remove(dataset) except ValueError: pass if not fdict[to_del]['datasets']: del fdict[to_del] filename_items = fdict.items() for _filename, filename_info in filename_items: filename_info['datasets'] = '-'.join(filename_info['datasets']) return super(VIIRSSDRReader, self).filter_filenames_by_info(filename_items) def _load_from_geo_ref(self, dsid): """Load filenames from the N_GEO_Ref attribute of a dataset's file.""" file_handlers = self._get_file_handlers(dsid) if not file_handlers: return None fns = [] for fh in file_handlers: base_dir = os.path.dirname(fh.filename) try: # get the filename and remove the creation time # which is often wrong fn = fh['/attr/N_GEO_Ref'][:46] + '*.h5' fns.extend(glob(os.path.join(base_dir, fn))) # usually is non-terrain corrected file, add the terrain # corrected file too if fn[:5] == 'GIMGO': fn = 'GITCO' + fn[5:] elif fn[:5] == 'GMODO': fn = 'GMTCO' + fn[5:] else: continue fns.extend(glob(os.path.join(base_dir, fn))) except KeyError: LOG.debug("Could not load geo-reference information from {}".format(fh.filename)) return fns def _get_req_rem_geo(self, ds_info): """Find out which geolocation files are needed.""" if ds_info['dataset_groups'][0].startswith('GM'): if self.use_tc is False: req_geo = 'GMODO' rem_geo = 'GMTCO' else: req_geo = 'GMTCO' rem_geo = 'GMODO' elif ds_info['dataset_groups'][0].startswith('GI'): if self.use_tc is False: req_geo = 'GIMGO' rem_geo = 'GITCO' else: req_geo = 'GITCO' rem_geo = 'GIMGO' else: raise ValueError('Unknown dataset group %s' % ds_info['dataset_groups'][0]) return req_geo, rem_geo def get_right_geo_fhs(self, dsid, fhs): """Find the right geographical file handlers for given dataset ID *dsid*.""" ds_info = self.all_ids[dsid] req_geo, rem_geo = self._get_req_rem_geo(ds_info) desired, other = split_desired_other(fhs, req_geo, rem_geo) if desired: try: ds_info['dataset_groups'].remove(rem_geo) except ValueError: pass return desired else: return other def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] fhs = [fh for fh in self.file_handlers['generic_file'] if set(fh.datasets) & set(ds_info['dataset_groups'])] if not fhs: LOG.warning("Required file type '%s' not found or loaded for " "'%s'", ds_info['file_type'], dsid.name) else: if len(set(ds_info['dataset_groups']) & set(['GITCO', 'GIMGO', 'GMTCO', 'GMODO'])) > 1: fhs = self.get_right_geo_fhs(dsid, fhs) return fhs def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for `dsid`. 
Wraps the base class method in order to load geolocation files from the geo reference attribute in the datasets file. """ coords = super(VIIRSSDRReader, self)._get_coordinates_for_dataset_key(dsid) for c_id in coords: c_info = self.all_ids[c_id] # c_info['dataset_groups'] should be a list of 2 elements self._get_file_handlers(c_id) if len(c_info['dataset_groups']) == 1: # filtering already done continue try: req_geo, rem_geo = self._get_req_rem_geo(c_info) except ValueError: # DNB continue # check the dataset file for the geolocation filename geo_filenames = self._load_from_geo_ref(dsid) if not geo_filenames: c_info['dataset_groups'] = [rem_geo] else: # concatenate all values new_fhs = sum(self.create_filehandlers(geo_filenames).values(), []) desired, other = split_desired_other(new_fhs, req_geo, rem_geo) if desired: c_info['dataset_groups'].remove(rem_geo) else: c_info['dataset_groups'].remove(req_geo) return coords satpy-0.20.0/satpy/readers/virr_l1b.py000066400000000000000000000146731362525524100176300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to VIRR (Visible and Infra-Red Radiometer) level 1b format. The file format is HDF5. Important attributes: - Latitude - Longitude - SolarZenith - EV_Emissive - EV_RefSB - Emissive_Radiance_Offsets - Emissive_Radiance_Scales - RefSB_Cal_Coefficients - RefSB_Effective_Wavelength - Emmisive_Centroid_Wave_Number Supported satellites: - FY-3B and FY-3C. For more information: - https://www.wmo-sat.info/oscar/instruments/view/607. """ from datetime import datetime from satpy.readers.hdf5_utils import HDF5FileHandler from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp import numpy as np import dask.array as da import logging LOG = logging.getLogger(__name__) # PROVIDED BY NIGEL ATKINSON - 2013 # FY3B_REF_COEFFS = [ # 0.12640, -1.43200, #channel1# # 0.13530, -1.62360, #channel2# # 0.09193, -2.48207, #channel6# # 0.07480, -0.90980, #channel7# # 0.07590, -0.91080, #channel8# # 0.07460, -0.89520, #channel9# # 0.06300, -0.76280] #channel10# # CMA - 2015 - http://www.nsmc.org.cn/en/NSMC/Contents/100089.html FY3B_REF_COEFFS = [ 0.1264, -1.4320, 0.1353, -1.6236, 0.0919, -2.4821, 0.0938, -1.1494, 0.0857, -1.0280, 0.0803, -0.9636, 0.0630, -0.7628] class VIRR_L1B(HDF5FileHandler): """VIRR Level 1b reader.""" def __init__(self, filename, filename_info, filetype_info): """Open file and perform initial setup.""" super(VIRR_L1B, self).__init__(filename, filename_info, filetype_info) LOG.debug('day/night flag for {0}: {1}'.format(filename, self['/attr/Day Or Night Flag'])) self.geolocation_prefix = filetype_info['geolocation_prefix'] self.platform_id = filename_info['platform_id'] self.l1b_prefix = 'Data/' self.wave_number = 'Emissive_Centroid_Wave_Number' # Else filename_info['platform_id'] == FY3C. 
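        # FY-3B files store the science datasets at the file root (no 'Data/'
        # group) and spell the wave-number attribute 'Emmisive...' (sic), so
        # both defaults above are overridden below.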
if filename_info['platform_id'] == 'FY3B': self.l1b_prefix = '' self.wave_number = 'Emmisive_Centroid_Wave_Number' def get_dataset(self, dataset_id, ds_info): """Create DataArray from file content for `dataset_id`.""" file_key = self.geolocation_prefix + ds_info.get('file_key', dataset_id.name) if self.platform_id == 'FY3B': file_key = file_key.replace('Data/', '') data = self[file_key] band_index = ds_info.get('band_index') if band_index is not None: data = data[band_index] data = data.where((data >= self[file_key + '/attr/valid_range'][0]) & (data <= self[file_key + '/attr/valid_range'][1])) if 'Emissive' in file_key: slope = self._correct_slope(self[self.l1b_prefix + 'Emissive_Radiance_Scales']. data[:, band_index][:, np.newaxis]) intercept = self[self.l1b_prefix + 'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis] # Converts cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = self['/attr/' + self.wave_number][band_index] * 100 bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays data.data = da.from_array(bt_data, chunks=data.data.chunks) else: # new versions of pyspectral can do dask arrays data.data = bt_data elif 'RefSB' in file_key: if self.platform_id == 'FY3B': coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1) else: coeffs = self['/attr/RefSB_Cal_Coefficients'] slope = self._correct_slope(coeffs[0::2]) intercept = coeffs[1::2] data = data * slope[band_index] + intercept[band_index] else: slope = self._correct_slope(self[file_key + '/attr/Slope']) intercept = self[file_key + '/attr/Intercept'] data = data.where((data >= self[file_key + '/attr/valid_range'][0]) & (data <= self[file_key + '/attr/valid_range'][1])) data = data * slope + intercept new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))} data = data.rename(new_dims) # use lowercase sensor name to be consistent with the rest of satpy data.attrs.update({'platform_name': self['/attr/Satellite Name'], 'sensor': self['/attr/Sensor Identification Code'].lower()}) data.attrs.update(ds_info) units = self.get(file_key + '/attr/units') if units is not None and str(units).lower() != 'none': data.attrs.update({'units': self.get(file_key + '/attr/units')}) elif data.attrs.get('calibration') == 'reflectance': data.attrs.update({'units': '%'}) else: data.attrs.update({'units': '1'}) return data def _correct_slope(self, slope): # 0 slope is invalid. Note: slope can be a scalar or array. return da.where(slope == 0, 1, slope) @property def start_time(self): """Get starting observation time.""" start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z' return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ') @property def end_time(self): """Get ending observation time.""" end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') satpy-0.20.0/satpy/readers/xmlformat.py000066400000000000000000000133661362525524100201170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reads a format from an xml file to create dtypes and scaling factor arrays.""" from xml.etree.ElementTree import ElementTree import numpy as np VARIABLES = {} TYPEC = {"boolean": ">i1", "integer2": ">i2", "integer4": ">i4", "uinteger2": ">u2", "uinteger4": ">u4", } def process_delimiter(elt, ascii=False): """Process a 'delimiter' tag.""" del elt, ascii def process_field(elt, ascii=False): """Process a 'field' tag.""" # NOTE: if there is a variable defined in this field and it is different # from the default, we could change the value and restart. scale = np.uint8(1) if elt.get("type") == "bitfield" and not ascii: current_type = ">u" + str(int(elt.get("length")) // 8) scale = np.dtype(current_type).type(1) elif (elt.get("length") is not None): if ascii: add = 33 else: add = 0 current_type = "S" + str(int(elt.get("length")) + add) else: current_type = TYPEC[elt.get("type")] try: scale = (10 / float(elt.get("scaling-factor", "10").replace("^", "e"))) except ValueError: scale = (10 / np.array( elt.get("scaling-factor").replace("^", "e").split(","), dtype=np.float)) return ((elt.get("name"), current_type, scale)) def process_array(elt, ascii=False): """Process an 'array' tag.""" del ascii chld = list(elt) if len(chld) > 1: raise ValueError() chld = chld[0] try: name, current_type, scale = CASES[chld.tag](chld) size = None except ValueError: name, current_type, size, scale = CASES[chld.tag](chld) del name myname = elt.get("name") or elt.get("label") if elt.get("length").startswith("$"): length = int(VARIABLES[elt.get("length")[1:]]) else: length = int(elt.get("length")) if size is not None: return (myname, current_type, (length, ) + size, scale) else: return (myname, current_type, (length, ), scale) CASES = {"delimiter": process_delimiter, "field": process_field, "array": process_array, } def to_dtype(val): """Parse *val* to return a dtype.""" return np.dtype([i[:-1] for i in val]) def to_scaled_dtype(val): """Parse *val* to return a dtype.""" res = [] for i in val: if i[1].startswith("S"): res.append((i[0], i[1]) + i[2:-1]) else: try: res.append((i[0], i[-1].dtype) + i[2:-1]) except AttributeError: res.append((i[0], type(i[-1])) + i[2:-1]) return np.dtype(res) def to_scales(val): """Parse *val* to return an array of scale factors.""" res = [] for i in val: if len(i) == 3: res.append((i[0], type(i[2]))) else: try: res.append((i[0], i[3].dtype, i[2])) except AttributeError: res.append((i[0], type(i[3]), i[2])) dtype = np.dtype(res) scales = np.zeros((1, ), dtype=dtype) for i in val: try: scales[i[0]] = i[-1] except ValueError: scales[i[0]] = np.repeat(np.array(i[-1]), i[2][1]).reshape(i[2]) return scales def parse_format(xml_file): """Parse the xml file to create types, scaling factor types, and scales.""" tree = ElementTree() tree.parse(xml_file) for param in tree.find("parameters"): VARIABLES[param.get("name")] = param.get("value") types_scales = {} for prod in tree.find("product"): ascii = (prod.tag in ["mphr", "sphr"]) res = [] for i in prod: lres = CASES[i.tag](i, ascii) if lres is not None: res.append(lres) types_scales[(prod.tag, int(prod.get("subclass")))] = res types = {} stypes = {} scales = {} for key, val in types_scales.items(): types[key] = 
to_dtype(val) stypes[key] = to_scaled_dtype(val) scales[key] = to_scales(val) return types, stypes, scales def _apply_scales(array, scales, dtype): """Apply scales to the array.""" new_array = np.empty(array.shape, dtype) for i in array.dtype.names: try: new_array[i] = array[i] * scales[i] except TypeError: if np.all(scales[i] == 1): new_array[i] = array[i] else: raise return new_array class XMLFormat(object): """XMLFormat object.""" def __init__(self, filename): """Init the format reader.""" self.types, self.stypes, self.scales = parse_format(filename) self.translator = {} for key, val in self.types.items(): self.translator[val] = (self.scales[key], self.stypes[key]) def dtype(self, key): """Get the dtype for the format object.""" return self.types[key] def apply_scales(self, array): """Apply scales to *array*.""" return _apply_scales(array, *self.translator[array.dtype]) if __name__ == '__main__': pass satpy-0.20.0/satpy/readers/yaml_reader.py000066400000000000000000001222501362525524100203630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Base classes and utilities for all readers configured by YAML files.""" import glob import itertools import logging import os import warnings from abc import ABCMeta, abstractmethod from collections import deque, OrderedDict from fnmatch import fnmatch from weakref import WeakValueDictionary import six import xarray as xr import yaml import numpy as np try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader from pyresample.geometry import StackedAreaDefinition, SwathDefinition from pyresample.boundary import AreaDefBoundary, Boundary from satpy.resample import get_area_def from satpy.config import recursive_dict_update from satpy.dataset import DATASET_KEYS, DatasetID from satpy.readers import DatasetDict, get_key from satpy.resample import add_crs_xy_coords from trollsift.parser import globify, parse from pyresample.geometry import AreaDefinition logger = logging.getLogger(__name__) def listify_string(something): """Take *something* and make it a list. *something* is either a list of strings or a string, in which case the function returns a list containing the string. If *something* is None, an empty list is returned. 
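
    Example::

        >>> listify_string('foo')
        ['foo']
        >>> listify_string(['foo', 'bar'])
        ['foo', 'bar']
        >>> listify_string(None)
        []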
""" if isinstance(something, (str, six.text_type)): return [something] elif something is not None: return list(something) else: return list() def get_filebase(path, pattern): """Get the end of *path* of same length as *pattern*.""" # convert any `/` on Windows to `\\` path = os.path.normpath(path) # A pattern can include directories tail_len = len(pattern.split(os.path.sep)) return os.path.join(*str(path).split(os.path.sep)[-tail_len:]) def match_filenames(filenames, pattern): """Get the filenames matching *pattern*.""" matching = [] for filename in filenames: if fnmatch(get_filebase(filename, pattern), globify(pattern)): matching.append(filename) return matching class AbstractYAMLReader(six.with_metaclass(ABCMeta, object)): """Base class for all readers that use YAML configuration files. This class should only be used in rare cases. Its child class `FileYAMLReader` should be used in most cases. """ def __init__(self, config_files): """Load information from YAML configuration file about how to read data files.""" self.config = {} self.config_files = config_files for config_file in config_files: with open(config_file) as fd: self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) self.info = self.config['reader'] self.name = self.info['name'] self.file_patterns = [] for file_type, filetype_info in self.config['file_types'].items(): filetype_info.setdefault('file_type', file_type) # correct separator if needed file_patterns = [os.path.join(*pattern.split('/')) for pattern in filetype_info['file_patterns']] filetype_info['file_patterns'] = file_patterns self.file_patterns.extend(file_patterns) if 'sensors' in self.info and not isinstance(self.info['sensors'], (list, tuple)): self.info['sensors'] = [self.info['sensors']] self.datasets = self.config.get('datasets', {}) self.info['filenames'] = [] self.all_ids = {} self.load_ds_ids_from_config() @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" return self.info['sensors'] or [] @property def all_dataset_ids(self): """Get DatasetIDs of all datasets known to this reader.""" return self.all_ids.keys() @property def all_dataset_names(self): """Get names of all datasets known to this reader.""" # remove the duplicates from various calibration and resolutions return set(ds_id.name for ds_id in self.all_dataset_ids) @property def available_dataset_ids(self): """Get DatasetIDs that are loadable by this reader.""" logger.warning( "Available datasets are unknown, returning all datasets...") return self.all_dataset_ids @property def available_dataset_names(self): """Get names of datasets that are loadable by this reader.""" return (ds_id.name for ds_id in self.available_dataset_ids) @property @abstractmethod def start_time(self): """Start time of the reader.""" @property @abstractmethod def end_time(self): """End time of the reader.""" @abstractmethod def filter_selected_filenames(self, filenames): """Filter provided filenames by parameters in reader configuration. Returns: iterable of usable files """ @abstractmethod def load(self, dataset_keys): """Load *dataset_keys*.""" def supports_sensor(self, sensor): """Check if *sensor* is supported. Returns True is *sensor* is None. """ if sensor and not (set(self.info.get("sensors")) & set(listify_string(sensor))): return False else: return True def select_files_from_directory(self, directory=None): """Find files for this reader in *directory*. If directory is None or '', look in the current directory. 
""" filenames = [] if directory is None: directory = '' for pattern in self.file_patterns: matching = glob.iglob(os.path.join(directory, globify(pattern))) filenames.extend(matching) return filenames def select_files_from_pathnames(self, filenames): """Select the files from *filenames* this reader can handle.""" selected_filenames = [] for pattern in self.file_patterns: matching = match_filenames(filenames, pattern) for fname in matching: if fname not in selected_filenames: selected_filenames.append(fname) if len(selected_filenames) == 0: logger.warning("No filenames found for reader: %s", self.name) return selected_filenames def get_dataset_key(self, key, **kwargs): """Get the fully qualified `DatasetID` matching `key`. See `satpy.readers.get_key` for more information about kwargs. """ return get_key(key, self.all_ids.keys(), **kwargs) def load_ds_ids_from_config(self): """Get the dataset ids from the config.""" ids = [] for dataset in self.datasets.values(): # xarray doesn't like concatenating attributes that are lists # https://github.com/pydata/xarray/issues/2060 if 'coordinates' in dataset and \ isinstance(dataset['coordinates'], list): dataset['coordinates'] = tuple(dataset['coordinates']) # Build each permutation/product of the dataset id_kwargs = [] for key in DATASET_KEYS: val = dataset.get(key) if key in ["wavelength", "modifiers"] and isinstance(val, list): # special case: wavelength can be [min, nominal, max] # but is still considered 1 option # it also needs to be a tuple so it can be used in # a dictionary key (DatasetID) id_kwargs.append((tuple(val), )) elif key == "modifiers" and val is None: # empty modifiers means no modifiers applied id_kwargs.append((tuple(), )) elif isinstance(val, (list, tuple, set)): # this key has multiple choices # (ex. 250 meter, 500 meter, 1000 meter resolutions) id_kwargs.append(val) elif isinstance(val, dict): id_kwargs.append(val.keys()) else: # this key only has one choice so make it a one # item iterable id_kwargs.append((val, )) for id_params in itertools.product(*id_kwargs): dsid = DatasetID(*id_params) ids.append(dsid) # create dataset infos specifically for this permutation ds_info = dataset.copy() for key in DATASET_KEYS: if isinstance(ds_info.get(key), dict): ds_info.update(ds_info[key][getattr(dsid, key)]) # this is important for wavelength which was converted # to a tuple ds_info[key] = getattr(dsid, key) self.all_ids[dsid] = ds_info return ids class FileYAMLReader(AbstractYAMLReader): """Primary reader base class that is configured by a YAML file. This class uses the idea of per-file "file handler" objects to read file contents and determine what is available in the file. This differs from the base :class:`AbstractYAMLReader` which does not depend on individual file handler objects. In almost all cases this class should be used over its base class and can be used as a reader by itself and requires no subclassing. 
""" def __init__(self, config_files, filter_parameters=None, filter_filenames=True, **kwargs): """Set up initial internal storage for loading file data.""" super(FileYAMLReader, self).__init__(config_files) self.file_handlers = {} self.available_ids = {} self.filter_filenames = self.info.get('filter_filenames', filter_filenames) self.filter_parameters = filter_parameters or {} self.coords_cache = WeakValueDictionary() @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" if not self.file_handlers: return self.info['sensors'] file_handlers = (handlers[0] for handlers in self.file_handlers.values()) sensor_names = set() for fh in file_handlers: try: sensor_names.update(fh.sensor_names) except NotImplementedError: continue if not sensor_names: return self.info['sensors'] return sorted(sensor_names) @property def available_dataset_ids(self): """Get DatasetIDs that are loadable by this reader.""" return self.available_ids.keys() @property def start_time(self): """Start time of the earlier file used by this reader.""" if not self.file_handlers: raise RuntimeError("Start time unknown until files are selected") return min(x[0].start_time for x in self.file_handlers.values()) @property def end_time(self): """End time of the latest file used by this reader.""" if not self.file_handlers: raise RuntimeError("End time unknown until files are selected") return max(x[-1].end_time for x in self.file_handlers.values()) @staticmethod def check_file_covers_area(file_handler, check_area): """Check if the file covers the current area. If the file doesn't provide any bounding box information or 'area' was not provided in `filter_parameters`, the check returns True. """ try: gbb = Boundary(*file_handler.get_bounding_box()) except NotImplementedError as err: logger.debug("Bounding box computation not implemented: %s", str(err)) else: abb = AreaDefBoundary(get_area_def(check_area), frequency=1000) intersection = gbb.contour_poly.intersection(abb.contour_poly) if not intersection: return False return True def find_required_filehandlers(self, requirements, filename_info): """Find the necessary file handlers for the given requirements. We assume here requirements are available. Raises: KeyError, if no handler for the given requirements is available. RuntimeError, if there is a handler for the given requirements, but it doesn't match the filename info. """ req_fh = [] filename_info = set(filename_info.items()) if requirements: for requirement in requirements: for fhd in self.file_handlers[requirement]: if set(fhd.filename_info.items()).issubset(filename_info): req_fh.append(fhd) break else: raise RuntimeError("No matching requirement file of type " "{}".format(requirement)) # break everything and continue to next # filetype! 
return req_fh

    def sorted_filetype_items(self):
        """Sort the instance's file types in the order they should be used (requirements first)."""
        processed_types = []
        file_type_items = deque(self.config['file_types'].items())
        while len(file_type_items):
            filetype, filetype_info = file_type_items.popleft()
            requirements = filetype_info.get('requires')
            if requirements is not None:
                # requirements have not been processed yet -> wait
                missing = [req for req in requirements
                           if req not in processed_types]
                if missing:
                    file_type_items.append((filetype, filetype_info))
                    continue
            processed_types.append(filetype)
            yield filetype, filetype_info

    @staticmethod
    def filename_items_for_filetype(filenames, filetype_info):
        """Iterate over the filenames matching *filetype_info*."""
        matched_files = []
        for pattern in filetype_info['file_patterns']:
            for filename in match_filenames(filenames, pattern):
                if filename in matched_files:
                    continue
                try:
                    filename_info = parse(
                        pattern, get_filebase(filename, pattern))
                except ValueError:
                    logger.debug("Can't parse %s with %s.", filename, pattern)
                    continue
                matched_files.append(filename)
                yield filename, filename_info

    def new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=None):
        """Generate new filehandler instances."""
        requirements = filetype_info.get('requires')
        filetype_cls = filetype_info['file_reader']
        if fh_kwargs is None:
            fh_kwargs = {}
        for filename, filename_info in filename_items:
            try:
                req_fh = self.find_required_filehandlers(requirements, filename_info)
            except KeyError as req:
                msg = "No handler for reading requirement {} for {}".format(
                    req, filename)
                warnings.warn(msg)
                continue
            except RuntimeError as err:
                warnings.warn(str(err) + ' for {}'.format(filename))
                continue
            yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs)

    def time_matches(self, fstart, fend):
        """Check that a file's start and end time match filter_parameters of this reader."""
        start_time = self.filter_parameters.get('start_time')
        end_time = self.filter_parameters.get('end_time')
        fend = fend or fstart
        if start_time and fend and fend < start_time:
            return False
        if end_time and fstart and fstart > end_time:
            return False
        return True

    def metadata_matches(self, sample_dict, file_handler=None):
        """Check that file metadata matches filter_parameters of this reader."""
        # special handling of start/end times
        if not self.time_matches(
                sample_dict.get('start_time'), sample_dict.get('end_time')):
            return False
        for key, val in self.filter_parameters.items():
            if key != 'area' and key not in sample_dict:
                continue
            if key in ['start_time', 'end_time']:
                continue
            elif key == 'area' and file_handler:
                if not self.check_file_covers_area(file_handler, val):
                    logger.info('Filtering out %s based on area', file_handler.filename)
                    break
            elif key in sample_dict and val != sample_dict[key]:
                # don't use this file
                break
        else:
            # all the metadata keys are equal
            return True
        return False

    def filter_filenames_by_info(self, filename_items):
        """Filter out files using metadata from the filenames.

        Currently only uses start and end time. If only start time is available
        from the filename, keep all the filenames that have a start time before
        the requested end time.
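
        Example (illustrative; the pattern and filename are hypothetical)::

            # pattern: '{platform}_{start_time:%Y%m%d_%H%M}_{end_time:%H%M}.nc'
            # 'sat_20200101_0010_0155.nc' parses end_time with a default date,
            # which sorts before start_time; the end date is then replaced with
            # the start date, giving 2020-01-01 01:55, before filtering.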
""" for filename, filename_info in filename_items: fend = filename_info.get('end_time') fstart = filename_info.setdefault('start_time', fend) if fend and fend < fstart: # correct for filenames with 1 date and 2 times fend = fend.replace(year=fstart.year, month=fstart.month, day=fstart.day) filename_info['end_time'] = fend if self.metadata_matches(filename_info): yield filename, filename_info def filter_fh_by_metadata(self, filehandlers): """Filter out filehandlers using provide filter parameters.""" for filehandler in filehandlers: filehandler.metadata['start_time'] = filehandler.start_time filehandler.metadata['end_time'] = filehandler.end_time if self.metadata_matches(filehandler.metadata, filehandler): yield filehandler def filter_selected_filenames(self, filenames): """Filter provided files based on metadata in the filename.""" for _, filetype_info in self.sorted_filetype_items(): filename_iter = self.filename_items_for_filetype(filenames, filetype_info) if self.filter_filenames: filename_iter = self.filter_filenames_by_info(filename_iter) for fn, _ in filename_iter: yield fn def new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=None): """Create filehandlers for a given filetype.""" filename_iter = self.filename_items_for_filetype(filenames, filetype_info) if self.filter_filenames: # preliminary filter of filenames based on start/end time # to reduce the number of files to open filename_iter = self.filter_filenames_by_info(filename_iter) filehandler_iter = self.new_filehandler_instances(filetype_info, filename_iter, fh_kwargs=fh_kwargs) filtered_iter = self.filter_fh_by_metadata(filehandler_iter) return list(filtered_iter) def create_filehandlers(self, filenames, fh_kwargs=None): """Organize the filenames into file types and create file handlers.""" filenames = list(OrderedDict.fromkeys(filenames)) logger.debug("Assigning to %s: %s", self.info['name'], filenames) self.info.setdefault('filenames', []).extend(filenames) filename_set = set(filenames) created_fhs = {} # load files that we know about by creating the file handlers for filetype, filetype_info in self.sorted_filetype_items(): filehandlers = self.new_filehandlers_for_filetype(filetype_info, filename_set, fh_kwargs=fh_kwargs) filename_set -= set([fhd.filename for fhd in filehandlers]) if filehandlers: created_fhs[filetype] = filehandlers self.file_handlers[filetype] = sorted( self.file_handlers.get(filetype, []) + filehandlers, key=lambda fhd: (fhd.start_time, fhd.filename)) # load any additional dataset IDs determined dynamically from the file # and update any missing metadata that only the file knows self.update_ds_ids_from_file_handlers() return created_fhs def _file_handlers_available_datasets(self): """Generate a series of available dataset information. This is done by chaining file handler's :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` together. See that method's documentation for more information. Returns: Generator of (bool, dict) where the boolean tells whether the current dataset is available from any of the file handlers. The boolean can also be None in the case where no loaded file handler is configured to load the dataset. The dictionary is the metadata provided either by the YAML configuration files or by the file handler itself if it is a new dataset. The file handler may have also supplemented or modified the information. 
""" # flatten all file handlers in to one list flat_fhs = (fh for fhs in self.file_handlers.values() for fh in fhs) id_values = list(self.all_ids.values()) configured_datasets = ((None, ds_info) for ds_info in id_values) for fh in flat_fhs: # chain the 'available_datasets' methods together by calling the # current file handler's method with the previous ones result configured_datasets = fh.available_datasets(configured_datasets=configured_datasets) return configured_datasets def update_ds_ids_from_file_handlers(self): """Add or modify available dataset information. Each file handler is consulted on whether or not it can load the dataset with the provided information dictionary. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for more information. """ avail_datasets = self._file_handlers_available_datasets() new_ids = {} for is_avail, ds_info in avail_datasets: # especially from the yaml config coordinates = ds_info.get('coordinates') if isinstance(coordinates, list): # xarray doesn't like concatenating attributes that are # lists: https://github.com/pydata/xarray/issues/2060 ds_info['coordinates'] = tuple(ds_info['coordinates']) ds_info.setdefault('modifiers', tuple()) # default to no mods ds_id = DatasetID.from_dict(ds_info) # all datasets new_ids[ds_id] = ds_info # available datasets # False == we have the file type but it doesn't have this dataset # None == we don't have the file type object to ask if is_avail: self.available_ids[ds_id] = ds_info self.all_ids = new_ids @staticmethod def _load_dataset(dsid, ds_info, file_handlers, dim='y', **kwargs): """Load only a piece of the dataset.""" slice_list = [] failure = True for fh in file_handlers: try: projectable = fh.get_dataset(dsid, ds_info) if projectable is not None: slice_list.append(projectable) failure = False except KeyError: logger.warning("Failed to load {} from {}".format(dsid, fh), exc_info=True) if failure: raise KeyError( "Could not load {} from any provided files".format(dsid)) if dim not in slice_list[0].dims: return slice_list[0] res = xr.concat(slice_list, dim=dim) combined_info = file_handlers[0].combine_info( [p.attrs for p in slice_list]) res.attrs = combined_info return res def _load_dataset_data(self, file_handlers, dsid, **kwargs): ds_info = self.all_ids[dsid] proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs) # FIXME: areas could be concatenated here # Update the metadata proj.attrs['start_time'] = file_handlers[0].start_time proj.attrs['end_time'] = file_handlers[-1].end_time return proj def _preferred_filetype(self, filetypes): """Get the preferred filetype out of the *filetypes* list. At the moment, it just returns the first filetype that has been loaded. 
""" if not isinstance(filetypes, list): filetypes = [filetypes] # look through the file types and use the first one that we have loaded for filetype in filetypes: if filetype in self.file_handlers: return filetype return None def _load_area_def(self, dsid, file_handlers, **kwargs): """Load the area definition of *dsid*.""" return _load_area_def(dsid, file_handlers) def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for *dsid*.""" ds_info = self.all_ids[dsid] cids = [] for cinfo in ds_info.get('coordinates', []): if not isinstance(cinfo, dict): cinfo = {'name': cinfo} cinfo['resolution'] = ds_info['resolution'] if 'polarization' in ds_info: cinfo['polarization'] = ds_info['polarization'] cid = DatasetID(**cinfo) cids.append(self.get_dataset_key(cid)) return cids def _get_coordinates_for_dataset_keys(self, dsids): """Get all coordinates.""" coordinates = {} for dsid in dsids: cids = self._get_coordinates_for_dataset_key(dsid) coordinates.setdefault(dsid, []).extend(cids) return coordinates def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] filetype = self._preferred_filetype(ds_info['file_type']) if filetype is None: logger.warning("Required file type '%s' not found or loaded for " "'%s'", ds_info['file_type'], dsid.name) else: return self.file_handlers[filetype] def _make_area_from_coords(self, coords): """Create an appropriate area with the given *coords*.""" if len(coords) == 2: lon_sn = coords[0].attrs.get('standard_name') lat_sn = coords[1].attrs.get('standard_name') if lon_sn == 'longitude' and lat_sn == 'latitude': key = None try: key = (coords[0].data.name, coords[1].data.name) sdef = self.coords_cache.get(key) except AttributeError: sdef = None if sdef is None: sdef = SwathDefinition(*coords) sensor_str = '_'.join(self.info['sensors']) shape_str = '_'.join(map(str, coords[0].shape)) sdef.name = "{}_{}_{}_{}".format(sensor_str, shape_str, coords[0].attrs['name'], coords[1].attrs['name']) if key is not None: self.coords_cache[key] = sdef return sdef else: raise ValueError( 'Coordinates info object missing standard_name key: ' + str(coords)) elif len(coords) != 0: raise NameError("Don't know what to do with coordinates " + str( coords)) def _load_dataset_area(self, dsid, file_handlers, coords, **kwargs): """Get the area for *dsid*.""" try: return self._load_area_def(dsid, file_handlers, **kwargs) except NotImplementedError: if any(x is None for x in coords): logger.warning( "Failed to load coordinates for '{}'".format(dsid)) return None area = self._make_area_from_coords(coords) if area is None: logger.debug("No coordinates found for %s", str(dsid)) return area def _load_dataset_with_area(self, dsid, coords, **kwargs): """Load *dsid* and its area if available.""" file_handlers = self._get_file_handlers(dsid) if not file_handlers: return area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs) try: ds = self._load_dataset_data(file_handlers, dsid, **kwargs) except (KeyError, ValueError) as err: logger.exception("Could not load dataset '%s': %s", dsid, str(err)) return None if area is not None: ds.attrs['area'] = area ds = add_crs_xy_coords(ds, area) return ds def _load_ancillary_variables(self, datasets): """Load the ancillary variables of `datasets`.""" all_av_ids = set() for dataset in datasets.values(): ancillary_variables = dataset.attrs.get('ancillary_variables', []) if not isinstance(ancillary_variables, (list, tuple, set)): ancillary_variables = 
            av_ids = []
            for key in ancillary_variables:
                try:
                    av_ids.append(self.get_dataset_key(key))
                except KeyError:
                    logger.warning("Can't load ancillary dataset %s", str(key))

            all_av_ids |= set(av_ids)
            dataset.attrs['ancillary_variables'] = av_ids

        loadable_av_ids = [av_id for av_id in all_av_ids
                           if av_id not in datasets]
        if not all_av_ids:
            return
        if loadable_av_ids:
            self.load(loadable_av_ids, previous_datasets=datasets)

        for dataset in datasets.values():
            new_vars = []
            for av_id in dataset.attrs.get('ancillary_variables', []):
                if isinstance(av_id, DatasetID):
                    new_vars.append(datasets[av_id])
                else:
                    new_vars.append(av_id)
            dataset.attrs['ancillary_variables'] = new_vars

    def get_dataset_key(self, key, available_only=False, **kwargs):
        """Get the fully qualified `DatasetID` matching `key`.

        This will first search through available DatasetIDs, datasets that
        should be possible to load, and fall back to "known" datasets, those
        that are configured but aren't loadable from the provided files.
        Providing ``available_only=True`` will stop this fallback behavior
        and raise a ``KeyError`` exception if no available dataset is found.

        Args:
            key (str, float, DatasetID): Key to search for in this reader.
            available_only (bool): Search only loadable datasets for the
                provided key. Loadable datasets are always searched first,
                but if ``available_only=False`` (default) then all known
                datasets will be searched.
            kwargs: See :func:`satpy.readers.get_key` for more information
                about kwargs.

        Returns:
            Best matching DatasetID to the provided ``key``.

        Raises:
            KeyError: if no key match is found.

        """
        try:
            return get_key(key, self.available_ids.keys(), **kwargs)
        except KeyError:
            if available_only:
                raise
            return get_key(key, self.all_ids.keys(), **kwargs)

    def load(self, dataset_keys, previous_datasets=None, **kwargs):
        """Load `dataset_keys`.

        If `previous_datasets` is provided, do not reload those.

        """
        all_datasets = previous_datasets or DatasetDict()
        datasets = DatasetDict()

        # Include coordinates in the list of datasets to load
        dsids = [self.get_dataset_key(ds_key) for ds_key in dataset_keys]
        coordinates = self._get_coordinates_for_dataset_keys(dsids)
        all_dsids = list(set().union(*coordinates.values())) + dsids

        for dsid in all_dsids:
            if dsid in all_datasets:
                continue
            coords = [all_datasets.get(cid, None)
                      for cid in coordinates.get(dsid, [])]
            ds = self._load_dataset_with_area(dsid, coords, **kwargs)
            if ds is not None:
                all_datasets[dsid] = ds
                if dsid in dsids:
                    datasets[dsid] = ds

        self._load_ancillary_variables(all_datasets)

        return datasets


def _load_area_def(dsid, file_handlers):
    """Load the area definition of *dsid*."""
    area_defs = [fh.get_area_def(dsid) for fh in file_handlers]
    area_defs = [area_def for area_def in area_defs
                 if area_def is not None]

    final_area = StackedAreaDefinition(*area_defs)
    return final_area.squeeze()
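

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# How a file handler plugs into the 'available_datasets' chaining used by
# ``FileYAMLReader._file_handlers_available_datasets`` above: each handler
# receives the previous handler's (is_available, ds_info) pairs and yields
# them onward, possibly deciding availability or adding new datasets. The
# handler class and the dynamically discovered dataset are hypothetical.
def _example_available_datasets_chain():
    from satpy.readers.file_handlers import BaseFileHandler

    class ExampleFileHandler(BaseFileHandler):
        def available_datasets(self, configured_datasets=None):
            for is_avail, ds_info in (configured_datasets or []):
                if is_avail is not None:
                    # a previous handler already made a decision; keep it
                    yield is_avail, ds_info
                else:
                    # claim datasets whose file type matches this handler
                    yield self.file_type_matches(ds_info['file_type']), ds_info
            # add a dataset discovered inside the file itself (hypothetical)
            yield True, {'name': 'example_band',
                         'file_type': self.filetype_info['file_type']}

    return ExampleFileHandler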


class GEOSegmentYAMLReader(FileYAMLReader):
    """Reader for segmented geostationary data.

    This reader pads the data to full geostationary disk if necessary.

    This reader uses an optional ``pad_data`` keyword argument that can be
    passed to :meth:`Scene.load` to control if padding is done (True by
    default). Passing `pad_data=False` will return data unpadded.

    When using this class in a reader's YAML configuration, segmented file
    types (files that may have multiple segments) should specify an extra
    ``expected_segments`` piece of file_type metadata. This tells this reader
    how many total segments it should expect when padding data. Alternatively,
    the file patterns for a file type can include a ``total_segments``
    field which will be used if ``expected_segments`` is not defined. This
    will default to 1 segment.

    """

    def create_filehandlers(self, filenames, fh_kwargs=None):
        """Create file handler objects and determine expected segments for each."""
        created_fhs = super(GEOSegmentYAMLReader, self).create_filehandlers(
            filenames, fh_kwargs=fh_kwargs)
        # add "expected_segments" information
        for fhs in created_fhs.values():
            for fh in fhs:
                # check the filename for total_segments parameter as a fallback
                ts = fh.filename_info.get('total_segments', 1)
                # if the YAML has segments explicitly specified then use that
                fh.filetype_info.setdefault('expected_segments', ts)
        return created_fhs

    @staticmethod
    def _load_dataset(dsid, ds_info, file_handlers, dim='y', pad_data=True):
        """Load only a piece of the dataset."""
        if not pad_data:
            return FileYAMLReader._load_dataset(dsid, ds_info,
                                                file_handlers)

        counter, expected_segments, slice_list, failure, projectable = \
            _find_missing_segments(file_handlers, ds_info, dsid)

        if projectable is None or failure:
            raise KeyError(
                "Could not load {} from any provided files".format(dsid))

        empty_segment = xr.full_like(projectable, np.nan)
        for i, sli in enumerate(slice_list):
            if sli is None:
                slice_list[i] = empty_segment

        while expected_segments > counter:
            slice_list.append(empty_segment)
            counter += 1

        if dim not in slice_list[0].dims:
            return slice_list[0]
        res = xr.concat(slice_list, dim=dim)

        combined_info = file_handlers[0].combine_info(
            [p.attrs for p in slice_list])

        res.attrs = combined_info
        return res

    def _load_area_def(self, dsid, file_handlers, pad_data=True):
        """Load the area definition of *dsid* with padding."""
        if not pad_data:
            return _load_area_def(dsid, file_handlers)
        return _load_area_def_with_padding(dsid, file_handlers)
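

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# Using the ``pad_data`` keyword described in the class docstring above.
# The reader name, file name, and channel name are hypothetical.
def _example_load_unpadded():
    from satpy import Scene
    scn = Scene(filenames=['example_segment_001.nc'],
                reader='example_segmented_reader')
    # keyword is forwarded from Scene.load down to this reader
    scn.load(['example_channel'], pad_data=False)
    return scn['example_channel']  # segments concatenated, no NaN padding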
%d", segment) ext_diff = area.area_extent[1] - area.area_extent[3] new_ll_y = area.area_extent[1] + ext_diff new_ur_y = area.area_extent[1] fill_extent = (area.area_extent[0], new_ll_y, area.area_extent[2], new_ur_y) area = AreaDefinition('fill', 'fill', 'fill', area.proj_dict, seg_size[1], seg_size[0], fill_extent) area_defs[segment] = area seg_size = area.shape return area_defs def _pad_earlier_segments_area(file_handlers, dsid, area_defs): """Pad area definitions for missing segments that are earlier in sequence than the first available.""" available_segments = [int(fh.filename_info.get('segment', 1)) for fh in file_handlers] area = file_handlers[0].get_area_def(dsid) seg_size = area.shape proj_dict = area.proj_dict for segment in range(available_segments[0] - 1, 0, -1): logger.debug("Padding segment %d to full disk.", segment) ext_diff = area.area_extent[1] - area.area_extent[3] new_ll_y = area.area_extent[3] new_ur_y = area.area_extent[3] - ext_diff fill_extent = (area.area_extent[0], new_ll_y, area.area_extent[2], new_ur_y) area = AreaDefinition('fill', 'fill', 'fill', proj_dict, seg_size[1], seg_size[0], fill_extent) area_defs[segment] = area seg_size = area.shape return area_defs def _find_missing_segments(file_handlers, ds_info, dsid): """Find missing segments.""" slice_list = [] failure = True counter = 1 expected_segments = 1 # get list of file handlers in segment order # (ex. first segment, second segment, etc) handlers = sorted(file_handlers, key=lambda x: x.filename_info.get('segment', 1)) projectable = None for fh in handlers: if fh.filetype_info['file_type'] in ds_info['file_type']: expected_segments = fh.filetype_info['expected_segments'] while int(fh.filename_info.get('segment', 1)) > counter: slice_list.append(None) counter += 1 try: projectable = fh.get_dataset(dsid, ds_info) if projectable is not None: slice_list.append(projectable) failure = False counter += 1 except KeyError: logger.warning("Failed to load %s from %s", str(dsid), str(fh), exc_info=True) # The last segment is missing? if len(slice_list) < expected_segments: slice_list.append(None) return counter, expected_segments, slice_list, failure, projectable satpy-0.20.0/satpy/resample.py000066400000000000000000001466531362525524100162770ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Satpy resampling module. Satpy provides multiple resampling algorithms for resampling geolocated data to uniform projected grids. The easiest way to perform resampling in Satpy is through the :class:`~satpy.scene.Scene` object's :meth:`~satpy.scene.Scene.resample` method. Additional utility functions are also available to assist in resampling data. Below is more information on resampling with Satpy as well as links to the relevant API documentation for available keyword arguments. Resampling algorithms --------------------- .. 

.. csv-table:: Available Resampling Algorithms
    :header-rows: 1
    :align: center

    "Resampler", "Description", "Related"
    "nearest", "Nearest Neighbor", :class:`~satpy.resample.KDTreeResampler`
    "ewa", "Elliptical Weighted Averaging", :class:`~satpy.resample.EWAResampler`
    "native", "Native", :class:`~satpy.resample.NativeResampler`
    "bilinear", "Bilinear", :class:`~satpy.resample.BilinearResampler`
    "bucket_avg", "Average Bucket Resampling", :class:`~satpy.resample.BucketAvg`
    "bucket_sum", "Sum Bucket Resampling", :class:`~satpy.resample.BucketSum`
    "bucket_count", "Count Bucket Resampling", :class:`~satpy.resample.BucketCount`
    "bucket_fraction", "Fraction Bucket Resampling", :class:`~satpy.resample.BucketFraction`
    "gradient_search", "Gradient Search Resampling", :class:`~pyresample.gradient.GradientSearchResampler`

The resampling algorithm used can be specified with the ``resampler`` keyword
argument and defaults to ``nearest``:

.. code-block:: python

    >>> scn = Scene(...)
    >>> euro_scn = scn.resample('euro4', resampler='nearest')

.. warning::

    Some resampling algorithms expect certain forms of data. For example, the
    EWA resampling expects polar-orbiting swath data and prefers if the data
    can be broken into "scan lines". See the API documentation for a specific
    algorithm for more information.

Resampling for comparison and composites
----------------------------------------

While all the resamplers can be used to put datasets of different resolutions
onto a common area, the 'native' resampler is designed to match datasets to
one resolution in the dataset's original projection. This is extremely useful
when generating composites between bands of different resolutions.

.. code-block:: python

    >>> new_scn = scn.resample(resampler='native')

By default this resamples to the
:meth:`highest resolution area <satpy.scene.Scene.max_area>` (smallest
footprint per pixel) shared between the loaded datasets. You can easily
specify the lower resolution area:

.. code-block:: python

    >>> new_scn = scn.resample(scn.min_area(), resampler='native')

Providing an area that is neither the minimum nor the maximum resolution area
may work, but behavior is currently undefined.

Caching for geostationary data
------------------------------

Satpy will do its best to reuse calculations performed to resample datasets,
but it can only do this for the current processing and will lose this
information when the process/script ends. Some resampling algorithms, like
``nearest`` and ``bilinear``, can benefit by caching intermediate data on
disk in the directory specified by `cache_dir` and using it next time. This
is most beneficial with geostationary satellite data where the locations of
the source data and the target pixels don't change over time.

    >>> new_scn = scn.resample('euro4', cache_dir='/path/to/cache_dir')

See the documentation for specific algorithms to see availability and
limitations of caching for that algorithm.

Create custom area definition
-----------------------------

See :class:`pyresample.geometry.AreaDefinition` for information on creating
areas that can be passed to the resample method::

    >>> from pyresample.geometry import AreaDefinition
    >>> my_area = AreaDefinition(...)
    >>> local_scene = global_scene.resample(my_area)

Create dynamic area definition
------------------------------

See :class:`pyresample.geometry.DynamicAreaDefinition` for more information.

Examples coming soon...
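
In the meantime, one possible approach (a sketch: the projection parameters
below are made up, and the exact ``DynamicAreaDefinition`` signature depends
on the pyresample version, so check its documentation) is to describe only
the target projection and let pyresample "freeze" the extent and shape from
the data being resampled:

.. code-block:: python

    >>> from pyresample.geometry import DynamicAreaDefinition
    >>> my_dyn_area = DynamicAreaDefinition('my_area', 'A dynamic area',
    ...                                     {'proj': 'laea', 'lat_0': 60, 'lon_0': 25})
    >>> local_scene = scn.resample(my_dyn_area)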

Store area definitions
----------------------

Area definitions can be added to a custom YAML file (see
`pyresample's documentation <https://pyresample.readthedocs.io/>`_
for more information) and loaded using pyresample's utility methods::

    >>> from pyresample.utils import parse_area_file
    >>> my_area = parse_area_file('my_areas.yaml', 'my_area')[0]

Examples coming soon...

"""

import hashlib
import json
import os
from logging import getLogger
from weakref import WeakValueDictionary
import warnings

import numpy as np
import xarray as xr
import dask
import dask.array as da
import zarr
import six

from pyresample.ewa import fornav, ll2cr
from pyresample.geometry import SwathDefinition
try:
    from pyresample.resampler import BaseResampler as PRBaseResampler
    from pyresample.gradient import GradientSearchResampler
except ImportError:
    warnings.warn('Gradient search resampler not available, upgrade Pyresample.')
    PRBaseResampler = None
    GradientSearchResampler = None

from satpy import CHUNK_SIZE
from satpy.config import config_search_paths, get_config_path

# In Python3 os.mkdir raises FileExistsError, in Python2 OSError
if six.PY2:
    FileExistsError = OSError

LOG = getLogger(__name__)

CACHE_SIZE = 10
NN_COORDINATES = {'valid_input_index': ('y1', 'x1'),
                  'valid_output_index': ('y2', 'x2'),
                  'index_array': ('y2', 'x2', 'z2')}
BIL_COORDINATES = {'bilinear_s': ('x1', ),
                   'bilinear_t': ('x1', ),
                   'slices_x': ('x1', 'n'),
                   'slices_y': ('x1', 'n'),
                   'mask_slices': ('x1', 'n'),
                   'out_coords_x': ('x2', ),
                   'out_coords_y': ('y2', )}

resamplers_cache = WeakValueDictionary()


def hash_dict(the_dict, the_hash=None):
    """Calculate a hash for a dictionary."""
    if the_hash is None:
        the_hash = hashlib.sha1()
    the_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8'))
    return the_hash


def get_area_file():
    """Find area file(s) to use.

    The files are to be named `areas.yaml` or `areas.def`.
    """
    paths = config_search_paths('areas.yaml')
    if paths:
        return paths
    else:
        return get_config_path('areas.def')


def get_area_def(area_name):
    """Get the definition of *area_name* from file.

    The file to use is to be placed in the $PPP_CONFIG_DIR directory, and
    its name is defined in satpy's configuration file.
    """
    try:
        from pyresample import parse_area_file
    except ImportError:
        from pyresample.utils import parse_area_file
    return parse_area_file(get_area_file(), area_name)[0]
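

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# Looking up a named area with ``get_area_def`` above. The area name
# 'euro4' is an assumption; any area defined in the configured areas.yaml
# works the same way.
def _example_lookup_area():
    area = get_area_def('euro4')
    # a pyresample AreaDefinition with projection, shape and extent set
    return area.shape, area.area_extent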


def add_xy_coords(data_arr, area, crs=None):
    """Assign x/y coordinates to DataArray from provided area.

    If 'x' and 'y' coordinates already exist then they will not be added.

    Args:
        data_arr (xarray.DataArray): data object to add x/y coordinates to
        area (pyresample.geometry.AreaDefinition): area providing the
            coordinate data.
        crs (pyproj.crs.CRS or None): CRS providing additional information
            about the area's coordinate reference system if available.
            Requires pyproj 2.0+.

    Returns (xarray.DataArray): Updated DataArray object

    """
    if 'x' in data_arr.coords and 'y' in data_arr.coords:
        # x/y coords already provided
        return data_arr
    elif 'x' not in data_arr.dims or 'y' not in data_arr.dims:
        # no defined x and y dimensions
        return data_arr

    if hasattr(area, 'get_proj_vectors'):
        x, y = area.get_proj_vectors()
    else:
        return data_arr

    # convert to DataArrays
    y_attrs = {}
    x_attrs = {}
    if crs is not None:
        units = crs.axis_info[0].unit_name
        # fix udunits/CF standard units
        units = units.replace('metre', 'meter')
        if units == 'degree':
            y_attrs['units'] = 'degrees_north'
            x_attrs['units'] = 'degrees_east'
        else:
            y_attrs['units'] = units
            x_attrs['units'] = units
    y = xr.DataArray(y, dims=('y',), attrs=y_attrs)
    x = xr.DataArray(x, dims=('x',), attrs=x_attrs)
    return data_arr.assign_coords(y=y, x=x)


def add_crs_xy_coords(data_arr, area):
    """Add :class:`pyproj.crs.CRS` and x/y or lons/lats to coordinates.

    For SwathDefinition or GridDefinition areas this will add a `crs`
    coordinate and coordinates for the 2D arrays of `lons` and `lats`.

    For AreaDefinition areas this will add a `crs` coordinate and the
    1-dimensional `x` and `y` coordinate variables.

    Args:
        data_arr (xarray.DataArray): DataArray to add the 'crs' coordinate.
        area (pyresample.geometry.AreaDefinition): Area to get CRS
            information from.

    """
    # add CRS object if pyproj 2.0+
    try:
        from pyproj import CRS
    except ImportError:
        LOG.debug("Could not add 'crs' coordinate with pyproj<2.0")
        crs = None
    else:
        # default lat/lon projection
        latlon_proj = "+proj=latlong +datum=WGS84 +ellps=WGS84"
        # otherwise get it from the area definition
        if hasattr(area, 'crs'):
            crs = area.crs
        else:
            proj_str = getattr(area, 'proj_str', latlon_proj)
            crs = CRS.from_string(proj_str)
        data_arr = data_arr.assign_coords(crs=crs)

    # Add x/y coordinates if possible
    if isinstance(area, SwathDefinition):
        # add lon/lat arrays for swath definitions
        # SwathDefinitions created by Satpy should be assigning DataArray
        # objects as the lons/lats attributes so use those directly to
        # maintain original .attrs metadata (instead of converting to dask
        # array).
        lons = area.lons
        lats = area.lats
        lons.attrs.setdefault('standard_name', 'longitude')
        lons.attrs.setdefault('long_name', 'longitude')
        lons.attrs.setdefault('units', 'degrees_east')
        lats.attrs.setdefault('standard_name', 'latitude')
        lats.attrs.setdefault('long_name', 'latitude')
        lats.attrs.setdefault('units', 'degrees_north')
        # See https://github.com/pydata/xarray/issues/3068
        # data_arr = data_arr.assign_coords(longitude=lons, latitude=lats)
    else:
        # Gridded data (AreaDefinition/StackedAreaDefinition)
        data_arr = add_xy_coords(data_arr, area, crs=crs)
    return data_arr
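

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# What ``add_xy_coords`` produces for a small, made-up projected area.
def _example_add_xy_coords():
    from pyresample.geometry import AreaDefinition
    area = AreaDefinition('example', 'example area', 'example',
                          {'proj': 'merc', 'datum': 'WGS84'},
                          4, 4, (-2000.0, -2000.0, 2000.0, 2000.0))
    data = xr.DataArray(np.zeros((4, 4)), dims=('y', 'x'))
    data = add_xy_coords(data, area)
    # 1D projection coordinates, one value per column/row center
    return data.coords['x'], data.coords['y']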
""" # copy over other non-x/y coordinates # this *MUST* happen before we set 'crs' below otherwise any 'crs' # coordinate in the coordinate variables we are copying will overwrite the # 'crs' coordinate we just assigned to the data ignore_coords = ('y', 'x', 'crs') new_coords = {} for cname, cval in old_data.coords.items(): # we don't want coordinates that depended on the old x/y dimensions has_ignored_dims = any(dim in cval.dims for dim in ignore_coords) if cname in ignore_coords or has_ignored_dims: continue new_coords[cname] = cval new_data = new_data.assign_coords(**new_coords) # add crs, x, and y coordinates new_data = add_crs_xy_coords(new_data, new_area) return new_data class BaseResampler(object): """Base abstract resampler class.""" def __init__(self, source_geo_def, target_geo_def): """Initialize resampler with geolocation information. Args: source_geo_def (SwathDefinition, AreaDefinition): Geolocation definition for the data to be resampled target_geo_def (CoordinateDefinition, AreaDefinition): Geolocation definition for the area to resample data to. """ self.source_geo_def = source_geo_def self.target_geo_def = target_geo_def def get_hash(self, source_geo_def=None, target_geo_def=None, **kwargs): """Get hash for the current resample with the given *kwargs*.""" if source_geo_def is None: source_geo_def = self.source_geo_def if target_geo_def is None: target_geo_def = self.target_geo_def the_hash = source_geo_def.update_hash() target_geo_def.update_hash(the_hash) hash_dict(kwargs, the_hash) return the_hash.hexdigest() def precompute(self, **kwargs): """Do the precomputation. This is an optional step if the subclass wants to implement more complex features like caching or can share some calculations between multiple datasets to be processed. """ return None def compute(self, data, **kwargs): """Do the actual resampling. This must be implemented by subclasses. """ raise NotImplementedError def resample(self, data, cache_dir=None, mask_area=None, **kwargs): """Resample `data` by calling `precompute` and `compute` methods. Only certain resampling classes may use `cache_dir` and the `mask` provided when `mask_area` is True. The return value of calling the `precompute` method is passed as the `cache_id` keyword argument of the `compute` method, but may not be used directly for caching. It is up to the individual resampler subclasses to determine how this is used. Args: data (xarray.DataArray): Data to be resampled cache_dir (str): directory to cache precomputed results (default False, optional) mask_area (bool): Mask geolocation data where data values are invalid. This should be used when data values may affect what neighbors are considered valid. 

        Returns (xarray.DataArray): Data resampled to the target area

        """
        # default is to mask areas for SwathDefinitions
        if mask_area is None and isinstance(
                self.source_geo_def, SwathDefinition):
            mask_area = True

        if mask_area:
            if isinstance(self.source_geo_def, SwathDefinition):
                geo_dims = self.source_geo_def.lons.dims
            else:
                geo_dims = ('y', 'x')
            flat_dims = [dim for dim in data.dims if dim not in geo_dims]
            if np.issubdtype(data.dtype, np.integer):
                kwargs['mask'] = data == data.attrs.get('_FillValue',
                                                        np.iinfo(data.dtype.type).max)
            else:
                kwargs['mask'] = data.isnull()
            kwargs['mask'] = kwargs['mask'].all(dim=flat_dims)

        cache_id = self.precompute(cache_dir=cache_dir, **kwargs)
        return self.compute(data, cache_id=cache_id, **kwargs)

    def _create_cache_filename(self, cache_dir=None, prefix='',
                               fmt='.zarr', **kwargs):
        """Create filename for the cached resampling parameters."""
        cache_dir = cache_dir or '.'
        hash_str = self.get_hash(**kwargs)

        return os.path.join(cache_dir, prefix + hash_str + fmt)
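

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# The minimal surface a new resampler built on ``BaseResampler`` has to
# provide: ``compute`` does the work, ``precompute`` is optional. This toy
# version just fills the (assumed 2D) target shape with a constant and is
# only meant to show the structure, not a real algorithm.
class _ExampleConstantResampler(BaseResampler):
    def compute(self, data, fill_const=0.0, **kwargs):
        # produce a lazily evaluated array matching the target geometry
        out = da.full(self.target_geo_def.shape, fill_const,
                      chunks=CHUNK_SIZE, dtype=data.dtype)
        res = xr.DataArray(out, dims=('y', 'x'), attrs=data.attrs.copy())
        return update_resampled_coords(data, res, self.target_geo_def)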


class KDTreeResampler(BaseResampler):
    """Resample using a KDTree-based nearest neighbor algorithm.

    This resampler implements on-disk caching when the `cache_dir` argument
    is provided to the `resample` method. This should provide significant
    performance improvements on consecutive resampling of geostationary data.
    It is not recommended to provide `cache_dir` when the `mask` keyword
    argument is provided to `precompute` which occurs by default for
    `SwathDefinition` source areas.

    Args:
        cache_dir (str): Long term storage directory for intermediate
                         results.
        mask (bool): Force resampled data's invalid pixel mask to be used
                     when searching for nearest neighbor pixels. By
                     default this is True for SwathDefinition source
                     areas and False for all other area definition types.
        radius_of_influence (float): Search radius cut off distance in meters
        epsilon (float): Allowed uncertainty in meters. Increasing uncertainty
                         reduces execution time.

    """

    def __init__(self, source_geo_def, target_geo_def):
        """Init KDTreeResampler."""
        super(KDTreeResampler, self).__init__(source_geo_def, target_geo_def)
        self.resampler = None
        self._index_caches = {}

    def precompute(self, mask=None, radius_of_influence=None, epsilon=0,
                   cache_dir=None, **kwargs):
        """Create a KDTree structure and store it for later use.

        Note: The `mask` keyword should be provided if geolocation may be
        valid where data points are invalid.

        """
        from pyresample.kd_tree import XArrayResamplerNN
        del kwargs
        if mask is not None and cache_dir is not None:
            LOG.warning("Mask and cache_dir both provided to nearest "
                        "resampler. Cached parameters are affected by "
                        "masked pixels. Will not cache results.")
            cache_dir = None

        if radius_of_influence is None and not hasattr(self.source_geo_def,
                                                       'geocentric_resolution'):
            warnings.warn("Upgrade 'pyresample' for a more accurate default 'radius_of_influence'.")
            try:
                radius_of_influence = self.source_geo_def.lons.resolution * 3
            except AttributeError:
                try:
                    radius_of_influence = max(abs(self.source_geo_def.pixel_size_x),
                                              abs(self.source_geo_def.pixel_size_y)) * 3
                except AttributeError:
                    radius_of_influence = 1000
            except TypeError:
                radius_of_influence = 10000

        kwargs = dict(source_geo_def=self.source_geo_def,
                      target_geo_def=self.target_geo_def,
                      radius_of_influence=radius_of_influence,
                      neighbours=1,
                      epsilon=epsilon)

        if self.resampler is None:
            # FIXME: We need to move all of this caching logic to pyresample
            self.resampler = XArrayResamplerNN(**kwargs)

        try:
            self.load_neighbour_info(cache_dir, mask=mask, **kwargs)
            LOG.debug("Read pre-computed kd-tree parameters")
        except IOError:
            LOG.debug("Computing kd-tree parameters")
            self.resampler.get_neighbour_info(mask=mask)
            self.save_neighbour_info(cache_dir, mask=mask, **kwargs)

    def _apply_cached_index(self, val, idx_name, persist=False):
        """Reassign resampler index attributes."""
        if isinstance(val, np.ndarray):
            val = da.from_array(val, chunks=CHUNK_SIZE)
        elif persist and isinstance(val, da.Array):
            val = val.persist()
        setattr(self.resampler, idx_name, val)
        return val

    def _check_numpy_cache(self, cache_dir, mask=None, **kwargs):
        """Check if there is a Numpy cache file and convert it to zarr."""
        fname_np = self._create_cache_filename(cache_dir,
                                               prefix='resample_lut-',
                                               mask=mask, fmt='.npz',
                                               **kwargs)
        fname_zarr = self._create_cache_filename(cache_dir, prefix='nn_lut-',
                                                 mask=mask, fmt='.zarr',
                                                 **kwargs)
        LOG.debug("Check if %s exists", fname_np)
        if os.path.exists(fname_np) and not os.path.exists(fname_zarr):
            import warnings
            warnings.warn("Using Numpy files as resampling cache is "
                          "deprecated.")
            LOG.warning("Converting resampling LUT from .npz to .zarr")
            zarr_out = xr.Dataset()
            with np.load(fname_np, 'r') as fid:
                for idx_name, coord in NN_COORDINATES.items():
                    zarr_out[idx_name] = (coord, fid[idx_name])

            # Write indices to Zarr file
            zarr_out.to_zarr(fname_zarr)

            LOG.debug("Resampling LUT saved to %s", fname_zarr)

    def load_neighbour_info(self, cache_dir, mask=None, **kwargs):
        """Read index arrays from either the in-memory or disk cache."""
        mask_name = getattr(mask, 'name', None)
        cached = {}
        self._check_numpy_cache(cache_dir, mask=mask_name, **kwargs)
        filename = self._create_cache_filename(cache_dir, prefix='nn_lut-',
                                               mask=mask_name, **kwargs)
        for idx_name in NN_COORDINATES.keys():
            if mask_name in self._index_caches:
                cached[idx_name] = self._apply_cached_index(
                    self._index_caches[mask_name][idx_name], idx_name)
            elif cache_dir:
                try:
                    fid = zarr.open(filename, 'r')
                    cache = np.array(fid[idx_name])
                    if idx_name == 'valid_input_index':
                        # valid input index array needs to be boolean
                        cache = cache.astype(np.bool)
                except ValueError:
                    raise IOError
                cache = self._apply_cached_index(cache, idx_name)
                cached[idx_name] = cache
            else:
                raise IOError
        self._index_caches[mask_name] = cached

    def save_neighbour_info(self, cache_dir, mask=None, **kwargs):
        """Cache resampler's index arrays if there is a cache dir."""
        if cache_dir:
            mask_name = getattr(mask, 'name', None)
            cache = self._read_resampler_attrs()
            filename = self._create_cache_filename(
                cache_dir, prefix='nn_lut-', mask=mask_name, **kwargs)
            LOG.info('Saving kd_tree neighbour info to %s', filename)
            zarr_out = xr.Dataset()
            for idx_name, coord in NN_COORDINATES.items():
                # update the cache in place with persisted dask arrays
                cache[idx_name] = self._apply_cached_index(cache[idx_name],
                                                           idx_name,
                                                           persist=True)
                zarr_out[idx_name] = (coord, cache[idx_name])

            # Write indices to Zarr file
            zarr_out.to_zarr(filename)

            self._index_caches[mask_name] = cache

    def _read_resampler_attrs(self):
        """Read certain attributes from the resampler for caching."""
        return {attr_name: getattr(self.resampler, attr_name)
                for attr_name in NN_COORDINATES.keys()}

    def compute(self, data, weight_funcs=None, fill_value=np.nan,
                with_uncert=False, **kwargs):
        """Resample data."""
        del kwargs
        LOG.debug("Resampling %s", str(data.name))
        res = self.resampler.get_sample_from_neighbour_info(data, fill_value)
        return update_resampled_coords(data, res, self.target_geo_def)
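

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# Direct use of the class above; the cache directory path is an assumption.
# On a second call with the same geometries the kd-tree indices are read
# back from the zarr cache instead of being recomputed.
def _example_nearest_with_cache(data, source_area, target_area):
    resampler = KDTreeResampler(source_area, target_area)
    return resampler.resample(data, cache_dir='/tmp/satpy_cache',
                              radius_of_influence=5000)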
""" kwargs.setdefault('mask_area', False) return super(EWAResampler, self).resample(*args, **kwargs) def _call_ll2cr(self, lons, lats, target_geo_def, swath_usage=0): """Wrap ll2cr() for handling dask delayed calls better.""" new_src = SwathDefinition(lons, lats) swath_points_in_grid, cols, rows = ll2cr(new_src, target_geo_def) # FIXME: How do we check swath usage/coverage if we only do this # per-block # # Determine if enough of the input swath was used # grid_name = getattr(self.target_geo_def, "name", "N/A") # fraction_in = swath_points_in_grid / float(lons.size) # swath_used = fraction_in > swath_usage # if not swath_used: # LOG.info("Data does not fit in grid %s because it only %f%% of " # "the swath is used" % # (grid_name, fraction_in * 100)) # raise RuntimeError("Data does not fit in grid %s" % (grid_name,)) # else: # LOG.debug("Data fits in grid %s and uses %f%% of the swath", # grid_name, fraction_in * 100) return np.stack([cols, rows], axis=0) def precompute(self, cache_dir=None, swath_usage=0, **kwargs): """Generate row and column arrays and store it for later use.""" if self.cache: # this resampler should be used for one SwathDefinition # no need to recompute ll2cr output again return None if kwargs.get('mask') is not None: LOG.warning("'mask' parameter has no affect during EWA " "resampling") del kwargs source_geo_def = self.source_geo_def target_geo_def = self.target_geo_def if cache_dir: LOG.warning("'cache_dir' is not used by EWA resampling") # Satpy/PyResample don't support dynamic grids out of the box yet lons, lats = source_geo_def.get_lonlats() if isinstance(lons, xr.DataArray): # get dask arrays lons = lons.data lats = lats.data # we are remapping to a static unchanging grid/area with all of # its parameters specified chunks = (2,) + lons.chunks res = da.map_blocks(self._call_ll2cr, lons, lats, target_geo_def, swath_usage, dtype=lons.dtype, chunks=chunks, new_axis=[0]) cols = res[0] rows = res[1] # save the dask arrays in the class instance cache # the on-disk cache will store the numpy arrays self.cache = { "rows": rows, "cols": cols, } return None def _call_fornav(self, cols, rows, target_geo_def, data, grid_coverage=0, **kwargs): """Wrap fornav() to run as a dask delayed.""" num_valid_points, res = fornav(cols, rows, target_geo_def, data, **kwargs) if isinstance(data, tuple): # convert 'res' from tuple of arrays to one array res = np.stack(res) num_valid_points = sum(num_valid_points) grid_covered_ratio = num_valid_points / float(res.size) grid_covered = grid_covered_ratio > grid_coverage if not grid_covered: msg = "EWA resampling only found %f%% of the grid covered " \ "(need %f%%)" % (grid_covered_ratio * 100, grid_coverage * 100) raise RuntimeError(msg) LOG.debug("EWA resampling found %f%% of the grid covered" % (grid_covered_ratio * 100)) return res def compute(self, data, cache_id=None, fill_value=0, weight_count=10000, weight_min=0.01, weight_distance_max=1.0, weight_delta_max=1.0, weight_sum_min=-1.0, maximum_weight_mode=False, grid_coverage=0, **kwargs): """Resample the data according to the precomputed X/Y coordinates.""" rows = self.cache["rows"] cols = self.cache["cols"] # if the data is scan based then check its metadata or the passed # kwargs otherwise assume the entire input swath is one large # "scanline" rows_per_scan = kwargs.get('rows_per_scan', data.attrs.get("rows_per_scan", data.shape[0])) if data.ndim == 3 and 'bands' in data.dims: data_in = tuple(data.sel(bands=band).data for band in data['bands']) elif data.ndim == 2: data_in = data.data 
else: raise ValueError("Unsupported data shape for EWA resampling.") res = dask.delayed(self._call_fornav)( cols, rows, self.target_geo_def, data_in, grid_coverage=grid_coverage, rows_per_scan=rows_per_scan, weight_count=weight_count, weight_min=weight_min, weight_distance_max=weight_distance_max, weight_delta_max=weight_delta_max, weight_sum_min=weight_sum_min, maximum_weight_mode=maximum_weight_mode) if isinstance(data_in, tuple): new_shape = (len(data_in),) + self.target_geo_def.shape else: new_shape = self.target_geo_def.shape data_arr = da.from_delayed(res, new_shape, data.dtype) # from delayed creates one large chunk, break it up a bit if we can data_arr = data_arr.rechunk([CHUNK_SIZE] * data_arr.ndim) if data.ndim == 3 and data.dims[0] == 'bands': dims = ('bands', 'y', 'x') elif data.ndim == 2: dims = ('y', 'x') else: dims = data.dims res = xr.DataArray(data_arr, dims=dims, attrs=data.attrs.copy()) return update_resampled_coords(data, res, self.target_geo_def) class BilinearResampler(BaseResampler): """Resample using bilinear interpolation. This resampler implements on-disk caching when the `cache_dir` argument is provided to the `resample` method. This should provide significant performance improvements on consecutive resampling of geostationary data. Args: cache_dir (str): Long term storage directory for intermediate results. radius_of_influence (float): Search radius cut off distance in meters epsilon (float): Allowed uncertainty in meters. Increasing uncertainty reduces execution time. reduce_data (bool): Reduce the input data to (roughly) match the target area. """ def __init__(self, source_geo_def, target_geo_def): """Init BilinearResampler.""" super(BilinearResampler, self).__init__(source_geo_def, target_geo_def) self.resampler = None def precompute(self, mask=None, radius_of_influence=50000, epsilon=0, reduce_data=True, cache_dir=False, **kwargs): """Create bilinear coefficients and store them for later use.""" from pyresample.bilinear.xarr import XArrayResamplerBilinear del kwargs del mask if self.resampler is None: kwargs = dict(source_geo_def=self.source_geo_def, target_geo_def=self.target_geo_def, radius_of_influence=radius_of_influence, neighbours=32, epsilon=epsilon) self.resampler = XArrayResamplerBilinear(**kwargs) try: self.load_bil_info(cache_dir, **kwargs) LOG.debug("Loaded bilinear parameters") except IOError: LOG.debug("Computing bilinear parameters") self.resampler.get_bil_info() LOG.debug("Saving bilinear parameters.") self.save_bil_info(cache_dir, **kwargs) def load_bil_info(self, cache_dir, **kwargs): """Load bilinear resampling info from cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, prefix='bil_lut-', **kwargs) try: fid = zarr.open(filename, 'r') for val in BIL_COORDINATES.keys(): cache = np.array(fid[val]) setattr(self.resampler, val, cache) except ValueError: raise IOError else: raise IOError def save_bil_info(self, cache_dir, **kwargs): """Save bilinear resampling info to cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, prefix='bil_lut-', **kwargs) # There are some old caches, move them out of the way if os.path.exists(filename): _move_existing_caches(cache_dir, filename) LOG.info('Saving BIL neighbour info to %s', filename) zarr_out = xr.Dataset() for idx_name, coord in BIL_COORDINATES.items(): var = getattr(self.resampler, idx_name) if isinstance(var, np.ndarray): var = da.from_array(var, chunks=CHUNK_SIZE) else: var = var.rechunk(CHUNK_SIZE) zarr_out[idx_name] = (coord, var) 


class BilinearResampler(BaseResampler):
    """Resample using bilinear interpolation.

    This resampler implements on-disk caching when the `cache_dir` argument
    is provided to the `resample` method. This should provide significant
    performance improvements on consecutive resampling of geostationary data.

    Args:
        cache_dir (str): Long term storage directory for intermediate
                         results.
        radius_of_influence (float): Search radius cut off distance in meters
        epsilon (float): Allowed uncertainty in meters. Increasing uncertainty
                         reduces execution time.
        reduce_data (bool): Reduce the input data to (roughly) match the
                            target area.

    """

    def __init__(self, source_geo_def, target_geo_def):
        """Init BilinearResampler."""
        super(BilinearResampler, self).__init__(source_geo_def,
                                                target_geo_def)
        self.resampler = None

    def precompute(self, mask=None, radius_of_influence=50000, epsilon=0,
                   reduce_data=True, cache_dir=False, **kwargs):
        """Create bilinear coefficients and store them for later use."""
        from pyresample.bilinear.xarr import XArrayResamplerBilinear

        del kwargs
        del mask

        if self.resampler is None:
            kwargs = dict(source_geo_def=self.source_geo_def,
                          target_geo_def=self.target_geo_def,
                          radius_of_influence=radius_of_influence,
                          neighbours=32,
                          epsilon=epsilon)

            self.resampler = XArrayResamplerBilinear(**kwargs)

            try:
                self.load_bil_info(cache_dir, **kwargs)
                LOG.debug("Loaded bilinear parameters")
            except IOError:
                LOG.debug("Computing bilinear parameters")
                self.resampler.get_bil_info()
                LOG.debug("Saving bilinear parameters.")
                self.save_bil_info(cache_dir, **kwargs)

    def load_bil_info(self, cache_dir, **kwargs):
        """Load bilinear resampling info from cache directory."""
        if cache_dir:
            filename = self._create_cache_filename(cache_dir,
                                                   prefix='bil_lut-',
                                                   **kwargs)
            try:
                fid = zarr.open(filename, 'r')
                for val in BIL_COORDINATES.keys():
                    cache = np.array(fid[val])
                    setattr(self.resampler, val, cache)
            except ValueError:
                raise IOError
        else:
            raise IOError

    def save_bil_info(self, cache_dir, **kwargs):
        """Save bilinear resampling info to cache directory."""
        if cache_dir:
            filename = self._create_cache_filename(cache_dir,
                                                   prefix='bil_lut-',
                                                   **kwargs)
            # There are some old caches, move them out of the way
            if os.path.exists(filename):
                _move_existing_caches(cache_dir, filename)

            LOG.info('Saving BIL neighbour info to %s', filename)
            zarr_out = xr.Dataset()
            for idx_name, coord in BIL_COORDINATES.items():
                var = getattr(self.resampler, idx_name)
                if isinstance(var, np.ndarray):
                    var = da.from_array(var, chunks=CHUNK_SIZE)
                else:
                    var = var.rechunk(CHUNK_SIZE)
                zarr_out[idx_name] = (coord, var)

            zarr_out.to_zarr(filename)

    def compute(self, data, fill_value=None, **kwargs):
        """Resample the given data using bilinear interpolation."""
        del kwargs

        if fill_value is None:
            fill_value = data.attrs.get('_FillValue')
        target_shape = self.target_geo_def.shape

        res = self.resampler.get_sample_from_bil_info(data,
                                                      fill_value=fill_value,
                                                      output_shape=target_shape)

        return update_resampled_coords(data, res, self.target_geo_def)
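

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# Bilinear resampling with a wider search radius and on-disk caching of the
# interpolation coefficients; the parameter values are assumptions.
def _example_bilinear_resample(scn):
    return scn.resample('euro4', resampler='bilinear',
                        radius_of_influence=100000,
                        cache_dir='/tmp/satpy_cache')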
raise ValueError("Can't aggregrate (reduce) data arrays with " "more than 2 dimensions.") if not (x_size.is_integer() and y_size.is_integer()): raise ValueError("Aggregation factors are not integers") for agg_size, chunks in zip([y_size, x_size], d.chunks): for chunk_size in chunks: if chunk_size % agg_size != 0: raise ValueError("Aggregation requires arrays with " "shapes and chunks divisible by the " "factor") new_chunks = (tuple(int(x / y_size) for x in d.chunks[0]), tuple(int(x / x_size) for x in d.chunks[1])) return da.core.map_blocks(_mean, d, y_size, x_size, dtype=d.dtype, chunks=new_chunks) @classmethod def expand_reduce(cls, d_arr, repeats): """Expand reduce.""" if not isinstance(d_arr, da.Array): d_arr = da.from_array(d_arr, chunks=CHUNK_SIZE) if all(x == 1 for x in repeats.values()): return d_arr elif all(x >= 1 for x in repeats.values()): # rechunk so new chunks are the same size as old chunks c_size = max(x[0] for x in d_arr.chunks) def _calc_chunks(c, c_size): whole_chunks = [c_size] * int(sum(c) // c_size) remaining = sum(c) - sum(whole_chunks) if remaining: whole_chunks += [remaining] return tuple(whole_chunks) new_chunks = [_calc_chunks(x, int(c_size // repeats[axis])) for axis, x in enumerate(d_arr.chunks)] d_arr = d_arr.rechunk(new_chunks) for axis, factor in repeats.items(): if not factor.is_integer(): raise ValueError("Expand factor must be a whole number") d_arr = da.repeat(d_arr, int(factor), axis=axis) return d_arr elif all(x <= 1 for x in repeats.values()): # reduce y_size = 1. / repeats[0] x_size = 1. / repeats[1] return cls.aggregate(d_arr, y_size, x_size) else: raise ValueError("Must either expand or reduce in both " "directions") def compute(self, data, expand=True, **kwargs): """Resample data with NativeResampler.""" if isinstance(self.target_geo_def, (list, tuple)): # find the highest/lowest area among the provided test_func = max if expand else min target_geo_def = test_func(self.target_geo_def, key=lambda x: x.shape) else: target_geo_def = self.target_geo_def # convert xarray backed with numpy array to dask array if 'x' not in data.dims or 'y' not in data.dims: if data.ndim not in [2, 3]: raise ValueError("Can only handle 2D or 3D arrays without dimensions.") # assume rows is the second to last axis y_axis = data.ndim - 2 x_axis = data.ndim - 1 else: y_axis = data.dims.index('y') x_axis = data.dims.index('x') out_shape = target_geo_def.shape in_shape = data.shape y_repeats = out_shape[0] / float(in_shape[y_axis]) x_repeats = out_shape[1] / float(in_shape[x_axis]) repeats = {axis_idx: 1. 

    def compute(self, data, expand=True, **kwargs):
        """Resample data with NativeResampler."""
        if isinstance(self.target_geo_def, (list, tuple)):
            # find the highest/lowest area among the provided
            test_func = max if expand else min
            target_geo_def = test_func(self.target_geo_def,
                                       key=lambda x: x.shape)
        else:
            target_geo_def = self.target_geo_def

        # convert xarray backed with numpy array to dask array
        if 'x' not in data.dims or 'y' not in data.dims:
            if data.ndim not in [2, 3]:
                raise ValueError("Can only handle 2D or 3D arrays without dimensions.")
            # assume rows is the second to last axis
            y_axis = data.ndim - 2
            x_axis = data.ndim - 1
        else:
            y_axis = data.dims.index('y')
            x_axis = data.dims.index('x')

        out_shape = target_geo_def.shape
        in_shape = data.shape
        y_repeats = out_shape[0] / float(in_shape[y_axis])
        x_repeats = out_shape[1] / float(in_shape[x_axis])
        repeats = {axis_idx: 1. for axis_idx in range(data.ndim)
                   if axis_idx not in [y_axis, x_axis]}
        repeats[y_axis] = y_repeats
        repeats[x_axis] = x_repeats

        d_arr = self.expand_reduce(data.data, repeats)

        new_data = xr.DataArray(d_arr, dims=data.dims)
        return update_resampled_coords(data, new_data, target_geo_def)
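

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# ``expand_reduce`` at work on a plain dask array: a factor above 1 repeats
# pixels, a factor below 1 averages them down. The tiny array is made up.
def _example_expand_reduce():
    arr = da.ones((4, 4), chunks=2)
    expanded = NativeResampler.expand_reduce(arr, {0: 2.0, 1: 2.0})  # 8x8
    reduced = NativeResampler.expand_reduce(arr, {0: 0.5, 1: 0.5})   # 2x2
    return expanded, reduced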


class BucketResamplerBase(BaseResampler):
    """Base class for bucket resampling which implements averaging."""

    def __init__(self, source_geo_def, target_geo_def):
        """Initialize bucket resampler."""
        super(BucketResamplerBase, self).__init__(source_geo_def,
                                                  target_geo_def)
        self.resampler = None

    def precompute(self, **kwargs):
        """Create X and Y indices and store them for later use."""
        from pyresample import bucket

        LOG.debug("Initializing bucket resampler.")
        source_lons, source_lats = self.source_geo_def.get_lonlats(
            chunks=CHUNK_SIZE)
        self.resampler = bucket.BucketResampler(self.target_geo_def,
                                                source_lons,
                                                source_lats)

    def compute(self, data, **kwargs):
        """Call the resampling."""
        raise NotImplementedError("Use the sub-classes")

    def resample(self, data, **kwargs):
        """Resample `data` by calling `precompute` and `compute` methods.

        Args:
            data (xarray.DataArray): Data to be resampled

        Returns (xarray.DataArray): Data resampled to the target area

        """
        self.precompute(**kwargs)
        attrs = data.attrs.copy()
        data_arr = data.data
        if data.ndim == 3 and data.dims[0] == 'bands':
            dims = ('bands', 'y', 'x')
        # Both one and two dimensional input data results in 2D output
        elif data.ndim in (1, 2):
            dims = ('y', 'x')
        else:
            dims = data.dims
        result = self.compute(data_arr, **kwargs)
        coords = {}
        if 'bands' in data.coords:
            coords['bands'] = data.coords['bands']
        # Fractions are returned in a dict
        elif isinstance(result, dict):
            coords['categories'] = sorted(result.keys())
            dims = ('categories', 'y', 'x')
            new_result = []
            for cat in coords['categories']:
                new_result.append(result[cat])
            result = da.stack(new_result)
        if result.ndim > len(dims):
            result = da.squeeze(result)

        # Adjust some attributes
        if "BucketFraction" in str(self):
            attrs['units'] = ''
            attrs['calibration'] = ''
            attrs['standard_name'] = 'area_fraction'
        elif "BucketCount" in str(self):
            attrs['units'] = ''
            attrs['calibration'] = ''
            attrs['standard_name'] = 'number_of_observations'

        result = xr.DataArray(result, dims=dims, coords=coords,
                              attrs=attrs)

        return result


class BucketAvg(BucketResamplerBase):
    """Class for averaging bucket resampling.

    Bucket resampling calculates the average of all the values that
    are closest to each bin and inside the target area.

    Parameters
    ----------
    fill_value : float (default: np.nan)
        Fill value for missing data
    mask_all_nan : boolean (default: False)
        Mask all locations with all-NaN values

    """

    def compute(self, data, fill_value=np.nan, mask_all_nan=False, **kwargs):
        """Call the resampling."""
        results = []
        if data.ndim == 3:
            for i in range(data.shape[0]):
                res = self.resampler.get_average(data[i, :, :],
                                                 fill_value=fill_value,
                                                 mask_all_nan=mask_all_nan)
                results.append(res)
        else:
            res = self.resampler.get_average(data, fill_value=fill_value,
                                             mask_all_nan=mask_all_nan)
            results.append(res)

        return da.stack(results)


class BucketSum(BucketResamplerBase):
    """Class for bucket resampling which implements accumulation (sum).

    This resampler calculates the cumulative sum of all the values
    that are closest to each bin and inside the target area.

    Parameters
    ----------
    fill_value : float (default: np.nan)
        Fill value for missing data
    mask_all_nan : boolean (default: False)
        Mask all locations with all-NaN values

    """

    def compute(self, data, mask_all_nan=False, **kwargs):
        """Call the resampling."""
        LOG.debug("Resampling %s", str(data.name))
        results = []
        if data.ndim == 3:
            for i in range(data.shape[0]):
                res = self.resampler.get_sum(data[i, :, :],
                                             mask_all_nan=mask_all_nan)
                results.append(res)
        else:
            res = self.resampler.get_sum(data, mask_all_nan=mask_all_nan)
            results.append(res)

        return da.stack(results)


class BucketCount(BucketResamplerBase):
    """Class for bucket resampling which implements hit-counting.

    This resampler calculates the number of occurrences of the input
    data closest to each bin and inside the target area.

    """

    def compute(self, data, **kwargs):
        """Call the resampling."""
        LOG.debug("Resampling %s", str(data.name))
        results = []
        if data.ndim == 3:
            for _i in range(data.shape[0]):
                res = self.resampler.get_count()
                results.append(res)
        else:
            res = self.resampler.get_count()
            results.append(res)

        return da.stack(results)


class BucketFraction(BucketResamplerBase):
    """Class for bucket resampling to compute category fractions.

    This resampler calculates the fraction of occurrences of the input
    data per category.

    """

    def compute(self, data, fill_value=np.nan, categories=None, **kwargs):
        """Call the resampling."""
        LOG.debug("Resampling %s", str(data.name))
        if data.ndim > 2:
            raise ValueError("BucketFraction not implemented for 3D datasets")

        result = self.resampler.get_fractions(data, categories=categories,
                                              fill_value=fill_value)
        return result


# TODO: move this to pyresample.resampler
RESAMPLERS = {"kd_tree": KDTreeResampler,
              "nearest": KDTreeResampler,
              "ewa": EWAResampler,
              "bilinear": BilinearResampler,
              "native": NativeResampler,
              "gradient_search": GradientSearchResampler,
              "bucket_avg": BucketAvg,
              "bucket_sum": BucketSum,
              "bucket_count": BucketCount,
              "bucket_fraction": BucketFraction,
              }

if PRBaseResampler is None:
    PRBaseResampler = BaseResampler


# TODO: move this to pyresample
def prepare_resampler(source_area, destination_area, resampler=None,
                      **resample_kwargs):
    """Instantiate and return a resampler."""
    if resampler is None:
        LOG.info("Using default KDTree resampler")
        resampler = 'kd_tree'

    if isinstance(resampler, (BaseResampler, PRBaseResampler)):
        raise ValueError("Trying to create a resampler when one already "
                         "exists.")
    elif isinstance(resampler, str):
        resampler_class = RESAMPLERS.get(resampler, None)
        if resampler_class is None:
            raise KeyError("Resampler '%s' not available" % resampler)
    else:
        resampler_class = resampler

    key = (resampler_class,
           source_area, destination_area,
           hash_dict(resample_kwargs).hexdigest())
    try:
        resampler_instance = resamplers_cache[key]
    except KeyError:
        resampler_instance = resampler_class(source_area, destination_area)
        resamplers_cache[key] = resampler_instance
    return key, resampler_instance


# TODO: move this to pyresample
def resample(source_area, data, destination_area,
             resampler=None, **kwargs):
    """Do the resampling."""
    if not isinstance(resampler, (BaseResampler, PRBaseResampler)):
        # we don't use the first argument (cache key)
        _, resampler_instance = prepare_resampler(source_area,
                                                  destination_area,
                                                  resampler)
    else:
        resampler_instance = resampler

    if isinstance(data, list):
        res = [resampler_instance.resample(ds, **kwargs) for ds in data]
    else:
        res = resampler_instance.resample(data, **kwargs)

    return res


def get_fill_value(dataset):
    """Get the fill value of the *dataset*, defaulting to np.nan."""
    if np.issubdtype(dataset.dtype, np.integer):
        return dataset.attrs.get('_FillValue', np.nan)
    return np.nan


def resample_dataset(dataset, destination_area, **kwargs):
    """Resample *dataset* and return the resampled version.

    Args:
        dataset (xarray.DataArray): Data to be resampled.
        destination_area: The destination onto which to project the data,
            either a full blown area definition or a string corresponding to
            the name of the area as defined in the area file.
        **kwargs: The extra parameters to pass to the resampler objects.

    Returns:
        A resampled DataArray with updated ``.attrs["area"]`` field. The dtype
        of the array is preserved.

    """
    # call the projection stuff here
    try:
        source_area = dataset.attrs["area"]
    except KeyError:
        LOG.info("Cannot reproject dataset %s, missing area info",
                 dataset.attrs['name'])

        return dataset

    fill_value = kwargs.pop('fill_value', get_fill_value(dataset))
    new_data = resample(source_area, dataset, destination_area,
                        fill_value=fill_value, **kwargs)
    new_attrs = new_data.attrs
    new_data.attrs = dataset.attrs.copy()
    new_data.attrs.update(new_attrs)
    new_data.attrs.update(area=destination_area)

    return new_data
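

# --- Illustrative sketch (editor's addition, not part of upstream satpy) --
# Putting it together with ``resample_dataset``: reproject one DataArray
# (whose ``.attrs['area']`` was set by a reader) to a named area. The area
# name and keyword values are assumptions.
def _example_resample_dataset(swath_data_arr):
    target = get_area_def('euro4')
    return resample_dataset(swath_data_arr, target,
                            resampler='nearest', radius_of_influence=5000)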
satpy-0.20.0/satpy/scene.py000066400000000000000000001676031362525524100155600ustar00rootroot00000000000000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Scene object to hold satellite data."""

import logging
import os

from satpy.composites import CompositorLoader, IncompatibleAreas
from satpy.config import get_environ_config_dir
from satpy.dataset import (DatasetID, MetadataObject, dataset_walker,
                           replace_anc, combine_metadata)
from satpy.node import DependencyTree
from satpy.readers import DatasetDict, load_readers
from satpy.resample import (resample_dataset,
                            prepare_resampler, get_area_def)
from satpy.writers import load_writer
from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition
import xarray as xr
from xarray import DataArray
import numpy as np
import six

try:
    import configparser
except ImportError:
    from six.moves import configparser  # noqa: F401

LOG = logging.getLogger(__name__)


class DelayedGeneration(KeyError):
    """Mark that a dataset can't be generated without further modification."""

    pass


class Scene(MetadataObject):
    """The Almighty Scene Class.

    Example usage::

        from satpy import Scene
        from glob import glob

        # create readers and open files
        scn = Scene(filenames=glob('/path/to/files/*'), reader='viirs_sdr')

        # load datasets from input files
        scn.load(['I01', 'I02'])

        # resample from satellite native geolocation to builtin 'eurol' Area
        new_scn = scn.resample('eurol')

        # save all resampled datasets to geotiff files in the current directory
        new_scn.save_datasets()

    """

    def __init__(self, filenames=None, reader=None, filter_parameters=None,
                 reader_kwargs=None, ppp_config_dir=None, base_dir=None,
                 sensor=None, start_time=None, end_time=None, area=None):
        """Initialize Scene with Reader and Compositor objects.

        To load data, `filenames` and preferably `reader` must be specified.
        If `filenames` is provided without `reader` then the available readers
        will be searched for a Reader that can support the provided files.
        This can take a considerable amount of time, so it is recommended that
        `reader` always be provided. Note that without `filenames` the Scene is
        created with no Readers available, requiring Datasets to be added
        manually::

            scn = Scene()
            scn['my_dataset'] = Dataset(my_data_array, **my_info)

        Args:
            filenames (iterable or dict): A sequence of files that will be
                used to load data from. A ``dict`` object should map reader
                names to a list of filenames for that reader.
            reader (str or list): The name of the reader to use for loading
                the data or a list of names.
            filter_parameters (dict): Specify loaded file filtering
                parameters. Shortcut for
                `reader_kwargs['filter_parameters']`.
            reader_kwargs (dict): Keyword arguments to pass to specific
                reader instances.
            ppp_config_dir (str): The directory containing the configuration
                files for satpy.
            base_dir (str): (DEPRECATED) The directory to search for files
                containing the data to load. If *filenames* is also provided,
                this is ignored.
            sensor (list or str): (DEPRECATED: Use `find_files_and_readers`
                function) Limit used files by provided sensors.
            area (AreaDefinition): (DEPRECATED: Use `filter_parameters`) Limit
                used files by geographic area.
            start_time (datetime): (DEPRECATED: Use `filter_parameters`) Limit
                used files by starting time.
            end_time (datetime): (DEPRECATED: Use `filter_parameters`) Limit
                used files by ending time.

        """
        super(Scene, self).__init__()
        if ppp_config_dir is None:
            ppp_config_dir = get_environ_config_dir()
        # Set the PPP_CONFIG_DIR in the environment in case it's used elsewhere in pytroll
        LOG.debug("Setting 'PPP_CONFIG_DIR' to '%s'", ppp_config_dir)
        os.environ["PPP_CONFIG_DIR"] = self.ppp_config_dir = ppp_config_dir

        if not filenames and (start_time or end_time or base_dir):
            import warnings
            warnings.warn(
                "Deprecated: Use " +
                "'from satpy import find_files_and_readers' to find files")
            from satpy import find_files_and_readers
            filenames = find_files_and_readers(
                start_time=start_time,
                end_time=end_time,
                base_dir=base_dir,
                reader=reader,
                sensor=sensor,
                ppp_config_dir=self.ppp_config_dir,
                reader_kwargs=reader_kwargs,
            )
        elif start_time or end_time or area:
            import warnings
            warnings.warn(
                "Deprecated: Use " +
                "'filter_parameters' to filter loaded files by 'start_time', " +
                "'end_time', or 'area'.")
            fp = filter_parameters if filter_parameters else {}
            fp.update({
                'start_time': start_time,
                'end_time': end_time,
                'area': area,
            })
            filter_parameters = fp
        if filter_parameters:
            if reader_kwargs is None:
                reader_kwargs = {}
            else:
                reader_kwargs = reader_kwargs.copy()
            reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters)

        if filenames and isinstance(filenames, str):
            raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])")

        self.readers = self.create_reader_instances(filenames=filenames,
                                                    reader=reader,
                                                    reader_kwargs=reader_kwargs)
        self.attrs.update(self._compute_metadata_from_readers())
        self.datasets = DatasetDict()
        self.cpl = CompositorLoader(self.ppp_config_dir)
        comps, mods = self.cpl.load_compositors(self.attrs['sensor'])
        self.wishlist = set()
        self.dep_tree = DependencyTree(self.readers, comps, mods)
        self.resamplers = {}

    def _ipython_key_completions_(self):
        return [x.name for x in self.datasets.keys()]

    def _compute_metadata_from_readers(self):
        """Determine pieces of metadata from the readers loaded."""
        mda = {'sensor': self._get_sensor_names()}

        # overwrite the request start/end times with actual loaded data limits
        if self.readers:
            mda['start_time'] = min(x.start_time
                                    for x in self.readers.values())
            mda['end_time'] = max(x.end_time
                                  for x in self.readers.values())
        return mda

    def _get_sensor_names(self):
        """Join the sensors from all loaded readers."""
        # if the user didn't tell us what sensors to work with, let's figure it
        # out
        if not self.attrs.get('sensor'):
            # reader finder could return multiple readers
            return set([sensor for reader_instance in self.readers.values()
                        for sensor in reader_instance.sensor_names])
        elif not isinstance(self.attrs['sensor'], (set, tuple, list)):
            return set([self.attrs['sensor']])
        else:
            return set(self.attrs['sensor'])

    def create_reader_instances(self,
                                filenames=None,
                                reader=None,
                                reader_kwargs=None):
        """Find readers and return their instances."""
        return load_readers(filenames=filenames,
                            reader=reader,
                            reader_kwargs=reader_kwargs,
                            ppp_config_dir=self.ppp_config_dir)

    @property
    def start_time(self):
        """Return the start time of the file."""
        return self.attrs['start_time']

    @property
    def end_time(self):
        """Return the end time of the file."""
        return self.attrs['end_time']

    @property
    def missing_datasets(self):
        """Set of DatasetIDs that have not been successfully loaded."""
        return set(self.wishlist) - set(self.datasets.keys())

    def _compare_areas(self, datasets=None, compare_func=max):
        """Compare areas for the provided datasets.

        Args:
            datasets (iterable): Datasets whose areas will be compared. Can
                                 be either `xarray.DataArray` objects or
                                 identifiers to get the DataArrays from the
                                 current Scene. Defaults to all datasets.
                                 This can also be a series of area objects,
                                 typically AreaDefinitions.
            compare_func (callable): `min` or `max` or other function used to
                                     compare the dataset's areas.

        """
        if datasets is None:
            datasets = list(self.values())

        areas = []
        for ds in datasets:
            if isinstance(ds, BaseDefinition):
                areas.append(ds)
                continue
            elif not isinstance(ds, DataArray):
                ds = self[ds]
            area = ds.attrs.get('area')
            areas.append(area)

        areas = [x for x in areas if x is not None]
        if not areas:
            raise ValueError("No dataset areas available")

        if not all(isinstance(x, type(areas[0]))
                   for x in areas[1:]):
            raise ValueError("Can't compare areas of different types")
        elif isinstance(areas[0], AreaDefinition):
            first_pstr = areas[0].proj_str
            if not all(ad.proj_str == first_pstr for ad in areas[1:]):
                raise ValueError("Can't compare areas with different "
                                 "projections.")

            def key_func(ds):
                return 1. / ds.pixel_size_x
        else:
            def key_func(ds):
                return ds.shape

        # find the highest/lowest area among the provided
        return compare_func(areas, key=key_func)

    def max_area(self, datasets=None):
        """Get highest resolution area for the provided datasets.

        Args:
            datasets (iterable): Datasets whose areas will be compared. Can
                                 be either `xarray.DataArray` objects or
                                 identifiers to get the DataArrays from the
                                 current Scene. Defaults to all datasets.

        """
        return self._compare_areas(datasets=datasets, compare_func=max)

    def min_area(self, datasets=None):
        """Get lowest resolution area for the provided datasets.

        Args:
            datasets (iterable): Datasets whose areas will be compared. Can
                                 be either `xarray.DataArray` objects or
                                 identifiers to get the DataArrays from the
                                 current Scene. Defaults to all datasets.

        """
        return self._compare_areas(datasets=datasets, compare_func=min)
This can be for all readers loaded by this Scene or just for ``reader_name`` if specified. Available datasets are determined by what each individual reader can load. This is normally determined by what files are needed to load a dataset and what files have been provided to the scene/reader. Some readers dynamically determine what is available based on the contents of the files provided. Returns: list of available DatasetIDs """ try: if reader_name: readers = [self.readers[reader_name]] else: readers = self.readers.values() except (AttributeError, KeyError): raise KeyError("No reader '%s' found in scene" % reader_name) available_datasets = sorted([dataset_id for reader in readers for dataset_id in reader.available_dataset_ids]) if composites: available_datasets += sorted(self.available_composite_ids()) return available_datasets def available_dataset_names(self, reader_name=None, composites=False): """Get the list of the names of the available datasets.""" return sorted(set(x.name for x in self.available_dataset_ids( reader_name=reader_name, composites=composites))) def all_dataset_ids(self, reader_name=None, composites=False): """Get IDs of all datasets from loaded readers or `reader_name` if specified. Returns: list of all DatasetIDs """ try: if reader_name: readers = [self.readers[reader_name]] else: readers = self.readers.values() except (AttributeError, KeyError): raise KeyError("No reader '%s' found in scene" % reader_name) all_datasets = [dataset_id for reader in readers for dataset_id in reader.all_dataset_ids] if composites: all_datasets += self.all_composite_ids() return all_datasets def all_dataset_names(self, reader_name=None, composites=False): """Get all known dataset names configured for the loaded readers. Note that some readers dynamically determine what datasets are known by reading the contents of the files they are provided. This means that the list of datasets returned by this method may change depending on what files are provided, even if a product/dataset is a "standard" product for a particular reader.
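Example (illustrative only -- the names returned depend entirely on which reader is loaded and which files were provided; ``my_files`` is a hypothetical list of ABI L1b files)::

    >>> scn = Scene(filenames=my_files, reader='abi_l1b')
    >>> scn.all_dataset_names()
    ['C01', 'C02', ..., 'C16']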
""" return sorted(set(x.name for x in self.all_dataset_ids( reader_name=reader_name, composites=composites))) def _check_known_composites(self, available_only=False): """Create new dependency tree and check what composites we know about.""" # Note if we get compositors from the dep tree then it will include # modified composites which we don't want sensor_comps, mods = self.cpl.load_compositors(self.attrs['sensor']) # recreate the dependency tree so it doesn't interfere with the user's # wishlist from self.dep_tree dep_tree = DependencyTree(self.readers, sensor_comps, mods, available_only=True) # ignore inline compositor dependencies starting with '_' comps = (comp for comp_dict in sensor_comps.values() for comp in comp_dict.keys() if not comp.name.startswith('_')) # make sure that these composites are even create-able by these readers all_comps = set(comps) # find_dependencies will update the all_comps set with DatasetIDs dep_tree.find_dependencies(all_comps) available_comps = set(x.name for x in dep_tree.trunk()) # get rid of modified composites that are in the trunk return sorted(available_comps & set(all_comps)) def available_composite_ids(self): """Get names of composites that can be generated from the available datasets.""" return self._check_known_composites(available_only=True) def available_composite_names(self): """All configured composites known to this Scene.""" return sorted(set(x.name for x in self.available_composite_ids())) def all_composite_ids(self): """Get all IDs for configured composites.""" return self._check_known_composites() def all_composite_names(self): """Get all names for all configured composites.""" return sorted(set(x.name for x in self.all_composite_ids())) def all_modifier_names(self): """Get names of configured modifier objects.""" return sorted(self.dep_tree.modifiers.keys()) def __str__(self): """Generate a nice print out for the scene.""" res = (str(proj) for proj in self.datasets.values()) return "\n".join(res) def __iter__(self): """Iterate over the datasets.""" for x in self.datasets.values(): yield x def iter_by_area(self): """Generate datasets grouped by Area. :return: generator of (area_obj, list of dataset objects) """ datasets_by_area = {} for ds in self: a = ds.attrs.get('area') datasets_by_area.setdefault(a, []).append( DatasetID.from_dict(ds.attrs)) return datasets_by_area.items() def keys(self, **kwargs): """Get DatasetID keys for the underlying data container.""" return self.datasets.keys(**kwargs) def values(self): """Get values for the underlying data container.""" return self.datasets.values() def copy(self, datasets=None): """Create a copy of the Scene including dependency information. Args: datasets (list, tuple): `DatasetID` objects for the datasets to include in the new Scene object. 
""" new_scn = self.__class__() new_scn.attrs = self.attrs.copy() new_scn.dep_tree = self.dep_tree.copy() for ds_id in (datasets or self.keys()): # NOTE: Must use `.datasets` or side effects of `__setitem__` # could hurt us with regards to the wishlist new_scn.datasets[ds_id] = self[ds_id] if not datasets: new_scn.wishlist = self.wishlist.copy() else: new_scn.wishlist = set([DatasetID.from_dict(ds.attrs) for ds in new_scn]) return new_scn @property def all_same_area(self): """All contained data arrays are on the same area.""" all_areas = [x.attrs.get('area', None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0] == x for x in all_areas[1:]) @property def all_same_proj(self): """All contained data array are in the same projection.""" all_areas = [x.attrs.get('area', None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0].proj_str == x.proj_str for x in all_areas[1:]) def _slice_area_from_bbox(self, src_area, dst_area, ll_bbox=None, xy_bbox=None): """Slice the provided area using the bounds provided.""" if ll_bbox is not None: dst_area = AreaDefinition( 'crop_area', 'crop_area', 'crop_latlong', {'proj': 'latlong'}, 100, 100, ll_bbox) elif xy_bbox is not None: dst_area = AreaDefinition( 'crop_area', 'crop_area', 'crop_xy', src_area.proj_dict, src_area.x_size, src_area.y_size, xy_bbox) x_slice, y_slice = src_area.get_area_slices(dst_area) return src_area[y_slice, x_slice], y_slice, x_slice def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): """Slice scene in-place for the datasets specified.""" new_datasets = {} datasets = (self[ds_id] for ds_id in dataset_ids) for ds, parent_ds in dataset_walker(datasets): ds_id = DatasetID.from_dict(ds.attrs) # handle ancillary variables pres = None if parent_ds is not None: pres = new_datasets[DatasetID.from_dict(parent_ds.attrs)] if ds_id in new_datasets: replace_anc(ds, pres) continue if area_only and ds.attrs.get('area') is None: new_datasets[ds_id] = ds replace_anc(ds, pres) continue if not isinstance(slice_key, dict): # match dimension name to slice object key = dict(zip(ds.dims, slice_key)) else: key = slice_key new_ds = ds.isel(**key) if new_area is not None: new_ds.attrs['area'] = new_area new_datasets[ds_id] = new_ds if parent_ds is None: # don't use `__setitem__` because we don't want this to # affect the existing wishlist/dep tree self.datasets[ds_id] = new_ds else: replace_anc(new_ds, pres) def slice(self, key): """Slice Scene by dataset index. .. note:: DataArrays that do not have an ``area`` attribute will not be sliced. """ if not self.all_same_area: raise RuntimeError("'Scene' has different areas and cannot " "be usefully sliced.") # slice new_scn = self.copy() new_scn.wishlist = self.wishlist for area, dataset_ids in self.iter_by_area(): if area is not None: # assume dimensions for area are y and x one_ds = self[dataset_ids[0]] area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ['y', 'x']) new_area = area[area_key] else: new_area = None new_scn._slice_datasets(dataset_ids, key, new_area) return new_scn def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): """Crop Scene to a specific Area boundary or bounding box. Args: area (AreaDefinition): Area to crop the current Scene to ll_bbox (tuple, list): 4-element tuple where values are in lon/lat degrees. Elements are ``(xmin, ymin, xmax, ymax)`` where X is longitude and Y is latitude. 
xy_bbox (tuple, list): Same as `ll_bbox` but elements are in projection units. dataset_ids (iterable): DatasetIDs to include in the returned `Scene`. Defaults to all datasets. This method will attempt to intelligently slice the data to preserve relationships between datasets. For example, if we are cropping two DataArrays of 500m and 1000m pixel resolution then this method will assume that exactly 4 pixels of the 500m array cover the same geographic area as a single 1000m pixel. It handles these cases based on the shapes of the input arrays, adjusting the slicing indexes accordingly. This method will have trouble handling cases where data arrays seem related but don't cover the same geographic area, or where the coarsest resolution data is not related to the other arrays, which are related to each other. It can be useful to follow cropping with a call to the native resampler to resolve all datasets to the same resolution and compute any composites that could not be generated previously:: >>> cropped_scn = scn.crop(ll_bbox=(-105., 40., -95., 50.)) >>> remapped_scn = cropped_scn.resample(resampler='native') .. note:: The `resample` method automatically crops input data before resampling to save time/memory. """ if len([x for x in [area, ll_bbox, xy_bbox] if x is not None]) != 1: raise ValueError("One and only one of 'area', 'll_bbox', " "or 'xy_bbox' can be specified.") new_scn = self.copy(datasets=dataset_ids) if not new_scn.all_same_proj and xy_bbox is not None: raise ValueError("Can't crop when dataset_ids are not all on the " "same projection.") # get the lowest resolution area, use it as the base of the slice # this makes sure that the other areas *should* be a consistent factor min_area = new_scn.min_area() if isinstance(area, str): area = get_area_def(area) new_min_area, min_y_slice, min_x_slice = self._slice_area_from_bbox( min_area, area, ll_bbox, xy_bbox) new_target_areas = {} for src_area, dataset_ids in new_scn.iter_by_area(): if src_area is None: for ds_id in dataset_ids: new_scn.datasets[ds_id] = self[ds_id] continue y_factor, y_remainder = np.divmod(float(src_area.shape[0]), min_area.shape[0]) x_factor, x_remainder = np.divmod(float(src_area.shape[1]), min_area.shape[1]) y_factor = int(y_factor) x_factor = int(x_factor) if y_remainder == 0 and x_remainder == 0: y_slice = slice(min_y_slice.start * y_factor, min_y_slice.stop * y_factor) x_slice = slice(min_x_slice.start * x_factor, min_x_slice.stop * x_factor) new_area = src_area[y_slice, x_slice] slice_key = {'y': y_slice, 'x': x_slice} new_scn._slice_datasets(dataset_ids, slice_key, new_area) else: new_target_areas[src_area] = self._slice_area_from_bbox( src_area, area, ll_bbox, xy_bbox ) return new_scn def aggregate(self, dataset_ids=None, boundary='exact', side='left', func='mean', **dim_kwargs): """Create an aggregated version of the Scene. Args: dataset_ids (iterable): DatasetIDs to include in the returned `Scene`. Defaults to all datasets. func (string): Function to apply on each aggregation window. One of 'mean', 'sum', 'min', 'max', 'median', 'argmin', 'argmax', 'prod', 'std', 'var'. 'mean' is the default. boundary (str): Only 'exact' is currently implemented; other modes raise NotImplementedError. side (str): Passed through to :meth:`xarray.DataArray.coarsen` ('left' by default). dim_kwargs: The sizes of the windows to aggregate. Returns: A new aggregated scene See also: xarray.DataArray.coarsen Example: `scn.aggregate(func='min', x=2, y=2)` will aggregate 2x2 pixels by applying the `min` function.
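A slightly fuller sketch (hypothetical values; assumes ``scn`` holds data on AreaDefinition-based grids)::

    >>> coarse_scn = scn.aggregate(x=4, y=4, func='mean')
    >>> # every dataset is now 16x smaller and the 'area' and 'resolution'
    >>> # attributes have been updated to describe the aggregated grid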
""" new_scn = self.copy(datasets=dataset_ids) for src_area, ds_ids in new_scn.iter_by_area(): if src_area is None: for ds_id in ds_ids: new_scn.datasets[ds_id] = self[ds_id] continue if boundary != 'exact': raise NotImplementedError("boundary modes appart from 'exact' are not implemented yet.") target_area = src_area.aggregate(**dim_kwargs) try: resolution = max(target_area.pixel_size_x, target_area.pixel_size_y) except AttributeError: resolution = max(target_area.lats.resolution, target_area.lons.resolution) for ds_id in ds_ids: res = self[ds_id].coarsen(boundary=boundary, side=side, **dim_kwargs) new_scn.datasets[ds_id] = getattr(res, func)() new_scn.datasets[ds_id].attrs['area'] = target_area new_scn.datasets[ds_id].attrs['resolution'] = resolution return new_scn def get(self, key, default=None): """Return value from DatasetDict with optional default.""" return self.datasets.get(key, default) def __getitem__(self, key): """Get a dataset or create a new 'slice' of the Scene.""" if isinstance(key, tuple) and not isinstance(key, DatasetID): return self.slice(key) return self.datasets[key] def __setitem__(self, key, value): """Add the item to the scene.""" self.datasets[key] = value # this could raise a KeyError but never should in this case ds_id = self.datasets.get_key(key) self.wishlist.add(ds_id) self.dep_tree.add_leaf(ds_id) def __delitem__(self, key): """Remove the item from the scene.""" k = self.datasets.get_key(key) self.wishlist.discard(k) del self.datasets[k] def __contains__(self, name): """Check if the dataset is in the scene.""" return name in self.datasets def _read_datasets(self, dataset_nodes, **kwargs): """Read the given datasets from file.""" # Sort requested datasets by reader reader_datasets = {} for node in dataset_nodes: ds_id = node.name # if we already have this node loaded or the node was assigned # by the user (node data is None) then don't try to load from a # reader if ds_id in self.datasets or not isinstance(node.data, dict): continue reader_name = node.data.get('reader_name') if reader_name is None: # This shouldn't be possible raise RuntimeError("Dependency tree has a corrupt node.") reader_datasets.setdefault(reader_name, set()).add(ds_id) # load all datasets for one reader at a time loaded_datasets = DatasetDict() for reader_name, ds_ids in reader_datasets.items(): reader_instance = self.readers[reader_name] new_datasets = reader_instance.load(ds_ids, **kwargs) loaded_datasets.update(new_datasets) self.datasets.update(loaded_datasets) return loaded_datasets def _get_prereq_datasets(self, comp_id, prereq_nodes, keepables, skip=False): """Get a composite's prerequisites, generating them if needed. Args: comp_id (DatasetID): DatasetID for the composite whose prerequisites are being collected. prereq_nodes (sequence of Nodes): Prerequisites to collect keepables (set): `set` to update if any prerequisites can't be loaded at this time (see `_generate_composite`). skip (bool): If True, consider prerequisites as optional and only log when they are missing. If False, prerequisites are considered required and will raise an exception and log a warning if they can't be collected. Defaults to False. Raises: KeyError: If required (skip=False) prerequisite can't be collected. 
""" prereq_datasets = [] delayed_gen = False for prereq_node in prereq_nodes: prereq_id = prereq_node.name if prereq_id not in self.datasets and prereq_id not in keepables \ and not prereq_node.is_leaf: self._generate_composite(prereq_node, keepables) if prereq_node is self.dep_tree.empty_node: # empty sentinel node - no need to load it continue elif prereq_id in self.datasets: prereq_datasets.append(self.datasets[prereq_id]) elif not prereq_node.is_leaf and prereq_id in keepables: delayed_gen = True continue elif not skip: LOG.debug("Missing prerequisite for '{}': '{}'".format( comp_id, prereq_id)) raise KeyError("Missing composite prerequisite for" " '{}': '{}'".format(comp_id, prereq_id)) else: LOG.debug("Missing optional prerequisite for {}: {}".format(comp_id, prereq_id)) if delayed_gen: keepables.add(comp_id) keepables.update([x.name for x in prereq_nodes]) LOG.debug("Delaying generation of %s because of dependency's delayed generation: %s", comp_id, prereq_id) if not skip: LOG.debug("Delayed prerequisite for '{}': '{}'".format(comp_id, prereq_id)) raise DelayedGeneration( "Delayed composite prerequisite for " "'{}': '{}'".format(comp_id, prereq_id)) else: LOG.debug("Delayed optional prerequisite for {}: {}".format(comp_id, prereq_id)) return prereq_datasets def _generate_composite(self, comp_node, keepables): """Collect all composite prereqs and create the specified composite. Args: comp_node (Node): Composite Node to generate a Dataset for keepables (set): `set` to update if any datasets are needed when generation is continued later. This can happen if generation is delayed to incompatible areas which would require resampling first. """ if comp_node.name in self.datasets: # already loaded return compositor, prereqs, optional_prereqs = comp_node.data try: delayed_prereq = False prereq_datasets = self._get_prereq_datasets( comp_node.name, prereqs, keepables, ) except DelayedGeneration: # if we are missing a required dependency that could be generated # later then we need to wait to return until after we've also # processed the optional dependencies delayed_prereq = True except KeyError: # we are missing a hard requirement that will never be available # there is no need to "keep" optional dependencies return optional_datasets = self._get_prereq_datasets( comp_node.name, optional_prereqs, keepables, skip=True ) # we are missing some prerequisites # in the future we may be able to generate this composite (delayed) # so we need to hold on to successfully loaded prerequisites and # optional prerequisites if delayed_prereq: preservable_datasets = set(self.datasets.keys()) prereq_ids = set(p.name for p in prereqs) opt_prereq_ids = set(p.name for p in optional_prereqs) keepables |= preservable_datasets & (prereq_ids | opt_prereq_ids) return try: composite = compositor(prereq_datasets, optional_datasets=optional_datasets, **self.attrs) cid = DatasetID.from_dict(composite.attrs) self.datasets[cid] = composite # update the node with the computed DatasetID if comp_node.name in self.wishlist: self.wishlist.remove(comp_node.name) self.wishlist.add(cid) comp_node.name = cid except IncompatibleAreas: LOG.debug("Delaying generation of %s because of incompatible areas", str(compositor.id)) preservable_datasets = set(self.datasets.keys()) prereq_ids = set(p.name for p in prereqs) opt_prereq_ids = set(p.name for p in optional_prereqs) keepables |= preservable_datasets & (prereq_ids | opt_prereq_ids) # even though it wasn't generated keep a list of what # might be needed in other compositors 
keepables.add(comp_node.name) return def _read_composites(self, compositor_nodes): """Read (generate) composites.""" keepables = set() for item in compositor_nodes: self._generate_composite(item, keepables) return keepables def read(self, nodes=None, **kwargs): """Load datasets from the necessary reader. Args: nodes (iterable): DependencyTree Node objects **kwargs: Keyword arguments to pass to the reader's `load` method. Returns: DatasetDict of loaded datasets """ if nodes is None: required_nodes = self.wishlist - set(self.datasets.keys()) nodes = self.dep_tree.leaves(nodes=required_nodes) return self._read_datasets(nodes, **kwargs) def generate_composites(self, nodes=None): """Compute all the composites contained in ``nodes``, defaulting to those needed by the wishlist.""" if nodes is None: required_nodes = self.wishlist - set(self.datasets.keys()) nodes = set(self.dep_tree.trunk(nodes=required_nodes)) - \ set(self.datasets.keys()) return self._read_composites(nodes) def _remove_failed_datasets(self, keepables): keepables = keepables or set() # remove reader datasets that couldn't be loaded so they aren't # attempted again later for n in self.missing_datasets: if n not in keepables: self.wishlist.discard(n) def unload(self, keepables=None): """Unload all unneeded datasets. Datasets are considered unneeded if they weren't directly requested or added to the Scene by the user and they are no longer needed to generate composites that have yet to be generated. Args: keepables (iterable): DatasetIDs to keep whether they are needed or not. """ to_del = [ds_id for ds_id, projectable in self.datasets.items() if ds_id not in self.wishlist and (not keepables or ds_id not in keepables)] for ds_id in to_del: LOG.debug("Unloading dataset: %r", ds_id) del self.datasets[ds_id] def load(self, wishlist, calibration=None, resolution=None, polarization=None, level=None, generate=True, unload=True, **kwargs): """Read and generate requested datasets. When the `wishlist` contains `DatasetID` objects they can either be fully specified, with every parameter provided, or they can omit certain parameters, in which case the "best" value will be chosen. For example, if a dataset is available in multiple resolutions and no resolution is specified in the wishlist's DatasetID then the highest (smallest number) resolution will be chosen. Loaded `DataArray` objects are created and stored in the Scene object. Args: wishlist (iterable): Names (str), wavelengths (float), or DatasetID objects of the requested datasets to load. See `available_dataset_ids()` for what datasets are available. calibration (list, str): Calibration levels to limit available datasets. This is a shortcut to avoid listing each DatasetID in `wishlist`. resolution (list | float): Resolution to limit available datasets. This is a shortcut similar to calibration. polarization (list | str): Polarization ('V', 'H') to limit available datasets. This is a shortcut similar to calibration. level (list | str): Pressure level to limit available datasets. Pressure should be in hPa or mb. If an altitude is used it should be specified in inverse meters (1/m). The units of this parameter ultimately depend on the reader. generate (bool): Generate composites from the loaded datasets (default: True) unload (bool): Unload datasets that were required to generate the requested datasets (composite dependencies) but are no longer needed.
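Examples (names and wavelengths are illustrative; what can actually be loaded depends on the reader and files in use)::

    >>> scn.load(['true_color'])      # a composite, by name
    >>> scn.load([0.6, 10.8])         # channels, by wavelength in micrometers
    >>> scn.load([DatasetID(name='C05', resolution=1000)])  # fully specified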
""" dataset_keys = set(wishlist) needed_datasets = (self.wishlist | dataset_keys) - \ set(self.datasets.keys()) unknown = self.dep_tree.find_dependencies(needed_datasets, calibration=calibration, polarization=polarization, resolution=resolution, level=level) self.wishlist |= needed_datasets if unknown: unknown_str = ", ".join(map(str, unknown)) raise KeyError("Unknown datasets: {}".format(unknown_str)) self.read(**kwargs) if generate: keepables = self.generate_composites() else: # don't lose datasets we loaded to try to generate composites keepables = set(self.datasets.keys()) | self.wishlist if self.missing_datasets: # copy the set of missing datasets because they won't be valid # after they are removed in the next line missing = self.missing_datasets.copy() self._remove_failed_datasets(keepables) missing_str = ", ".join(str(x) for x in missing) LOG.warning("The following datasets were not created and may require " "resampling to be generated: {}".format(missing_str)) if unload: self.unload(keepables=keepables) def _slice_data(self, source_area, slices, dataset): """Slice the data to reduce it.""" slice_x, slice_y = slices dataset = dataset.isel(x=slice_x, y=slice_y) assert ('x', source_area.x_size) in dataset.sizes.items() assert ('y', source_area.y_size) in dataset.sizes.items() dataset.attrs['area'] = source_area return dataset def _resampled_scene(self, new_scn, destination_area, reduce_data=True, **resample_kwargs): """Resample `datasets` to the `destination` area. If data reduction is enabled, some local caching is perfomed in order to avoid recomputation of area intersections. """ new_datasets = {} datasets = list(new_scn.datasets.values()) if isinstance(destination_area, (str, six.text_type)): destination_area = get_area_def(destination_area) if hasattr(destination_area, 'freeze'): try: max_area = new_scn.max_area() destination_area = destination_area.freeze(max_area) except ValueError: raise ValueError("No dataset areas available to freeze " "DynamicAreaDefinition.") resamplers = {} reductions = {} for dataset, parent_dataset in dataset_walker(datasets): ds_id = DatasetID.from_dict(dataset.attrs) pres = None if parent_dataset is not None: pres = new_datasets[DatasetID.from_dict(parent_dataset.attrs)] if ds_id in new_datasets: replace_anc(new_datasets[ds_id], pres) if ds_id in new_scn.datasets: new_scn.datasets[ds_id] = new_datasets[ds_id] continue if dataset.attrs.get('area') is None: if parent_dataset is None: new_scn.datasets[ds_id] = dataset else: replace_anc(dataset, pres) continue LOG.debug("Resampling %s", ds_id) source_area = dataset.attrs['area'] try: if reduce_data: key = source_area try: (slice_x, slice_y), source_area = reductions[key] except KeyError: if resample_kwargs.get('resampler') == 'gradient_search': factor = resample_kwargs.get('shape_divisible_by', 2) else: factor = None try: slice_x, slice_y = source_area.get_area_slices( destination_area, shape_divisible_by=factor) except TypeError: slice_x, slice_y = source_area.get_area_slices( destination_area) source_area = source_area[slice_y, slice_x] reductions[key] = (slice_x, slice_y), source_area dataset = self._slice_data(source_area, (slice_x, slice_y), dataset) else: LOG.debug("Data reduction disabled by the user") except NotImplementedError: LOG.info("Not reducing data before resampling.") if source_area not in resamplers: key, resampler = prepare_resampler( source_area, destination_area, **resample_kwargs) resamplers[source_area] = resampler self.resamplers[key] = resampler kwargs = resample_kwargs.copy() 
kwargs['resampler'] = resamplers[source_area] res = resample_dataset(dataset, destination_area, **kwargs) new_datasets[ds_id] = res if ds_id in new_scn.datasets: new_scn.datasets[ds_id] = res if parent_dataset is not None: replace_anc(res, pres) def resample(self, destination=None, datasets=None, generate=True, unload=True, resampler=None, reduce_data=True, **resample_kwargs): """Resample datasets and return a new scene. Args: destination (AreaDefinition, GridDefinition): area definition to resample to. If not specified then the area returned by `Scene.max_area()` will be used. datasets (list): Limit datasets to resample to these specified `DatasetID` objects. By default all currently loaded datasets are resampled. generate (bool): Generate any requested composites that could not be generated previously due to incompatible areas (default: True). unload (bool): Remove any datasets no longer needed after requested composites have been generated (default: True). resampler (str): Name of resampling method to use. By default, this is a nearest neighbor KDTree-based resampling ('nearest'). Other possible values include 'native', 'ewa', etc. See the :mod:`~satpy.resample` documentation for more information. reduce_data (bool): Reduce data by matching the input and output areas and slicing the data arrays (default: True) resample_kwargs: Remaining keyword arguments to pass to individual resampler classes. See the individual resampler class documentation :mod:`here <satpy.resample>` for available arguments. """ to_resample_ids = [dsid for (dsid, dataset) in self.datasets.items() if (not datasets) or dsid in datasets] if destination is None: destination = self.max_area(to_resample_ids) new_scn = self.copy(datasets=to_resample_ids) # we may have some datasets we asked for but that don't exist yet new_scn.wishlist = self.wishlist.copy() self._resampled_scene(new_scn, destination, resampler=resampler, reduce_data=reduce_data, **resample_kwargs) # regenerate anything from the wishlist that needs it (combining # multiple resolutions, etc.) if generate: keepables = new_scn.generate_composites() else: # don't lose datasets that we may need later for generating # composites keepables = set(new_scn.datasets.keys()) | new_scn.wishlist if new_scn.missing_datasets: # copy the set of missing datasets because they won't be valid # after they are removed in the next line missing = new_scn.missing_datasets.copy() new_scn._remove_failed_datasets(keepables) missing_str = ", ".join(str(x) for x in missing) LOG.warning( "The following datasets " "were not created: {}".format(missing_str)) if unload: new_scn.unload(keepables) return new_scn def show(self, dataset_id, overlay=None): """Show the *dataset* on screen as an image. Show dataset on screen as an image, possibly with an overlay. Args: dataset_id (DatasetID or str): Either a DatasetID or a string representing a DatasetID that has been previously loaded using Scene.load. overlay (dict, optional): Add an overlay before showing the image. The keys/values for this dictionary are as the arguments for :meth:`~satpy.writers.add_overlay`. The dictionary should contain at least the key ``"coast_dir"``, which should refer to a top-level directory containing shapefiles. See the pycoast_ package documentation for coastline shapefile installation instructions. ..
_pycoast: https://pycoast.readthedocs.io/ """ from satpy.writers import get_enhanced_image from satpy.utils import in_ipynb img = get_enhanced_image(self[dataset_id].squeeze(), overlay=overlay) if not in_ipynb(): img.show() return img def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynamic=False): """Convert satpy Scene to geoviews. Args: gvtype (gv plot type): One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points. Defaults to :class:`geoviews.Image`. See Geoviews documentation for details. datasets (list): Limit included products to these datasets kdims (list of str): Key dimensions. See geoviews documentation for more information. vdims (list of str, optional): Value dimensions. See geoviews documentation for more information. If not given, defaults to the first data variable. dynamic (bool, optional): Render the data lazily as a dynamic map. Defaults to False. Returns: geoviews object Todo: * better handling of projection information in datasets which are to be passed to geoviews """ import geoviews as gv from cartopy import crs # noqa if gvtype is None: gvtype = gv.Image ds = self.to_xarray_dataset(datasets) if vdims is None: # by default select first data variable as display variable vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name if hasattr(ds, "area") and hasattr(ds.area, 'to_cartopy_crs'): dscrs = ds.area.to_cartopy_crs() gvds = gv.Dataset(ds, crs=dscrs) else: gvds = gv.Dataset(ds) if "latitude" in ds.coords.keys(): gview = gvds.to(gv.QuadMesh, kdims=["longitude", "latitude"], vdims=vdims, dynamic=dynamic) else: gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, dynamic=dynamic) return gview def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArray objects of a scene into one xr.Dataset. Args: datasets (list): List of products to include in the :class:`xarray.Dataset` Returns: :class:`xarray.Dataset` """ if datasets is not None: datasets = [self[ds] for ds in datasets] else: datasets = [self.datasets.get(ds) for ds in self.wishlist] datasets = [ds for ds in datasets if ds is not None] ds_dict = {i.attrs['name']: i.rename(i.attrs['name']) for i in datasets if i.attrs.get('area') is not None} mdata = combine_metadata(*tuple(i.attrs for i in datasets)) if mdata.get('area') is None or not isinstance(mdata['area'], SwathDefinition): # either don't know what the area is or we have an AreaDefinition ds = xr.merge(ds_dict.values()) else: # we have a swath definition and should use lon/lat values lons, lats = mdata['area'].get_lonlats() if not isinstance(lons, DataArray): lons = DataArray(lons, dims=('y', 'x')) lats = DataArray(lats, dims=('y', 'x')) # ds_dict['longitude'] = lons # ds_dict['latitude'] = lats ds = xr.Dataset(ds_dict, coords={"latitude": (["y", "x"], lats), "longitude": (["y", "x"], lons)}) ds.attrs = mdata return ds def images(self): """Generate images for all the datasets from the scene.""" for ds_id, projectable in self.datasets.items(): if ds_id in self.wishlist: yield projectable.to_image() def save_dataset(self, dataset_id, filename=None, writer=None, overlay=None, decorate=None, compute=True, **kwargs): """Save the ``dataset_id`` to file using ``writer``. Args: dataset_id (str or Number or DatasetID): Identifier for the dataset to save to disk. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. writer (str): Name of writer to use when writing data to disk. Defaults to ``"geotiff"``.
If not provided, but ``filename`` is provided then the filename's extension is used to determine the best writer to use. See :meth:`Scene.get_writer_by_ext` for details. overlay (dict): See :func:`satpy.writers.add_overlay`. Only valid for "image" writers like `geotiff` or `simple_image`. decorate (dict): See :func:`satpy.writers.add_decorate`. Only valid for "image" writers like `geotiff` or `simple_image`. compute (bool): If `True` (default), compute all of the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. kwargs: Additional writer arguments. See :doc:`../writers` for more information. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a :doc:`dask:delayed` object or running :func:`dask.array.store`. If `compute` is `False` then the returned value is either a :doc:`dask:delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to :func:`dask.array.store`. If a target is provided, the caller is responsible for calling `target.close()` if the target has this method. """ if writer is None and filename is None: writer = 'geotiff' elif writer is None: writer = self.get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, ppp_config_dir=self.ppp_config_dir, filename=filename, **kwargs) return writer.save_dataset(self[dataset_id], overlay=overlay, decorate=decorate, compute=compute, **save_kwargs) def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, **kwargs): """Save all the datasets present in a scene to disk using ``writer``. Args: writer (str): Name of writer to use when writing data to disk. Defaults to ``"geotiff"``. If not provided, but ``filename`` is provided then the filename's extension is used to determine the best writer to use. See :meth:`Scene.get_writer_by_ext` for details. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. datasets (iterable): Limit written products to these datasets compute (bool): If `True` (default), compute all of the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. kwargs: Additional writer arguments. See :doc:`../writers` for more information. Returns: Value returned depends on `compute` keyword argument. If `compute` is `True` the value is the result of either a `dask.array.store` operation or a :doc:`dask:delayed` compute; typically this is `None`. If `compute` is `False` then the result is either a :doc:`dask:delayed` object that can be computed with `delayed.compute()` or a two element tuple of sources and targets to be passed to :func:`dask.array.store`. If `targets` is provided then it is the caller's responsibility to close any objects that have a "close" method. """ if datasets is not None: datasets = [self[ds] for ds in datasets] else: datasets = [self.datasets.get(ds) for ds in self.wishlist] datasets = [ds for ds in datasets if ds is not None] if not datasets: raise RuntimeError("None of the requested datasets could be " "generated or loaded. Requested " "composite inputs may need to have matching " "dimensions (e.g.
through resampling).") if writer is None and filename is None: writer = 'geotiff' elif writer is None: writer = self.get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, ppp_config_dir=self.ppp_config_dir, filename=filename, **kwargs) return writer.save_datasets(datasets, compute=compute, **save_kwargs) @classmethod def get_writer_by_ext(cls, extension): """Find the writer matching the ``extension``. Defaults to "simple_image". Example Mapping: - geotiff: .tif, .tiff - cf: .nc - mitiff: .mitiff - simple_image: .png, .jpeg, .jpg, ... Args: extension (str): Filename extension starting with "." (ex. ".png"). Returns: str: The name of the writer to use for this extension. """ mapping = {".tiff": "geotiff", ".tif": "geotiff", ".nc": "cf", ".mitiff": "mitiff"} return mapping.get(extension.lower(), 'simple_image') satpy-0.20.0/satpy/tests/000077500000000000000000000000001362525524100152405ustar00rootroot00000000000000satpy-0.20.0/satpy/tests/__init__.py000066400000000000000000000042221362525524100173510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The tests package. """ import logging import sys from satpy.tests import (reader_tests, test_dataset, test_file_handlers, test_readers, test_resample, test_demo, test_scene, test_utils, test_writers, test_yaml_reader, writer_tests, enhancement_tests, compositor_tests, test_multiscene, test_crefl_utils, test_config) if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest def suite(): """The global test suite. """ logging.basicConfig(level=logging.DEBUG) mysuite = unittest.TestSuite() mysuite.addTests(test_scene.suite()) mysuite.addTests(test_dataset.suite()) mysuite.addTests(test_writers.suite()) mysuite.addTests(test_readers.suite()) mysuite.addTests(test_resample.suite()) mysuite.addTests(test_demo.suite()) mysuite.addTests(test_yaml_reader.suite()) mysuite.addTests(reader_tests.suite()) mysuite.addTests(writer_tests.suite()) mysuite.addTests(test_file_handlers.suite()) mysuite.addTests(test_utils.suite()) mysuite.addTests(enhancement_tests.suite()) mysuite.addTests(compositor_tests.suite()) mysuite.addTests(test_multiscene.suite()) mysuite.addTests(test_crefl_utils.suite()) mysuite.addTests(test_config.suite()) return mysuite def load_tests(loader, tests, pattern): return suite() satpy-0.20.0/satpy/tests/compositor_tests/000077500000000000000000000000001362525524100206605ustar00rootroot00000000000000satpy-0.20.0/satpy/tests/compositor_tests/__init__.py000066400000000000000000001617601362525524100230040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Tests for compositors.""" import xarray as xr import dask.array as da import dask import numpy as np from datetime import datetime from satpy.tests.compositor_tests import test_abi, test_ahi, test_viirs try: from unittest import mock except ImportError: import mock import unittest class TestMatchDataArrays(unittest.TestCase): """Test the utility method 'match_data_arrays'.""" def _get_test_ds(self, shape=(50, 100), dims=('y', 'x')): """Get a fake DataArray.""" from pyresample.geometry import AreaDefinition data = da.random.random(shape, chunks=25) area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, shape[dims.index('x')], shape[dims.index('y')], (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) attrs = {'area': area} return xr.DataArray(data, dims=dims, attrs=attrs) def test_single_ds(self): """Test a single dataset is returned unharmed.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() comp = CompositeBase('test_comp') ret_datasets = comp.match_data_arrays((ds1,)) self.assertIs(ret_datasets[0], ds1) def test_mult_ds_area(self): """Test multiple datasets successfully pass.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() comp = CompositeBase('test_comp') ret_datasets = comp.match_data_arrays((ds1, ds2)) self.assertIs(ret_datasets[0], ds1) self.assertIs(ret_datasets[1], ds2) def test_mult_ds_no_area(self): """Test that all datasets must have an area attribute.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() del ds2.attrs['area'] comp = CompositeBase('test_comp') self.assertRaises(ValueError, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_area(self): """Test that datasets with different areas fail.""" from satpy.composites import CompositeBase, IncompatibleAreas from pyresample.geometry import AreaDefinition ds1 = self._get_test_ds() ds2 = self._get_test_ds() ds2.attrs['area'] = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, 100, 50, (-30037508.34, -20018754.17, 10037508.34, 18754.17)) comp = CompositeBase('test_comp') self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_dims(self): """Test that datasets with different dimensions still pass.""" from satpy.composites import CompositeBase # x is still 50, y is still 100, even though they are in # different order ds1 = self._get_test_ds(shape=(50, 100), dims=('y', 'x')) ds2 = self._get_test_ds(shape=(3, 100, 50), dims=('bands', 'x', 'y')) comp = CompositeBase('test_comp') ret_datasets = comp.match_data_arrays((ds1, ds2)) self.assertIs(ret_datasets[0], ds1) self.assertIs(ret_datasets[1], ds2) def test_mult_ds_diff_size(self): """Test that datasets with different sizes fail.""" from satpy.composites import CompositeBase, IncompatibleAreas # x is 50 in this one, 100 in ds2 # y is 100 in this one, 50 in ds2 ds1 =
self._get_test_ds(shape=(50, 100), dims=('x', 'y')) ds2 = self._get_test_ds(shape=(3, 50, 100), dims=('bands', 'y', 'x')) comp = CompositeBase('test_comp') self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_nondimensional_coords(self): """Test the removal of non-dimensional coordinates when compositing.""" from satpy.composites import CompositeBase ds = self._get_test_ds(shape=(2, 2)) ds['acq_time'] = ('y', [0, 1]) comp = CompositeBase('test_comp') ret_datasets = comp.match_data_arrays([ds, ds]) self.assertNotIn('acq_time', ret_datasets[0].coords) class TestRatioSharpenedCompositors(unittest.TestCase): """Test RatioSharpenedRGB and SelfSharpenedRGB compositors.""" def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {'area': area, 'start_time': datetime(2018, 1, 1, 18), 'modifiers': tuple(), 'resolution': 1000, 'name': 'test_vis'} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds2.attrs['name'] += '2' self.ds2 = ds2 ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds3.attrs['name'] += '3' self.ds3 = ds3 ds4 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 4, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds4.attrs['name'] += '4' ds4.attrs['resolution'] = 500 self.ds4 = ds4 # high res version ds4 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4, attrs=attrs.copy(), dims=('y', 'x'), coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) ds4.attrs['name'] += '4' ds4.attrs['resolution'] = 500 ds4.attrs['rows_per_scan'] = 1 ds4.attrs['area'] = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds4_big = ds4 def test_bad_color(self): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad') def test_match_data_arrays(self): """Test that all of the areas have to be the same resolution.""" from satpy.composites import RatioSharpenedRGB, IncompatibleAreas comp = RatioSharpenedRGB(name='true_color') self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) def test_basic_no_high_res(self): """Test that three datasets can be passed without optional high res.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') res = comp((self.ds1, self.ds2, self.ds3)) self.assertEqual(res.shape, (3, 2, 2)) def test_basic_no_sharpen(self): """Test that color None does no sharpening.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color', high_resolution_band=None) res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) self.assertEqual(res.shape, (3, 2, 2)) def test_basic_red(self): """Test
that basic high resolution red can be passed.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) res = res.values self.assertEqual(res.shape, (3, 2, 2)) np.testing.assert_allclose(res[0], self.ds4.values) np.testing.assert_allclose(res[1], np.array([[4.5, 4.5], [4.5, 4.5]], dtype=np.float64)) np.testing.assert_allclose(res[2], np.array([[6, 6], [6, 6]], dtype=np.float64)) def test_self_sharpened_no_high_res(self): """Test for exception when no high res band is specified.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds3)) def test_self_sharpened_basic(self): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color') res = comp((self.ds1, self.ds2, self.ds3)) res = res.values self.assertEqual(res.shape, (3, 2, 2)) np.testing.assert_allclose(res[0], self.ds1.values) np.testing.assert_allclose(res[1], np.array([[3, 3], [3, 3]], dtype=np.float64)) np.testing.assert_allclose(res[2], np.array([[4, 4], [4, 4]], dtype=np.float64)) class TestSunZenithCorrector(unittest.TestCase): """Test case for the zenith corrector.""" def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {'area': area, 'start_time': datetime(2018, 1, 1, 18), 'modifiers': tuple(), 'name': 'test_vis'} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) self.ds1 = ds1 self.sza = xr.DataArray( np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]], chunks=2))), attrs={'area': area}, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}, ) def test_basic_default_not_provided(self): """Test default limits when SZA isn't provided.""" from satpy.composites import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) res = comp((self.ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) self.assertIn('y', res.coords) self.assertIn('x', res.coords) ds1 = self.ds1.copy().drop_vars(('y', 'x')) res = comp((ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) self.assertNotIn('y', res.coords) self.assertNotIn('x', res.coords) def test_basic_lims_not_provided(self): """Test custom limits when SZA isn't provided.""" from satpy.composites import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) res = comp((self.ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) def test_basic_default_provided(self): """Test default limits when SZA is provided.""" from satpy.composites import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) res = comp((self.ds1, self.sza), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) def test_basic_lims_provided(self): """Test custom limits when SZA is provided.""" from satpy.composites import SunZenithCorrector comp = 
SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) res = comp((self.ds1, self.sza), test_attr='test') np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) class TestDifferenceCompositor(unittest.TestCase): """Test case for the difference compositor.""" def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {'area': area, 'start_time': datetime(2018, 1, 1, 18), 'modifiers': tuple(), 'resolution': 1000, 'name': 'test_vis'} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds2.attrs['name'] += '2' self.ds2 = ds2 # high res version ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4, attrs=attrs.copy(), dims=('y', 'x'), coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) ds2.attrs['name'] += '2' ds2.attrs['resolution'] = 500 ds2.attrs['rows_per_scan'] = 1 ds2.attrs['area'] = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds2_big = ds2 def test_basic_diff(self): """Test that a basic difference composite works.""" from satpy.composites import DifferenceCompositor comp = DifferenceCompositor(name='diff') res = comp((self.ds1, self.ds2)) np.testing.assert_allclose(res.values, -2) def test_bad_areas_diff(self): """Test that a difference where resolutions are different fails.""" from satpy.composites import DifferenceCompositor, IncompatibleAreas comp = DifferenceCompositor(name='diff') # too many arguments self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds2_big)) # different resolution self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2_big)) class TestDayNightCompositor(unittest.TestCase): """Test DayNightCompositor.""" def setUp(self): """Create test data.""" bands = ['R', 'G', 'B'] start_time = datetime(2018, 1, 1, 18, 0, 0) # RGB a = np.zeros((3, 2, 2), dtype=np.float64) a[:, 0, 0] = 0.1 a[:, 0, 1] = 0.2 a[:, 1, 0] = 0.3 a[:, 1, 1] = 0.4 a = da.from_array(a, a.shape) self.data_a = xr.DataArray(a, attrs={'test': 'a', 'start_time': start_time}, coords={'bands': bands}, dims=('bands', 'y', 'x')) b = np.zeros((3, 2, 2), dtype=np.float64) b[:, 0, 0] = np.nan b[:, 0, 1] = 0.25 b[:, 1, 0] = 0.50 b[:, 1, 1] = 0.75 b = da.from_array(b, b.shape) self.data_b = xr.DataArray(b, attrs={'test': 'b', 'start_time': start_time}, coords={'bands': bands}, dims=('bands', 'y', 'x')) sza = np.array([[80., 86.], [94., 100.]]) sza = da.from_array(sza, sza.shape) self.sza = xr.DataArray(sza, dims=('y', 'x')) # fake area my_area = mock.MagicMock() lons = np.array([[-95., -94.], [-93., -92.]]) lons = da.from_array(lons, lons.shape) lats = np.array([[40., 41.], [42., 43.]]) lats = da.from_array(lats, lats.shape) my_area.get_lonlats.return_value = (lons, lats) self.data_a.attrs['area'] = my_area self.data_b.attrs['area'] = my_area # not used except to check that it matches the data arrays self.sza.attrs['area'] = my_area def test_basic_sza(self): """Test compositor when SZA data is included.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test') res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() expected = np.array([[0., 0.22122352], [0.5, 1.]])
np.testing.assert_allclose(res.values[0], expected) def test_basic_area(self): """Test compositor when SZA data is not provided.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test') res = comp((self.data_a, self.data_b)) res = res.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) class TestFillingCompositor(unittest.TestCase): """Test case for the filling compositor.""" def test_fill(self): """Test filling.""" from satpy.composites import FillingCompositor comp = FillingCompositor(name='fill_test') filler = xr.DataArray(np.array([1, 2, 3, 4, 3, 2, 1])) red = xr.DataArray(np.array([1, 2, 3, np.nan, 3, 2, 1])) green = xr.DataArray(np.array([np.nan, 2, 3, 4, 3, 2, np.nan])) blue = xr.DataArray(np.array([4, 3, 2, 1, 2, 3, 4])) res = comp([filler, red, green, blue]) np.testing.assert_allclose(res.sel(bands='R').data, filler.data) np.testing.assert_allclose(res.sel(bands='G').data, filler.data) np.testing.assert_allclose(res.sel(bands='B').data, blue.data) class TestLuminanceSharpeningCompositor(unittest.TestCase): """Test luminance sharpening compositor.""" def test_compositor(self): """Test luminance sharpening compositor.""" from satpy.composites import LuminanceSharpeningCompositor comp = LuminanceSharpeningCompositor(name='test') # Three shades of grey rgb_arr = np.array([1, 50, 100, 200, 1, 50, 100, 200, 1, 50, 100, 200]) rgb = xr.DataArray(rgb_arr.reshape((3, 2, 2)), dims=['bands', 'y', 'x']) # 100 % luminance -> all result values ~1.0 lum = xr.DataArray(np.array([[100., 100.], [100., 100.]]), dims=['y', 'x']) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # 50 % luminance, all result values ~0.5 lum = xr.DataArray(np.array([[50., 50.], [50., 50.]]), dims=['y', 'x']) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.5, atol=1e-9) # 30 % luminance, all result values ~0.3 lum = xr.DataArray(np.array([[30., 30.], [30., 30.]]), dims=['y', 'x']) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.3, atol=1e-9) # 0 % luminance, all values ~0.0 lum = xr.DataArray(np.array([[0., 0.], [0., 0.]]), dims=['y', 'x']) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.0, atol=1e-9) class TestSandwichCompositor(unittest.TestCase): """Test sandwich compositor.""" @mock.patch('satpy.composites.enhance2dataset') def test_compositor(self, e2d): """Test the sandwich compositor.""" from satpy.composites import SandwichCompositor rgb_arr = da.from_array(np.random.random((3, 2, 2)), chunks=2) rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x']) lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2) lum = xr.DataArray(lum_arr, dims=['y', 'x']) # Make enhance2dataset return unmodified dataset e2d.return_value = rgb comp = SandwichCompositor(name='test') res = comp([lum, rgb]) for i in range(3): np.testing.assert_allclose(res.data[i, :, :], rgb_arr[i, :, :] * lum_arr / 100.)
class TestInlineComposites(unittest.TestCase): """Test inline composites.""" def test_inline_composites(self): """Test that inline composites are working.""" from satpy.composites import CompositorLoader cl_ = CompositorLoader() cl_.load_sensor_composites('visir') comps = cl_.compositors # Check that "fog" product has all its prerequisites defined keys = comps['visir'].keys() fog = [comps['visir'][dsid] for dsid in keys if "fog" == dsid.name][0] self.assertEqual(fog.attrs['prerequisites'][0], '_fog_dep_0') self.assertEqual(fog.attrs['prerequisites'][1], '_fog_dep_1') self.assertEqual(fog.attrs['prerequisites'][2], 10.8) # Check that the sub-composite dependencies use wavelengths # (numeric values) keys = comps['visir'].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid.name] self.assertEqual(comps['visir'][fog_dep_ids[0]].attrs['prerequisites'], [12.0, 10.8]) self.assertEqual(comps['visir'][fog_dep_ids[1]].attrs['prerequisites'], [10.8, 8.7]) # Check the same for SEVIRI and verify channel names are used # in the sub-composite dependencies instead of wavelengths cl_ = CompositorLoader() cl_.load_sensor_composites('seviri') comps = cl_.compositors keys = comps['seviri'].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid.name] self.assertEqual(comps['seviri'][fog_dep_ids[0]].attrs['prerequisites'], ['IR_120', 'IR_108']) self.assertEqual(comps['seviri'][fog_dep_ids[1]].attrs['prerequisites'], ['IR_108', 'IR_087']) class TestNIRReflectance(unittest.TestCase): """Test NIR reflectance compositor.""" @mock.patch('satpy.composites.sun_zenith_angle') @mock.patch('satpy.composites.NIRReflectance.apply_modifier_info') @mock.patch('satpy.composites.Calculator') def test_compositor(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor.""" import numpy as np import xarray as xr import dask.array as da refl_arr = np.random.random((2, 2)) refl = da.from_array(refl_arr) refl_from_tbs = mock.MagicMock() refl_from_tbs.return_value = refl calculator.return_value = mock.MagicMock( reflectance_from_tbs=refl_from_tbs) from satpy.composites import NIRReflectance nir_arr = np.random.random((2, 2)) nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) platform = 'Meteosat-11' sensor = 'seviri' chan_name = 'IR_039' nir.attrs['platform_name'] = platform nir.attrs['sensor'] = sensor nir.attrs['name'] = chan_name get_lonlats = mock.MagicMock() lons, lats = 1, 2 get_lonlats.return_value = (lons, lats) nir.attrs['area'] = mock.MagicMock(get_lonlats=get_lonlats) start_time = 1 nir.attrs['start_time'] = start_time ir_arr = 100 * np.random.random((2, 2)) ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) sunz_arr = 100 * np.random.random((2, 2)) sunz = xr.DataArray(da.from_array(sunz_arr), dims=['y', 'x']) sunz.attrs['standard_name'] = 'solar_zenith_angle' sunz2 = da.from_array(sunz_arr) sza.return_value = sunz2 comp = NIRReflectance(name='test') info = {'modifiers': None} res = comp([nir, ir_], optional_datasets=[sunz], **info) self.assertEqual(res.attrs['units'], '%') self.assertEqual(res.attrs['platform_name'], platform) self.assertEqual(res.attrs['sensor'], sensor) self.assertEqual(res.attrs['name'], chan_name) calculator.assert_called() calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039') self.assertTrue(apply_modifier_info.call_args[0][0] is nir) self.assertTrue(comp._refl3x is calculator.return_value) refl_from_tbs.reset_mock() res = comp([nir, ir_], optional_datasets=[], **info) get_lonlats.assert_called() 
sza.assert_called_with(start_time, lons, lats) refl_from_tbs.assert_called_with(sunz2, nir.data, ir_.data, tb_ir_co2=None) refl_from_tbs.reset_mock() co2_arr = np.random.random((2, 2)) co2 = xr.DataArray(da.from_array(co2_arr), dims=['y', 'x']) co2.attrs['wavelength'] = [12.0, 13.0, 14.0] co2.attrs['units'] = 'K' res = comp([nir, ir_], optional_datasets=[co2], **info) refl_from_tbs.assert_called_with(sunz2, nir.data, ir_.data, tb_ir_co2=co2.data) class TestColormapCompositor(unittest.TestCase): """Test the ColormapCompositor.""" def test_build_colormap(self): """Test colormap building.""" from satpy.composites import ColormapCompositor cmap_comp = ColormapCompositor('test_cmap_compositor') palette = np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]) cmap, sqpal = cmap_comp.build_colormap(palette, np.uint8, {}) self.assertTrue(np.allclose(cmap.values, [0, 1])) self.assertTrue(np.allclose(sqpal, palette / 255.0)) palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=['value', 'band']) palette.attrs['palette_meanings'] = [2, 3, 4] cmap, sqpal = cmap_comp.build_colormap(palette, np.uint8, {}) self.assertTrue(np.allclose(cmap.values, [2, 3, 4])) self.assertTrue(np.allclose(sqpal, palette / 255.0)) class TestPaletteCompositor(unittest.TestCase): """Test the PaletteCompositor.""" def test_call(self): """Test palette compositing.""" from satpy.composites import PaletteCompositor cmap_comp = PaletteCompositor('test_cmap_compositor') palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=['value', 'band']) palette.attrs['palette_meanings'] = [2, 3, 4] data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8), dims=['y', 'x']) res = cmap_comp([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) self.assertTrue(np.allclose(res, exp)) class TestCloudTopHeightCompositor(unittest.TestCase): """Test the CloudTopHeightCompositor.""" def test_call(self): """Test the CloudTopHeight composite generation.""" from satpy.composites.cloud_products import CloudTopHeightCompositor cmap_comp = CloudTopHeightCompositor('test_cmap_compositor') palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=['value', 'band']) palette.attrs['palette_meanings'] = [2, 3, 4] status = xr.DataArray(np.array([[1, 0, 1], [1, 0, 65535]]), dims=['y', 'x'], attrs={'_FillValue': 65535}) data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8), dims=['y', 'x']) res = cmap_comp([data, palette, status]) exp = np.array([[[0., 0.49803922, 0.], [0., 0.49803922, np.nan]], [[0., 0.49803922, 0.], [0., 0.49803922, np.nan]], [[0., 0.49803922, 0.], [0., 0.49803922, np.nan]]]) np.testing.assert_allclose(res, exp) class TestPrecipCloudsCompositor(unittest.TestCase): """Test the PrecipClouds compositor.""" def test_call(self): """Test the precip composite generation.""" from satpy.composites.cloud_products import PrecipCloudsRGB cmap_comp = PrecipCloudsRGB('test_precip_compositor') data_light = xr.DataArray(np.array([[80, 70, 60, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=['y', 'x'], attrs={'_FillValue': 255}) data_moderate = xr.DataArray(np.array([[60, 50, 40, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=['y', 'x'], attrs={'_FillValue': 255}) data_intense = xr.DataArray(np.array([[40, 30, 20, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=['y', 'x'], attrs={'_FillValue': 255}) data_flags = xr.DataArray(np.array([[0, 0, 4, 0], 
[0, 0, 0, 0]], dtype=np.uint8), dims=['y', 'x']) res = cmap_comp([data_light, data_moderate, data_intense, data_flags]) exp = np.array([[[0.24313725, 0.18235294, 0.12156863, np.nan], [0.12156863, 0.18235294, 0.24313725, np.nan]], [[0.62184874, 0.51820728, 0.41456583, np.nan], [0.20728291, 0.31092437, 0.41456583, np.nan]], [[0.82913165, 0.7254902, 0.62184874, np.nan], [0.20728291, 0.31092437, 0.41456583, np.nan]]]) np.testing.assert_allclose(res, exp) class TestSingleBandCompositor(unittest.TestCase): """Test the single-band compositor.""" def setUp(self): """Create test data.""" from satpy.composites import SingleBandCompositor self.comp = SingleBandCompositor(name='test') all_valid = np.ones((2, 2)) self.all_valid = xr.DataArray(all_valid, dims=['y', 'x']) def test_call(self): """Test calling the compositor.""" # Dataset with extra attributes all_valid = self.all_valid all_valid.attrs['sensor'] = 'foo' attrs = {'foo': 'bar', 'resolution': 333, 'units': 'K', 'calibration': 'BT', 'wavelength': 10.8} self.comp.attrs['resolution'] = None res = self.comp([self.all_valid], **attrs) # Verify attributes self.assertEqual(res.attrs.get('sensor'), 'foo') self.assertTrue('foo' in res.attrs) self.assertEqual(res.attrs.get('foo'), 'bar') self.assertTrue('units' in res.attrs) self.assertTrue('calibration' in res.attrs) self.assertFalse('modifiers' in res.attrs) self.assertEqual(res.attrs['wavelength'], 10.8) self.assertEqual(res.attrs['resolution'], 333) class TestGenericCompositor(unittest.TestCase): """Test generic compositor.""" def setUp(self): """Create test data.""" from satpy.composites import GenericCompositor self.comp = GenericCompositor(name='test') self.comp2 = GenericCompositor(name='test2', common_channel_mask=False) all_valid = np.ones((1, 2, 2)) self.all_valid = xr.DataArray(all_valid, dims=['bands', 'y', 'x']) first_invalid = np.reshape(np.array([np.nan, 1., 1., 1.]), (1, 2, 2)) self.first_invalid = xr.DataArray(first_invalid, dims=['bands', 'y', 'x']) second_invalid = np.reshape(np.array([1., np.nan, 1., 1.]), (1, 2, 2)) self.second_invalid = xr.DataArray(second_invalid, dims=['bands', 'y', 'x']) wrong_shape = np.reshape(np.array([1., 1., 1.]), (1, 3, 1)) self.wrong_shape = xr.DataArray(wrong_shape, dims=['bands', 'y', 'x']) def test_masking(self): """Test masking in generic compositor.""" # Single channel res = self.comp([self.all_valid]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # Three channels, one value invalid res = self.comp([self.all_valid, self.all_valid, self.first_invalid]) correct = np.reshape(np.array([np.nan, 1., 1., 1.]), (2, 2)) for i in range(3): np.testing.assert_almost_equal(res.data[i, :, :], correct) # Three channels, two values invalid res = self.comp([self.all_valid, self.first_invalid, self.second_invalid]) correct = np.reshape(np.array([np.nan, np.nan, 1., 1.]), (2, 2)) for i in range(3): np.testing.assert_almost_equal(res.data[i, :, :], correct) def test_concat_datasets(self): """Test concatenation of datasets.""" from satpy.composites import IncompatibleAreas res = self.comp._concat_datasets([self.all_valid], 'L') num_bands = len(res.bands) self.assertEqual(num_bands, 1) self.assertEqual(res.shape[0], num_bands) self.assertTrue(res.bands[0] == 'L') res = self.comp._concat_datasets([self.all_valid, self.all_valid], 'LA') num_bands = len(res.bands) self.assertEqual(num_bands, 2) self.assertEqual(res.shape[0], num_bands) self.assertTrue(res.bands[0] == 'L') self.assertTrue(res.bands[1] == 'A') self.assertRaises(IncompatibleAreas, 
self.comp._concat_datasets, [self.all_valid, self.wrong_shape], 'LA') def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" res = self.comp._get_sensors([self.all_valid]) self.assertIsNone(res) dset1 = self.all_valid dset1.attrs['sensor'] = 'foo' res = self.comp._get_sensors([dset1]) self.assertEqual(res, 'foo') dset2 = self.first_invalid dset2.attrs['sensor'] = 'bar' res = self.comp._get_sensors([dset1, dset2]) self.assertTrue('foo' in res) self.assertTrue('bar' in res) self.assertEqual(len(res), 2) self.assertTrue(isinstance(res, set)) @mock.patch('satpy.composites.GenericCompositor._get_sensors') @mock.patch('satpy.composites.combine_metadata') @mock.patch('satpy.composites.check_times') @mock.patch('satpy.composites.GenericCompositor.match_data_arrays') def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, get_sensors): """Test calling generic compositor.""" from satpy.composites import IncompatibleAreas combine_metadata.return_value = dict() get_sensors.return_value = 'foo' # One dataset, no mode given res = self.comp([self.all_valid]) self.assertEqual(res.shape[0], 1) self.assertEqual(res.attrs['mode'], 'L') match_data_arrays.assert_not_called() # This compositor has been initialized without common masking, so the # masking shouldn't have been called projectables = [self.all_valid, self.first_invalid, self.second_invalid] match_data_arrays.return_value = projectables res = self.comp2(projectables) match_data_arrays.assert_called_once() match_data_arrays.reset_mock() # Dataset for alpha given, so shouldn't be masked projectables = [self.all_valid, self.all_valid] match_data_arrays.return_value = projectables res = self.comp(projectables) match_data_arrays.assert_called_once() match_data_arrays.reset_mock() # When areas are incompatible, masking shouldn't happen match_data_arrays.side_effect = IncompatibleAreas() self.assertRaises(IncompatibleAreas, self.comp, [self.all_valid, self.wrong_shape]) match_data_arrays.assert_called_once() def test_call(self): """Test calling generic compositor.""" # Multiple datasets with extra attributes all_valid = self.all_valid all_valid.attrs['sensor'] = 'foo' attrs = {'foo': 'bar', 'resolution': 333} self.comp.attrs['resolution'] = None res = self.comp([self.all_valid, self.first_invalid], **attrs) # Verify attributes self.assertEqual(res.attrs.get('sensor'), 'foo') self.assertTrue('foo' in res.attrs) self.assertEqual(res.attrs.get('foo'), 'bar') self.assertTrue('units' not in res.attrs) self.assertTrue('calibration' not in res.attrs) self.assertTrue('modifiers' not in res.attrs) self.assertIsNone(res.attrs['wavelength']) self.assertEqual(res.attrs['mode'], 'LA') self.assertEqual(res.attrs['resolution'], 333) class TestAddBands(unittest.TestCase): """Test case for the `add_bands` function.""" def test_add_bands(self): """Test adding bands.""" from satpy.composites import add_bands import dask.array as da import numpy as np import xarray as xr # L + RGB -> RGB data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), coords={'bands': ['L']}) new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), coords={'bands': ['R', 'G', 'B']}) res = add_bands(data, new_bands) res_bands = ['R', 'G', 'B'] self.assertEqual(res.mode, ''.join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords['bands'], res_bands) # L + RGBA -> RGBA data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), coords={'bands': ['L']}, attrs={'mode': 'L'}) 
        new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'),
                                 coords={'bands': ['R', 'G', 'B', 'A']})
        res = add_bands(data, new_bands)
        res_bands = ['R', 'G', 'B', 'A']
        self.assertEqual(res.mode, ''.join(res_bands))
        np.testing.assert_array_equal(res.bands, res_bands)
        np.testing.assert_array_equal(res.coords['bands'], res_bands)
        # LA + RGB -> RGBA
        data = xr.DataArray(da.ones((2, 3, 3)), dims=('bands', 'y', 'x'),
                            coords={'bands': ['L', 'A']}, attrs={'mode': 'LA'})
        new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'),
                                 coords={'bands': ['R', 'G', 'B']})
        res = add_bands(data, new_bands)
        res_bands = ['R', 'G', 'B', 'A']
        self.assertEqual(res.mode, ''.join(res_bands))
        np.testing.assert_array_equal(res.bands, res_bands)
        np.testing.assert_array_equal(res.coords['bands'], res_bands)
        # RGB + RGBA -> RGBA
        data = xr.DataArray(da.ones((3, 3, 3)), dims=('bands', 'y', 'x'),
                            coords={'bands': ['R', 'G', 'B']},
                            attrs={'mode': 'RGB'})
        new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'),
                                 coords={'bands': ['R', 'G', 'B', 'A']})
        res = add_bands(data, new_bands)
        res_bands = ['R', 'G', 'B', 'A']
        self.assertEqual(res.mode, ''.join(res_bands))
        np.testing.assert_array_equal(res.bands, res_bands)
        np.testing.assert_array_equal(res.coords['bands'], res_bands)


class TestStaticImageCompositor(unittest.TestCase):
    """Test case for the static compositor."""

    @mock.patch('satpy.resample.get_area_def')
    def test_init(self, get_area_def):
        """Test the initialization of the static compositor."""
        from satpy.composites import StaticImageCompositor
        # No filename given raises ValueError
        with self.assertRaises(ValueError):
            comp = StaticImageCompositor("name")
        # No area defined
        comp = StaticImageCompositor("name", filename="foo.tif")
        self.assertEqual(comp.filename, "foo.tif")
        self.assertIsNone(comp.area)
        # Area defined
        get_area_def.return_value = "bar"
        comp = StaticImageCompositor("name", filename="foo.tif", area="euro4")
        self.assertEqual(comp.filename, "foo.tif")
        self.assertEqual(comp.area, "bar")
        get_area_def.assert_called_once_with("euro4")

    @mock.patch('satpy.Scene')
    def test_call(self, Scene):  # noqa
        """Test the static compositing."""
        from satpy.composites import StaticImageCompositor

        class MockScene(dict):
            def load(self, arg):
                pass

        img = mock.MagicMock()
        img.attrs = {}
        scn = MockScene()
        scn['image'] = img
        Scene.return_value = scn
        comp = StaticImageCompositor("name", filename="foo.tif", area="euro4")
        res = comp()
        Scene.assert_called_once_with(reader='generic_image',
                                      filenames=[comp.filename])
        self.assertTrue("start_time" in res.attrs)
        self.assertTrue("end_time" in res.attrs)
        self.assertIsNone(res.attrs['sensor'])
        self.assertTrue('modifiers' not in res.attrs)
        self.assertTrue('calibration' not in res.attrs)
        # Non-georeferenced image, no area given
        img.attrs.pop('area')
        comp = StaticImageCompositor("name", filename="foo.tif")
        with self.assertRaises(AttributeError):
            res = comp()
        # Non-georeferenced image, area given
        comp = StaticImageCompositor("name", filename="foo.tif", area='euro4')
        res = comp()
        self.assertEqual(res.attrs['area'].area_id, 'euro4')


def _enhance2dataset(dataset):
    """Mock the enhance2dataset to return the original data."""
    return dataset


class TestBackgroundCompositor(unittest.TestCase):
    """Test case for the background compositor."""

    @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset)
    def test_call(self):
        """Test the background compositing."""
        from satpy.composites import BackgroundCompositor
        import numpy as np
        comp = BackgroundCompositor("name")
        # L mode images
        attrs
= {'mode': 'L', 'area': 'foo'} foreground = xr.DataArray(np.array([[[1., 0.5], [0., np.nan]]]), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) background = xr.DataArray(np.ones((1, 2, 2)), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) res = comp([foreground, background]) self.assertEqual(res.attrs['area'], 'foo') self.assertTrue(np.all(res == np.array([[1., 0.5], [0., 1.]]))) self.assertEqual(res.attrs['mode'], 'L') # LA mode images attrs = {'mode': 'LA', 'area': 'foo'} foreground = xr.DataArray(np.array([[[1., 0.5], [0., np.nan]], [[0.5, 0.5], [0.5, 0.5]]]), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) background = xr.DataArray(np.ones((2, 2, 2)), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) res = comp([foreground, background]) self.assertTrue(np.all(res == np.array([[1., 0.75], [0.5, 1.]]))) self.assertEqual(res.attrs['mode'], 'LA') # RGB mode images attrs = {'mode': 'RGB', 'area': 'foo'} foreground = xr.DataArray(np.array([[[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]]]), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) background = xr.DataArray(np.ones((3, 2, 2)), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) res = comp([foreground, background]) self.assertTrue(np.all(res == np.array([[[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]]))) self.assertEqual(res.attrs['mode'], 'RGB') # RGBA mode images attrs = {'mode': 'RGBA', 'area': 'foo'} foreground = xr.DataArray(np.array([[[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]], [[0.5, 0.5], [0.5, 0.5]]]), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) background = xr.DataArray(np.ones((4, 2, 2)), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs) res = comp([foreground, background]) self.assertTrue(np.all(res == np.array([[[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]]))) self.assertEqual(res.attrs['mode'], 'RGBA') @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) def test_multiple_sensors(self): """Test the background compositing from multiple sensor data.""" from satpy.composites import BackgroundCompositor import numpy as np comp = BackgroundCompositor("name") # L mode images attrs = {'mode': 'L', 'area': 'foo'} foreground = xr.DataArray(np.array([[[1., 0.5], [0., np.nan]]]), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs.copy()) foreground.attrs['sensor'] = 'abi' background = xr.DataArray(np.ones((1, 2, 2)), dims=('bands', 'y', 'x'), coords={'bands': [c for c in attrs['mode']]}, attrs=attrs.copy()) background.attrs['sensor'] = 'glm' res = comp([foreground, background]) self.assertEqual(res.attrs['area'], 'foo') self.assertTrue(np.all(res == np.array([[1., 0.5], [0., 1.]]))) self.assertEqual(res.attrs['mode'], 'L') self.assertEqual(res.attrs['sensor'], {'abi', 'glm'}) class TestPSPAtmosphericalCorrection(unittest.TestCase): """Test the pyspectral-based atmospheric correction modifier.""" def setUp(self): """Patch in-class imports.""" self.orbital = mock.MagicMock() modules = { 'pyspectral.atm_correction_ir': mock.MagicMock(), 'pyorbital.orbital': self.orbital, } self.module_patcher = mock.patch.dict('sys.modules', modules) self.module_patcher.start() def tearDown(self): """Unpatch 
in-class imports.""" self.module_patcher.stop() @mock.patch('satpy.composites.PSPAtmosphericalCorrection.apply_modifier_info') @mock.patch('satpy.composites.get_satpos') def test_call(self, get_satpos, *mocks): """Test atmospherical correction.""" from satpy.composites import PSPAtmosphericalCorrection # Patch methods get_satpos.return_value = 'sat_lon', 'sat_lat', 12345678 self.orbital.get_observer_look.return_value = 0, 0 area = mock.MagicMock() area.get_lonlats.return_value = 'lons', 'lats' band = mock.MagicMock(attrs={'area': area, 'start_time': 'start_time', 'name': 'name', 'platform_name': 'platform', 'sensor': 'sensor'}) # Perform atmospherical correction psp = PSPAtmosphericalCorrection(name='dummy') psp(projectables=[band]) # Check arguments of get_orbserver_look() call, especially the altitude # unit conversion from meters to kilometers self.orbital.get_observer_look.assert_called_with( 'sat_lon', 'sat_lat', 12345.678, 'start_time', 'lons', 'lats', 0) class TestPSPRayleighReflectance(unittest.TestCase): """Test the pyspectral-based rayleigh correction modifier.""" def setUp(self): """Patch in-class imports.""" self.astronomy = mock.MagicMock() self.orbital = mock.MagicMock() modules = { 'pyorbital.astronomy': self.astronomy, 'pyorbital.orbital': self.orbital, } self.module_patcher = mock.patch.dict('sys.modules', modules) self.module_patcher.start() def tearDown(self): """Unpatch in-class imports.""" self.module_patcher.stop() @mock.patch('satpy.composites.get_satpos') def test_get_angles(self, get_satpos): """Test sun and satellite angle calculation.""" from satpy.composites import PSPRayleighReflectance # Patch methods get_satpos.return_value = 'sat_lon', 'sat_lat', 12345678 self.orbital.get_observer_look.return_value = 0, 0 self.astronomy.get_alt_az.return_value = 0, 0 area = mock.MagicMock() lons = np.zeros((5, 5)) lons[1, 1] = np.inf lons = da.from_array(lons, chunks=5) lats = np.zeros((5, 5)) lats[1, 1] = np.inf lats = da.from_array(lats, chunks=5) area.get_lonlats.return_value = (lons, lats) vis = mock.MagicMock(attrs={'area': area, 'start_time': 'start_time'}) # Compute angles psp = PSPRayleighReflectance(name='dummy') psp.get_angles(vis) # Check arguments of get_orbserver_look() call, especially the altitude # unit conversion from meters to kilometers self.orbital.get_observer_look.assert_called_once() args = self.orbital.get_observer_look.call_args[0] self.assertEqual(args[:4], ('sat_lon', 'sat_lat', 12345.678, 'start_time')) self.assertIsInstance(args[4], da.Array) self.assertIsInstance(args[5], da.Array) self.assertEqual(args[6], 0) class TestMaskingCompositor(unittest.TestCase): """Test case for the simple masking compositor.""" def test_init(self): """Test the initializiation of compositor.""" from satpy.composites import MaskingCompositor # No transparency given raises ValueError with self.assertRaises(ValueError): comp = MaskingCompositor("name") # transparency defined comp = MaskingCompositor("name", transparency=0) self.assertEqual(comp.transparency, 0) def test_call(self): """Test call the compositor.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] flag_values = da.array([1, 2]) transparency_data_v1 = {'Cloud-free_land': 100, 'Cloud-free_sea': 50} transparency_data_v2 = {1: 100, 2: 50} # 2D data array data = xr.DataArray(da.random.random((3, 3)), dims=['y', 'x']) # 2D CT data array ct_data = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) ct_data = 
xr.DataArray(ct_data, dims=['y', 'x']) ct_data.attrs['flag_meanings'] = flag_meanings ct_data.attrs['flag_values'] = flag_values reference_alpha = da.array([[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 0]]) reference_alpha = xr.DataArray(reference_alpha, dims=['y', 'x']) # Test with numerical transparency data with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", transparency=transparency_data_v1) res = comp([data, ct_data]) self.assertTrue(res.mode == 'LA') np.testing.assert_allclose(res.sel(bands='L'), data) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) # Test with named fields with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", transparency=transparency_data_v2) res = comp([data, ct_data]) self.assertTrue(res.mode == 'LA') np.testing.assert_allclose(res.sel(bands='L'), data) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) # Test RGB dataset # 3D data array data = xr.DataArray(da.random.random((3, 3, 3)), dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B'], 'y': np.arange(3), 'x': np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", transparency=transparency_data_v1) res = comp([data, ct_data]) self.assertTrue(res.mode == 'RGBA') np.testing.assert_allclose(res.sel(bands='R'), data.sel(bands='R')) np.testing.assert_allclose(res.sel(bands='G'), data.sel(bands='G')) np.testing.assert_allclose(res.sel(bands='B'), data.sel(bands='B')) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) # Test RGBA dataset data = xr.DataArray(da.random.random((4, 3, 3)), dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B', 'A'], 'y': np.arange(3), 'x': np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", transparency=transparency_data_v2) res = comp([data, ct_data]) self.assertTrue(res.mode == 'RGBA') np.testing.assert_allclose(res.sel(bands='R'), data.sel(bands='R')) np.testing.assert_allclose(res.sel(bands='G'), data.sel(bands='G')) np.testing.assert_allclose(res.sel(bands='B'), data.sel(bands='B')) # The compositor should drop the original alpha band np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) def suite(): """Test suite for all reader tests.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTests(test_abi.suite()) mysuite.addTests(test_ahi.suite()) mysuite.addTests(test_viirs.suite()) mysuite.addTest(loader.loadTestsFromTestCase(TestMatchDataArrays)) mysuite.addTest(loader.loadTestsFromTestCase(TestRatioSharpenedCompositors)) mysuite.addTest(loader.loadTestsFromTestCase(TestSunZenithCorrector)) mysuite.addTest(loader.loadTestsFromTestCase(TestDifferenceCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestDayNightCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestFillingCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestSandwichCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestLuminanceSharpeningCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestInlineComposites)) mysuite.addTest(loader.loadTestsFromTestCase(TestColormapCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestPaletteCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestCloudTopHeightCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestSingleBandCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestGenericCompositor)) 
mysuite.addTest(loader.loadTestsFromTestCase(TestNIRReflectance)) mysuite.addTest(loader.loadTestsFromTestCase(TestPrecipCloudsCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestAddBands)) mysuite.addTest(loader.loadTestsFromTestCase(TestBackgroundCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestStaticImageCompositor)) mysuite.addTest(loader.loadTestsFromTestCase(TestPSPAtmosphericalCorrection)) mysuite.addTest(loader.loadTestsFromTestCase(TestPSPRayleighReflectance)) mysuite.addTest(loader.loadTestsFromTestCase(TestMaskingCompositor)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/compositor_tests/test_abi.py000066400000000000000000000056711362525524100230350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for ABI compositors. """ import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class TestABIComposites(unittest.TestCase): """Test ABI-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites import CompositorLoader cl = CompositorLoader() cl.load_sensor_composites('abi') def test_simulated_green(self): """Test creating a fake 'green' band.""" import xarray as xr import dask.array as da import numpy as np from satpy.composites.abi import SimulatedGreen from pyresample.geometry import AreaDefinition rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = SimulatedGreen('green', prerequisites=('C01', 'C02', 'C03'), standard_name='toa_bidirectional_reflectance') c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, dims=('y', 'x'), attrs={'name': 'C01', 'area': area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=('y', 'x'), attrs={'name': 'C02', 'area': area}) c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.35, dims=('y', 'x'), attrs={'name': 'C03', 'area': area}) res = comp((c01, c02, c03)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'green') self.assertEqual(res.attrs['standard_name'], 'toa_bidirectional_reflectance') data = res.compute() np.testing.assert_allclose(data, 0.28025) def suite(): """The test suite for test_abi. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestABIComposites)) return mysuite satpy-0.20.0/satpy/tests/compositor_tests/test_ahi.py000066400000000000000000000053671362525524100230450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. 
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for AHI compositors."""

import sys
if sys.version_info < (2, 7):
    import unittest2 as unittest
else:
    import unittest


class TestAHIComposites(unittest.TestCase):
    """Test AHI-specific composites."""

    def test_load_composite_yaml(self):
        """Test loading the yaml for this sensor."""
        from satpy.composites import CompositorLoader
        cl = CompositorLoader()
        cl.load_sensor_composites('ahi')

    def test_corrected_green(self):
        """Test adjusting the 'green' band."""
        import xarray as xr
        import dask.array as da
        import numpy as np
        from satpy.composites.ahi import GreenCorrector
        from pyresample.geometry import AreaDefinition
        rows = 5
        cols = 10
        area = AreaDefinition(
            'test', 'test', 'test',
            {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0},
            cols, rows,
            (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
        comp = GreenCorrector('green', prerequisites=(0.51, 0.85),
                              standard_name='toa_bidirectional_reflectance')
        c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25,
                           dims=('y', 'x'),
                           attrs={'name': 'C01', 'area': area})
        c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30,
                           dims=('y', 'x'),
                           attrs={'name': 'C02', 'area': area})
        res = comp((c01, c02))
        self.assertIsInstance(res, xr.DataArray)
        self.assertIsInstance(res.data, da.Array)
        self.assertEqual(res.attrs['name'], 'green')
        self.assertEqual(res.attrs['standard_name'],
                         'toa_bidirectional_reflectance')
        data = res.compute()
        # With GreenCorrector's default fractions (0.85, 0.15):
        # 0.85 * 0.25 + 0.15 * 0.30 = 0.2575
        np.testing.assert_allclose(data, 0.2575)


def suite():
    """The test suite for test_ahi."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestAHIComposites))
    return mysuite
satpy-0.20.0/satpy/tests/compositor_tests/test_viirs.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for VIIRS compositors.""" import unittest try: from unittest import mock except ImportError: import mock class TestVIIRSComposites(unittest.TestCase): """Test VIIRS-specific composites.""" def data_area_ref_corrector(self): """Create test area definition and data.""" import dask.array as da import numpy as np from pyresample.geometry import AreaDefinition rows = 5 cols = 10 area = AreaDefinition( 'some_area_name', 'On-the-fly area', 'geosabii', {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', 'units': 'm'}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) dnb = np.zeros((rows, cols)) + 25 dnb[3, :] += 25 dnb[4:, :] += 50 dnb = da.from_array(dnb, chunks=100) return area, dnb def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites import CompositorLoader cl = CompositorLoader() cl.load_sensor_composites('viirs') def test_histogram_dnb(self): """Test the 'histogram_dnb' compositor.""" import xarray as xr import dask.array as da import numpy as np from satpy.composites.viirs import HistogramDNB from pyresample.geometry import AreaDefinition rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') dnb = np.zeros((rows, cols)) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) # data changes by row, sza changes by col for testing sza = np.zeros((rows, cols)) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) res = comp((c01, c02)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'histogram_dnb') self.assertEqual(res.attrs['standard_name'], 'equalized_radiance') data = res.compute() unique_values = np.unique(data) np.testing.assert_allclose(unique_values, [0.5994, 0.7992, 0.999], rtol=1e-3) def test_adaptive_dnb(self): """Test the 'adaptive_dnb' compositor.""" import xarray as xr import dask.array as da import numpy as np from satpy.composites.viirs import AdaptiveDNB from pyresample.geometry import AreaDefinition rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = AdaptiveDNB('adaptive_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') dnb = np.zeros((rows, cols)) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) sza = np.zeros((rows, cols)) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) res = comp((c01, c02)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'adaptive_dnb') self.assertEqual(res.attrs['standard_name'], 'equalized_radiance') data = res.compute() np.testing.assert_allclose(data.data, 0.999, rtol=1e-4) def test_erf_dnb(self): 
"""Test the 'dynamic_dnb' or ERF DNB compositor.""" import xarray as xr import dask.array as da import numpy as np from satpy.composites.viirs import ERFDNB from pyresample.geometry import AreaDefinition rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') dnb = np.zeros((rows, cols)) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) sza = np.zeros((rows, cols)) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) lza = np.zeros((rows, cols)) + 70.0 lza[:, 3] += 20.0 lza[:, 4:] += 45.0 lza = da.from_array(lza, chunks=25) c03 = xr.DataArray(lza, dims=('y', 'x'), attrs={'name': 'lunar_zenith_angle', 'area': area}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, dims=('y',), attrs={'name': 'moon_illumination_fraction', 'area': area}) res = comp((c01, c02, c03, mif)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'dynamic_dnb') self.assertEqual(res.attrs['standard_name'], 'equalized_radiance') data = res.compute() unique = np.unique(data) np.testing.assert_allclose(unique, [0.00000000e+00, 1.00446703e-01, 1.64116082e-01, 2.09233451e-01, 1.43916324e+02, 2.03528498e+02, 2.49270516e+02]) def test_hncc_dnb(self): """Test the 'hncc_dnb' compositor.""" import xarray as xr import dask.array as da import numpy as np from satpy.composites.viirs import NCCZinke from pyresample.geometry import AreaDefinition rows = 5 cols = 10 area = AreaDefinition( 'test', 'test', 'test', {'proj': 'eqc', 'lon_0': 0.0, 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') dnb = np.zeros((rows, cols)) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) sza = np.zeros((rows, cols)) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) lza = np.zeros((rows, cols)) + 70.0 lza[:, 3] += 20.0 lza[:, 4:] += 45.0 lza = da.from_array(lza, chunks=25) c03 = xr.DataArray(lza, dims=('y', 'x'), attrs={'name': 'lunar_zenith_angle', 'area': area}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, dims=('y',), attrs={'name': 'moon_illumination_fraction', 'area': area}) res = comp((c01, c02, c03, mif)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['name'], 'hncc_dnb') self.assertEqual(res.attrs['standard_name'], 'ncc_radiance') data = res.compute() unique = np.unique(data) np.testing.assert_allclose( unique, [3.48479712e-04, 6.96955799e-04, 1.04543189e-03, 4.75394738e-03, 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03, 4.50001560e+03]) def test_reflectance_corrector_abi(self): """Test ReflectanceCorrector modifier with ABI data.""" import xarray as xr import dask.array as da import numpy as np from satpy.composites.viirs import ReflectanceCorrector from satpy import 
DatasetID ref_cor = ReflectanceCorrector(dem_filename='_fake.hdf', optional_prerequisites=[ DatasetID(name='satellite_azimuth_angle'), DatasetID(name='satellite_zenith_angle'), DatasetID(name='solar_azimuth_angle'), DatasetID(name='solar_zenith_angle')], name='C01', prerequisites=[], wavelength=(0.45, 0.47, 0.49), resolution=1000, calibration='reflectance', modifiers=('sunz_corrected', 'rayleigh_corrected_crefl',), sensor='abi') self.assertEqual(ref_cor.attrs['modifiers'], ('sunz_corrected', 'rayleigh_corrected_crefl',)) self.assertEqual(ref_cor.attrs['calibration'], 'reflectance') self.assertEqual(ref_cor.attrs['wavelength'], (0.45, 0.47, 0.49)) self.assertEqual(ref_cor.attrs['name'], 'C01') self.assertEqual(ref_cor.attrs['resolution'], 1000) self.assertEqual(ref_cor.attrs['sensor'], 'abi') self.assertEqual(ref_cor.attrs['prerequisites'], []) self.assertEqual(ref_cor.attrs['optional_prerequisites'], [ DatasetID(name='satellite_azimuth_angle'), DatasetID(name='satellite_zenith_angle'), DatasetID(name='solar_azimuth_angle'), DatasetID(name='solar_zenith_angle')]) area, dnb = self.data_area_ref_corrector() print(dnb.compute()) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'satellite_longitude': -89.5, 'satellite_latitude': 0.0, 'satellite_altitude': 35786023.4375, 'platform_name': 'GOES-16', 'calibration': 'reflectance', 'units': '%', 'wavelength': (0.45, 0.47, 0.49), 'name': 'C01', 'resolution': 1000, 'sensor': 'abi', 'start_time': '2017-09-20 17:30:40.800000', 'end_time': '2017-09-20 17:41:17.500000', 'area': area, 'ancillary_variables': []}) res = ref_cor([c01], []) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['satellite_longitude'], -89.5) self.assertEqual(res.attrs['satellite_latitude'], 0.0) self.assertEqual(res.attrs['satellite_altitude'], 35786023.4375) self.assertEqual(res.attrs['modifiers'], ('sunz_corrected', 'rayleigh_corrected_crefl',)) self.assertEqual(res.attrs['platform_name'], 'GOES-16') self.assertEqual(res.attrs['calibration'], 'reflectance') self.assertEqual(res.attrs['units'], '%') self.assertEqual(res.attrs['wavelength'], (0.45, 0.47, 0.49)) self.assertEqual(res.attrs['name'], 'C01') self.assertEqual(res.attrs['resolution'], 1000) self.assertEqual(res.attrs['sensor'], 'abi') self.assertEqual(res.attrs['start_time'], '2017-09-20 17:30:40.800000') self.assertEqual(res.attrs['end_time'], '2017-09-20 17:41:17.500000') self.assertEqual(res.attrs['area'], area) self.assertEqual(res.attrs['ancillary_variables'], []) data = res.values self.assertLess(abs(np.nanmean(data) - 26.00760944144745), 1e-10) self.assertEqual(data.shape, (5, 10)) unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(unique, [-1.0, 4.210745457958135, 6.7833906076177595, 8.730371329824473, 10.286627569545209, 11.744159436709374, 12.20226097829902, 13.501444598985305, 15.344399223932212, 17.173329483996515, 17.28798660754271, 18.29594550575925, 19.076835059905125, 19.288331720959864, 19.77043407084455, 19.887082168377006, 20.091028778326375, 20.230341149334617, 20.457671064690196, 20.82686905639114, 21.021094816441195, 21.129963777952124, 41.601857910095575, 43.963919057675504, 46.21672174361075, 46.972099490462085, 47.497072794632835, 47.80393007974336, 47.956765988770385, 48.043025685032106, 51.909142813383916, 58.8234273736508, 68.84706145641482, 69.91085190887961, 71.10179768327806, 71.33161009169649]) def test_reflectance_corrector_viirs(self): """Test ReflectanceCorrector modifier with VIIRS data.""" import xarray as xr 
import dask.array as da import numpy as np import datetime from satpy.composites.viirs import ReflectanceCorrector from satpy import DatasetID ref_cor = ReflectanceCorrector(dem_filename='_fake.hdf', optional_prerequisites=[ DatasetID(name='satellite_azimuth_angle'), DatasetID(name='satellite_zenith_angle'), DatasetID(name='solar_azimuth_angle'), DatasetID(name='solar_zenith_angle')], name='I01', prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, calibration='reflectance', modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), sensor='viirs') self.assertEqual(ref_cor.attrs['modifiers'], ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband')) self.assertEqual(ref_cor.attrs['calibration'], 'reflectance') self.assertEqual(ref_cor.attrs['wavelength'], (0.6, 0.64, 0.68)) self.assertEqual(ref_cor.attrs['name'], 'I01') self.assertEqual(ref_cor.attrs['resolution'], 371) self.assertEqual(ref_cor.attrs['sensor'], 'viirs') self.assertEqual(ref_cor.attrs['prerequisites'], []) self.assertEqual(ref_cor.attrs['optional_prerequisites'], [ DatasetID(name='satellite_azimuth_angle'), DatasetID(name='satellite_zenith_angle'), DatasetID(name='solar_azimuth_angle'), DatasetID(name='solar_zenith_angle')]) area, dnb = self.data_area_ref_corrector() def make_xarray(self, file_key, name, standard_name, wavelength=None, units='degrees', calibration=None, file_type=('gitco', 'gimgo')): return xr.DataArray(dnb, dims=('y', 'x'), attrs={'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength, 'level': None, 'modifiers': None, 'calibration': calibration, 'file_key': file_key, 'resolution': 371, 'file_type': file_type, 'name': name, 'standard_name': standard_name, 'platform_name': 'Suomi-NPP', 'polarization': None, 'sensor': 'viirs', 'units': units, 'start_time': datetime.datetime(2012, 2, 25, 18, 1, 24, 570942), 'end_time': datetime.datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area, 'ancillary_variables': []}) c01 = make_xarray(self, None, 'I01', 'toa_bidirectional_reflectance', wavelength=(0.6, 0.64, 0.68), units='%', calibration='reflectance', file_type='svi01') c02 = make_xarray(self, 'All_Data/{file_group}_All/SatelliteAzimuthAngle', 'satellite_azimuth_angle', 'sensor_azimuth_angle') c03 = make_xarray(self, 'All_Data/{file_group}_All/SatelliteZenithAngle', 'satellite_zenith_angle', 'sensor_zenith_angle') c04 = make_xarray(self, 'All_Data/{file_group}_All/SolarAzimuthAngle', 'solar_azimuth_angle', 'solar_azimuth_angle') c05 = make_xarray(self, 'All_Data/{file_group}_All/SolarZenithAngle', 'solar_zenith_angle', 'solar_zenith_angle') res = ref_cor([c01], [c02, c03, c04, c05]) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['wavelength'], (0.6, 0.64, 0.68)) self.assertEqual(res.attrs['modifiers'], ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband')) self.assertEqual(res.attrs['calibration'], 'reflectance') self.assertEqual(res.attrs['resolution'], 371) self.assertEqual(res.attrs['file_type'], 'svi01') self.assertEqual(res.attrs['name'], 'I01') self.assertEqual(res.attrs['standard_name'], 'toa_bidirectional_reflectance') self.assertEqual(res.attrs['platform_name'], 'Suomi-NPP') self.assertEqual(res.attrs['sensor'], 'viirs') self.assertEqual(res.attrs['units'], '%') self.assertEqual(res.attrs['start_time'], datetime.datetime(2012, 2, 25, 18, 1, 24, 570942)) self.assertEqual(res.attrs['end_time'], datetime.datetime(2012, 2, 25, 18, 11, 21, 175760)) self.assertEqual(res.attrs['area'], area) 
self.assertEqual(res.attrs['ancillary_variables'], []) data = res.values self.assertLess(abs(np.mean(data) - 40.7578684169142), 1e-10) self.assertEqual(data.shape, (5, 10)) unique = np.unique(data) np.testing.assert_allclose(unique, [25.20341702519979, 52.38819447051263, 75.79089653845898]) def test_reflectance_corrector_modis(self): """Test ReflectanceCorrector modifier with MODIS data.""" import xarray as xr import dask.array as da import numpy as np import datetime from satpy.composites.viirs import ReflectanceCorrector from satpy import DatasetID sataa_did = DatasetID(name='satellite_azimuth_angle') satza_did = DatasetID(name='satellite_zenith_angle') solaa_did = DatasetID(name='solar_azimuth_angle') solza_did = DatasetID(name='solar_zenith_angle') ref_cor = ReflectanceCorrector( dem_filename='_fake.hdf', optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name='1', prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration='reflectance', modifiers=('sunz_corrected', 'rayleigh_corrected_crefl'), sensor='modis') self.assertEqual(ref_cor.attrs['modifiers'], ('sunz_corrected', 'rayleigh_corrected_crefl')) self.assertEqual(ref_cor.attrs['calibration'], 'reflectance') self.assertEqual(ref_cor.attrs['wavelength'], (0.62, 0.645, 0.67)) self.assertEqual(ref_cor.attrs['name'], '1') self.assertEqual(ref_cor.attrs['resolution'], 250) self.assertEqual(ref_cor.attrs['sensor'], 'modis') self.assertEqual(ref_cor.attrs['prerequisites'], []) self.assertEqual(ref_cor.attrs['optional_prerequisites'], [ DatasetID(name='satellite_azimuth_angle'), DatasetID(name='satellite_zenith_angle'), DatasetID(name='solar_azimuth_angle'), DatasetID(name='solar_zenith_angle')]) area, dnb = self.data_area_ref_corrector() def make_xarray(self, name, calibration, wavelength=None, modifiers=None, resolution=1000, file_type='hdf_eos_geo'): return xr.DataArray(dnb, dims=('y', 'x'), attrs={'wavelength': wavelength, 'level': None, 'modifiers': modifiers, 'calibration': calibration, 'resolution': resolution, 'file_type': file_type, 'name': name, 'coordinates': ['longitude', 'latitude'], 'platform_name': 'EOS-Aqua', 'polarization': None, 'sensor': 'modis', 'units': '%', 'start_time': datetime.datetime(2012, 8, 13, 18, 46, 1, 439838), 'end_time': datetime.datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area, 'ancillary_variables': []}) c01 = make_xarray(self, '1', 'reflectance', wavelength=(0.62, 0.645, 0.67), modifiers='sunz_corrected', resolution=500, file_type='hdf_eos_data_500m') c02 = make_xarray(self, 'satellite_azimuth_angle', None) c03 = make_xarray(self, 'satellite_zenith_angle', None) c04 = make_xarray(self, 'solar_azimuth_angle', None) c05 = make_xarray(self, 'solar_zenith_angle', None) res = ref_cor([c01], [c02, c03, c04, c05]) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) self.assertEqual(res.attrs['wavelength'], (0.62, 0.645, 0.67)) self.assertEqual(res.attrs['modifiers'], ('sunz_corrected', 'rayleigh_corrected_crefl',)) self.assertEqual(res.attrs['calibration'], 'reflectance') self.assertEqual(res.attrs['resolution'], 500) self.assertEqual(res.attrs['file_type'], 'hdf_eos_data_500m') self.assertEqual(res.attrs['name'], '1') self.assertEqual(res.attrs['platform_name'], 'EOS-Aqua') self.assertEqual(res.attrs['sensor'], 'modis') self.assertEqual(res.attrs['units'], '%') self.assertEqual(res.attrs['start_time'], datetime.datetime(2012, 8, 13, 18, 46, 1, 439838)) self.assertEqual(res.attrs['end_time'], datetime.datetime(2012, 8, 13, 18, 57, 47, 
                                                          746296))
        self.assertEqual(res.attrs['area'], area)
        self.assertEqual(res.attrs['ancillary_variables'], [])
        data = res.values
        if abs(np.mean(data) - 38.734365117099145) >= 1e-10:
            raise AssertionError('{} is not within {} of {}'.format(
                np.mean(data), 1e-10, 38.734365117099145))
        self.assertEqual(data.shape, (5, 10))
        unique = np.unique(data)
        np.testing.assert_allclose(unique, [24.641586, 50.431692, 69.315375])


class ViirsReflectanceCorrectorTest(unittest.TestCase):
    """Tests for the VIIRS/MODIS Corrected Reflectance modifier."""

    def setUp(self):
        """Patch in-class imports."""
        self.astronomy = mock.MagicMock()
        self.orbital = mock.MagicMock()
        modules = {
            'pyorbital.astronomy': self.astronomy,
            'pyorbital.orbital': self.orbital,
        }
        self.module_patcher = mock.patch.dict('sys.modules', modules)
        self.module_patcher.start()

    def tearDown(self):
        """Unpatch in-class imports."""
        self.module_patcher.stop()

    @mock.patch('satpy.composites.viirs.get_satpos')
    def test_get_angles(self, get_satpos):
        """Test sun and satellite angle calculation."""
        import numpy as np
        import dask.array as da
        from satpy.composites.viirs import ReflectanceCorrector
        # Patch methods
        get_satpos.return_value = 'sat_lon', 'sat_lat', 12345678
        self.orbital.get_observer_look.return_value = 0, 0
        self.astronomy.get_alt_az.return_value = 0, 0
        area = mock.MagicMock()
        lons = np.zeros((5, 5))
        lons[1, 1] = np.inf
        lons = da.from_array(lons, chunks=5)
        lats = np.zeros((5, 5))
        lats[1, 1] = np.inf
        lats = da.from_array(lats, chunks=5)
        area.get_lonlats.return_value = (lons, lats)
        vis = mock.MagicMock(attrs={'area': area, 'start_time': 'start_time'})
        # Compute angles
        psp = ReflectanceCorrector(name='dummy')
        psp.get_angles(vis)
        # Check the arguments of the get_observer_look() call, especially the
        # altitude unit conversion from meters to kilometers
        self.orbital.get_observer_look.assert_called_once()
        args = self.orbital.get_observer_look.call_args[0]
        self.assertEqual(args[:4], ('sat_lon', 'sat_lat', 12345.678, 'start_time'))
        self.assertIsInstance(args[4], da.Array)
        self.assertIsInstance(args[5], da.Array)
        self.assertEqual(args[6], 0)


def suite():
    """Create test suite for test_viirs."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestVIIRSComposites))
    mysuite.addTest(loader.loadTestsFromTestCase(ViirsReflectanceCorrectorTest))
    return mysuite
satpy-0.20.0/satpy/tests/enhancement_tests/
satpy-0.20.0/satpy/tests/enhancement_tests/__init__.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""The enhancements tests package.""" import sys from satpy.tests.enhancement_tests import (test_enhancements, test_viirs, test_abi) if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest def suite(): """Create test suite for all enhancement tests.""" mysuite = unittest.TestSuite() mysuite.addTests(test_enhancements.suite()) mysuite.addTests(test_viirs.suite()) mysuite.addTests(test_abi.suite()) return mysuite satpy-0.20.0/satpy/tests/enhancement_tests/test_abi.py000066400000000000000000000037231362525524100231200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing for the ABI enhancement functions.""" import unittest import numpy as np import xarray as xr import dask.array as da class TestABIEnhancement(unittest.TestCase): """Test the ABI enhancement functions.""" def setUp(self): """Create fake data for the tests.""" data = da.linspace(0, 1, 16).reshape((4, 4)) self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) def test_cimss_true_color_contrast(self): """Test the cimss_true_color_contrast enhancement.""" from satpy.enhancements.abi import cimss_true_color_contrast from trollimage.xrimage import XRImage expected = np.array([[ [0., 0., 0.05261956, 0.13396146], [0.21530335, 0.29664525, 0.37798715, 0.45932905], [0.54067095, 0.62201285, 0.70335475, 0.78469665], [0.86603854, 0.94738044, 1., 1.], ]]) img = XRImage(self.da) cimss_true_color_contrast(img) np.testing.assert_almost_equal(img.data.compute(), expected) def suite(): """Create the test suite for test_abi.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestABIEnhancement)) return mysuite satpy-0.20.0/satpy/tests/enhancement_tests/test_enhancements.py000066400000000000000000000414301362525524100250320ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing the enhancements functions, e.g. 
cira_stretch.""" import os import unittest import numpy as np import xarray as xr import dask.array as da try: from unittest import mock except ImportError: import mock class TestEnhancementStretch(unittest.TestCase): """Class for testing enhancements in satpy.enhancements.""" def setUp(self): """Create test data used by every test.""" data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 data[0, 0] = np.nan # one bad value for testing crefl_data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 crefl_data /= 5.605 crefl_data[0, 0] = np.nan # one bad value for testing crefl_data[0, 1] = 0. self.ch1 = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) self.ch2 = xr.DataArray(crefl_data, dims=('y', 'x'), attrs={'test': 'test'}) rgb_data = np.stack([data, data, data]) self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}) def _test_enhancement(self, func, data, expected, **kwargs): """Perform basic checks that apply to multiple tests.""" from trollimage.xrimage import XRImage pre_attrs = data.attrs img = XRImage(data) func(img, **kwargs) self.assertIsInstance(img.data.data, da.Array) self.assertListEqual(sorted(pre_attrs.keys()), sorted(img.data.attrs.keys()), "DataArray attributes were not preserved") np.testing.assert_allclose(img.data.values, expected, atol=1.e-6, rtol=0) def test_cira_stretch(self): """Test applying the cira_stretch.""" from satpy.enhancements import cira_stretch expected = np.array([[ [np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296], [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]]) self._test_enhancement(cira_stretch, self.ch1, expected) def test_lookup(self): """Test the lookup enhancement function.""" from satpy.enhancements import lookup expected = np.array([[ [0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]]]) lut = np.arange(256.) self._test_enhancement(lookup, self.ch1, expected, luts=lut) expected = np.array([[[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]], [[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]], [[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]]]) lut = np.arange(256.) 
        lut = np.vstack((lut, lut, lut)).T
        self._test_enhancement(lookup, self.rgb, expected, luts=lut)

    def test_colorize(self):
        """Test the colorize enhancement function."""
        from satpy.enhancements import colorize
        from trollimage.colormap import brbg
        expected = np.array([
            [[np.nan, 3.29409498e-01, 3.29409498e-01, 4.35952940e-06, 4.35952940e-06],
             [4.35952940e-06, 4.35952940e-06, 4.35952940e-06, 4.35952940e-06, 4.35952940e-06]],
            [[np.nan, 1.88249866e-01, 1.88249866e-01, 2.35302110e-01, 2.35302110e-01],
             [2.35302110e-01, 2.35302110e-01, 2.35302110e-01, 2.35302110e-01, 2.35302110e-01]],
            [[np.nan, 1.96102817e-02, 1.96102817e-02, 1.88238767e-01, 1.88238767e-01],
             [1.88238767e-01, 1.88238767e-01, 1.88238767e-01, 1.88238767e-01, 1.88238767e-01]]])
        self._test_enhancement(colorize, self.ch1, expected, palettes=brbg)

    def test_palettize(self):
        """Test the palettize enhancement function."""
        from satpy.enhancements import palettize
        from trollimage.colormap import brbg
        expected = np.array([[[10, 0, 0, 10, 10], [10, 10, 10, 10, 10]]])
        self._test_enhancement(palettize, self.ch1, expected, palettes=brbg)

    def test_three_d_effect(self):
        """Test the three_d_effect enhancement function."""
        from satpy.enhancements import three_d_effect
        expected = np.array([[
            [np.nan, np.nan, -389.5, -294.5, 826.5],
            [np.nan, np.nan, 85.5, 180.5, 1301.5]]])
        self._test_enhancement(three_d_effect, self.ch1, expected)

    def test_crefl_scaling(self):
        """Test the crefl_scaling enhancement function."""
        from satpy.enhancements import crefl_scaling
        expected = np.array([[
            [np.nan, 0., 0., 0.44378, 0.631734],
            [0.737562, 0.825041, 0.912521, 1., 1.]]])
        self._test_enhancement(crefl_scaling, self.ch2, expected,
                               idx=[0., 25., 55., 100., 255.],
                               sc=[0., 90., 140., 175., 255.])

    def test_btemp_threshold(self):
        """Test applying the btemp_threshold."""
        from satpy.enhancements import btemp_threshold
        expected = np.array([[
            [np.nan, 0.946207, 0.892695, 0.839184, 0.785672],
            [0.73216, 0.595869, 0.158745, -0.278379, -0.715503]]])
        self._test_enhancement(btemp_threshold, self.ch1, expected,
                               min_in=-200, max_in=500, threshold=350)

    def test_merge_colormaps(self):
        """Test merging colormaps."""
        from trollimage.colormap import Colormap
        from satpy.enhancements import _merge_colormaps as mcp, create_colormap
        ret_map = mock.MagicMock()
        create_colormap_mock = mock.Mock(wraps=create_colormap)
        cmap1 = Colormap((1, (1., 1., 1.)))
        kwargs = {'palettes': cmap1}
        with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock):
            res = mcp(kwargs)
        self.assertTrue(res is cmap1)
        create_colormap_mock.assert_not_called()
        create_colormap_mock.reset_mock()
        ret_map.reset_mock()

        cmap1 = {'colors': 'blues', 'min_value': 0, 'max_value': 1}
        kwargs = {'palettes': [cmap1]}
        with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock),\
                mock.patch('trollimage.colormap.blues', ret_map):
            _ = mcp(kwargs)
        create_colormap_mock.assert_called_once()
        ret_map.reverse.assert_not_called()
        ret_map.set_range.assert_called_with(0, 1)
        create_colormap_mock.reset_mock()
        ret_map.reset_mock()

        cmap2 = {'colors': 'blues', 'min_value': 2, 'max_value': 3, 'reverse': True}
        kwargs = {'palettes': [cmap2]}
        with mock.patch('trollimage.colormap.blues', ret_map):
            _ = mcp(kwargs)
        ret_map.reverse.assert_called_once()
        ret_map.set_range.assert_called_with(2, 3)
        create_colormap_mock.reset_mock()
        ret_map.reset_mock()

        kwargs = {'palettes': [cmap1, cmap2]}
        with mock.patch('trollimage.colormap.blues', ret_map):
            _ = mcp(kwargs)
        ret_map.__add__.assert_called_once()

    def tearDown(self):
        """Clean up."""
        pass


class
TestColormapLoading(unittest.TestCase): """Test utilities used with colormaps.""" def test_cmap_from_file_rgb(self): """Test that colormaps can be loaded from a binary file.""" from satpy.enhancements import create_colormap from tempfile import NamedTemporaryFile # create the colormap file on disk with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap: cmap_filename = tmp_cmap.name np.save(cmap_filename, np.array([ [255, 0, 0], [255, 255, 0], [255, 255, 255], [0, 0, 255], ])) try: cmap = create_colormap({'filename': cmap_filename}) self.assertEqual(cmap.colors.shape[0], 4) np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) self.assertEqual(cmap.values.shape[0], 4) self.assertEqual(cmap.values[0], 0) self.assertEqual(cmap.values[-1], 1.0) cmap = create_colormap({'filename': cmap_filename, 'min_value': 50, 'max_value': 100}) self.assertEqual(cmap.colors.shape[0], 4) np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) self.assertEqual(cmap.values.shape[0], 4) self.assertEqual(cmap.values[0], 50) self.assertEqual(cmap.values[-1], 100) finally: os.remove(cmap_filename) def test_cmap_from_file_rgb_1(self): """Test that colormaps can be loaded from a binary file with 0-1 colors.""" from satpy.enhancements import create_colormap from tempfile import NamedTemporaryFile # create the colormap file on disk with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap: cmap_filename = tmp_cmap.name np.save(cmap_filename, np.array([ [1, 0, 0], [1, 1, 0], [1, 1, 1], [0, 0, 1], ])) try: cmap = create_colormap({'filename': cmap_filename, 'color_scale': 1}) self.assertEqual(cmap.colors.shape[0], 4) np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) self.assertEqual(cmap.values.shape[0], 4) self.assertEqual(cmap.values[0], 0) self.assertEqual(cmap.values[-1], 1.0) cmap = create_colormap({'filename': cmap_filename, 'color_scale': 1, 'min_value': 50, 'max_value': 100}) self.assertEqual(cmap.colors.shape[0], 4) np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) self.assertEqual(cmap.values.shape[0], 4) self.assertEqual(cmap.values[0], 50) self.assertEqual(cmap.values[-1], 100) finally: os.remove(cmap_filename) def test_cmap_from_file_vrgb(self): """Test that colormaps can be loaded from a binary file with values.""" from satpy.enhancements import create_colormap from tempfile import NamedTemporaryFile # create the colormap file on disk with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap: cmap_filename = tmp_cmap.name np.save(cmap_filename, np.array([ [128, 255, 0, 0], [130, 255, 255, 0], [132, 255, 255, 255], [134, 0, 0, 255], ])) try: # default mode of VRGB cmap = create_colormap({'filename': cmap_filename}) self.assertEqual(cmap.colors.shape[0], 4) np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) self.assertEqual(cmap.values.shape[0], 4) self.assertEqual(cmap.values[0], 128) self.assertEqual(cmap.values[-1], 134) cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': 'RGBA'}) self.assertEqual(cmap.colors.shape[0], 4) self.assertEqual(cmap.colors.shape[1], 4) # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) self.assertEqual(cmap.values.shape[0], 4) self.assertEqual(cmap.values[0], 0) self.assertEqual(cmap.values[-1], 1.0) cmap = create_colormap({'filename': cmap_filename, 'min_value': 50, 'max_value': 100}) self.assertEqual(cmap.colors.shape[0], 4) np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) self.assertEqual(cmap.values.shape[0], 4) self.assertEqual(cmap.values[0], 50) self.assertEqual(cmap.values[-1], 100) 
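            # The file written above stores four columns (value, R, G, B), so
            # forcing colormap_mode='RGB' is inconsistent with its shape and
            # create_colormap is expected to raise a ValueError below.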
            self.assertRaises(ValueError, create_colormap,
                              {'filename': cmap_filename, 'colormap_mode': 'RGB',
                               'min_value': 50, 'max_value': 100})
        finally:
            os.remove(cmap_filename)

    def test_cmap_from_file_vrgba(self):
        """Test that colormaps can be loaded with RGBA colors and values."""
        from satpy.enhancements import create_colormap
        from tempfile import NamedTemporaryFile
        # create the colormap file on disk
        with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap:
            cmap_filename = tmp_cmap.name
            np.save(cmap_filename, np.array([
                [128, 128, 255, 0, 0],  # value, R, G, B, A
                [130, 130, 255, 255, 0],
                [132, 132, 255, 255, 255],
                [134, 134, 0, 0, 255],
            ]))
        try:
            # default mode of VRGBA
            cmap = create_colormap({'filename': cmap_filename})
            self.assertEqual(cmap.colors.shape[0], 4)
            self.assertEqual(cmap.colors.shape[1], 4)  # RGBA
            np.testing.assert_equal(cmap.colors[0], [128 / 255.0, 1.0, 0, 0])
            self.assertEqual(cmap.values.shape[0], 4)
            self.assertEqual(cmap.values[0], 128)
            self.assertEqual(cmap.values[-1], 134)
            self.assertRaises(ValueError, create_colormap,
                              {'filename': cmap_filename, 'colormap_mode': 'RGBA'})
            cmap = create_colormap({'filename': cmap_filename,
                                    'min_value': 50, 'max_value': 100})
            self.assertEqual(cmap.colors.shape[0], 4)
            self.assertEqual(cmap.colors.shape[1], 4)  # RGBA
            np.testing.assert_equal(cmap.colors[0], [128 / 255.0, 1.0, 0, 0])
            self.assertEqual(cmap.values.shape[0], 4)
            self.assertEqual(cmap.values[0], 50)
            self.assertEqual(cmap.values[-1], 100)
        finally:
            os.remove(cmap_filename)

    def test_cmap_from_file_bad_shape(self):
        """Test that an unknown array shape causes an error."""
        from satpy.enhancements import create_colormap
        from tempfile import NamedTemporaryFile
        # create the colormap file on disk
        with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap:
            cmap_filename = tmp_cmap.name
            np.save(cmap_filename, np.array([
                [0],
                [64],
                [128],
                [255],
            ]))
        try:
            self.assertRaises(ValueError, create_colormap,
                              {'filename': cmap_filename})
        finally:
            os.remove(cmap_filename)

    def test_cmap_from_trollimage(self):
        """Test that colormaps in trollimage can be loaded."""
        from satpy.enhancements import create_colormap
        cmap = create_colormap({'colors': 'pubu'})
        from trollimage.colormap import pubu
        np.testing.assert_equal(cmap.colors, pubu.colors)
        np.testing.assert_equal(cmap.values, pubu.values)

    def test_cmap_no_colormap(self):
        """Test that being unable to create a colormap raises an error."""
        from satpy.enhancements import create_colormap
        self.assertRaises(ValueError, create_colormap, {})

    def test_cmap_list(self):
        """Test that colors can be a list/tuple."""
        from satpy.enhancements import create_colormap
        colors = [
            [0, 0, 1],
            [1, 0, 1],
            [0, 1, 1],
            [1, 1, 1],
        ]
        values = [2, 4, 6, 8]
        cmap = create_colormap({'colors': colors, 'color_scale': 1})
        self.assertEqual(cmap.colors.shape[0], 4)
        np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0])
        self.assertEqual(cmap.values.shape[0], 4)
        self.assertEqual(cmap.values[0], 0)
        self.assertEqual(cmap.values[-1], 1.0)

        cmap = create_colormap({'colors': colors, 'color_scale': 1, 'values': values})
        self.assertEqual(cmap.colors.shape[0], 4)
        np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0])
        self.assertEqual(cmap.values.shape[0], 4)
        self.assertEqual(cmap.values[0], 2)
        self.assertEqual(cmap.values[-1], 8)


def suite():
    """Create test suite for builtin enhancement functions."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestEnhancementStretch))
    mysuite.addTest(loader.loadTestsFromTestCase(TestColormapLoading))
    return
mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/tests/enhancement_tests/test_viirs.py000066400000000000000000000076061362525524100235250ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing for the VIIRS enhancement function """ import unittest import numpy as np import xarray as xr import dask.array as da class TestVIIRSEnhancement(unittest.TestCase): """Class for testing the VIIRS enhancement function in satpy.enhancements.viirs""" def setUp(self): """Setup the test""" data = np.arange(15, 301, 15).reshape(2, 10) self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) self.palette = {'colors': [[14, [0.0, 0.0, 0.0]], [15, [0.0, 0.0, 0.39215]], [16, [0.76862, 0.63529, 0.44705]], [17, [0.76862, 0.63529, 0.44705]], [18, [0.0, 0.0, 1.0]], [20, [1.0, 1.0, 1.0]], [27, [0.0, 1.0, 1.0]], [30, [0.78431, 0.78431, 0.78431]], [31, [0.39215, 0.39215, 0.39215]], [88, [0.70588, 0.0, 0.90196]], [100, [0.19607, 1.0, 0.39215]], [120, [0.19607, 1.0, 0.39215]], [121, [0.0, 1.0, 0.0]], [130, [0.0, 1.0, 0.0]], [131, [0.78431, 1.0, 0.0]], [140, [0.78431, 1.0, 0.0]], [141, [1.0, 1.0, 0.58823]], [150, [1.0, 1.0, 0.58823]], [151, [1.0, 1.0, 0.0]], [160, [1.0, 1.0, 0.0]], [161, [1.0, 0.78431, 0.0]], [170, [1.0, 0.78431, 0.0]], [171, [1.0, 0.58823, 0.19607]], [180, [1.0, 0.58823, 0.19607]], [181, [1.0, 0.39215, 0.0]], [190, [1.0, 0.39215, 0.0]], [191, [1.0, 0.0, 0.0]], [200, [1.0, 0.0, 0.0]], [201, [0.0, 0.0, 0.0]]], 'min_value': 0, 'max_value': 201} def test_viirs(self): from satpy.enhancements.viirs import water_detection expected = [[[1, 7, 8, 8, 8, 9, 10, 11, 14, 8], [20, 23, 26, 10, 12, 15, 18, 21, 24, 27]]] self._test_enhancement(water_detection, self.da, expected, palettes=self.palette) def _test_enhancement(self, func, data, expected, **kwargs): from trollimage.xrimage import XRImage pre_attrs = data.attrs img = XRImage(data) func(img, **kwargs) self.assertIsInstance(img.data.data, da.Array) self.assertListEqual(sorted(pre_attrs.keys()), sorted(img.data.attrs.keys()), "DataArray attributes were not preserved") np.testing.assert_allclose(img.data.values, expected, atol=1.e-6, rtol=0) def tearDown(self): """Clean up""" pass def suite(): """The test suite for test_viirs. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestVIIRSEnhancement)) return mysuite satpy-0.20.0/satpy/tests/features/000077500000000000000000000000001362525524100170565ustar00rootroot00000000000000satpy-0.20.0/satpy/tests/features/feature-load.feature000066400000000000000000000032741362525524100230110ustar00rootroot00000000000000Feature: Simple and intuitive scene loading (sc. 1) The scientific user explores the data and prototypes new algorithms. It needs access not only to the calibrated data, but also to the raw data and probably a majority of the metadata. 
    The user would work with data locally, and it has to be easy to tell satpy
    where the data is. Providing filename templates or editing a config file
    before starting to work is a pain, so it should be avoided.

    Loading the data should be a simple 1-step procedure. At load time, the
    user provides the data and metadata he/she needs, and if some items are
    unavailable or inaccessible, the user should be informed in a gentle but
    clear way (i.e. no crash). The data and metadata available from the file
    have to be explorable, so that the user doesn't need to guess what the
    (meta)data is called.

  @download
  Scenario: 1-step data loading
    Given data is available
    When user loads the data without providing a config file
    Then the data is available in a scene object

  @download
  Scenario: No crash when metadata is missing
    Given data is available
    When user loads the data without providing a config file
    And some items are not available
    Then the data is available in a scene object

  @download
  Scenario: Data is explorable
    Given data is available
    When user wants to know what data is available
    Then available datasets are returned

  Scenario: Accessing datasets by name prefers less modified datasets
    Given datasets with the same name
    When a dataset is retrieved by name
    Then the least modified version of the dataset is returned
satpy-0.20.0/satpy/tests/features/feature-real-load-process-write.feature000066400000000000000000000175251362525524100265370ustar00rootroot00000000000000
Feature: Loading real data in many formats with the same command

    This feature loads real data from disk and generates resampled images; it
    serves as a way to system-test satpy.

    To provide test data to this feature, add a directory called `test_data`
    in the current directory. Under this directory, create a directory for
    each data format you want to test, and under that one a directory called
    `data` holding the data files and a directory called `ref` holding the
    reference images, e.g.:

    test_data
    |_ seviri_l1b_hrit
    |  |_ data
    |  |  |_ [all the MSG SEVIRI data files]
    |  |_ ref
    |     |_ overview_eurol.png
    |     |_ ...
    |_ viirs_sdr
    |  |_ data
    |  |  |_ [all the viirs SDR files]
    |  |_ ref
    |     |_ true_color_eurol.png
    |     |_ ...
    ...
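  # Each row of the Examples tables below corresponds roughly to the following
  # Python, sketched here for the avhrr_l1b_aapp/overview/eurol row
  # (get_all_files is the helper defined in the step implementations further
  # down):
  #
  #     from satpy import Scene
  #     scn = Scene(reader='avhrr_l1b_aapp',
  #                 filenames=get_all_files('test_data/avhrr_l1b_aapp/data', '*'))
  #     scn.load(['overview'])
  #     lscn = scn.resample('eurol')  # rows with area '-' use resampler='native'
  #     lscn.save_dataset('overview', filename='/tmp/overview_eurol.png')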
  @wip
  Scenario Outline: Reading and processing of real data
    Given <format> data is available
    When the user loads the <composite> composite
    And the user resamples the data to <area>
    And the user saves the composite to disk
    Then the resulting image should match the reference image

    Examples: AAPP L1 data
      | format | composite | area |
      | avhrr_l1b_aapp | overview | eurol |

    Examples: ABI L1 data
      | format | composite | area |
      | abi_l1b | overview | - |
      | abi_l1b | airmass | - |
      | abi_l1b | natural | - |

    # Examples: ACSPO data
    #   | format | composite | area |
    #   | acspo | overview | - |
    #   | acspo | true_color | - |
    #   | acspo | true_color | north_america |

    Examples: AHI L1 data
      | format | composite | area |
      | ahi_hsd | overview | - |
      | ahi_hsd | true_color | - |
      | ahi_hsd | true_color | australia |

    Examples: AMSR2 L1 data
      | format | composite | area |
      | amsr2_l1b | ice | moll |

    Examples: CLAVR-X data
      | format | composite | area |
      | clavrx | cloudtype | usa |

    Examples: EPS L1 data
      | format | composite | area |
      | avhrr_l1b_eps | overview | eurol |

    Examples: FCI FDHSI data
      | format | composite | area |
      | fci_l1c_fdhsi | overview | eurol |
      | fci_l1c_fdhsi | cloudtop | eurol |
      | fci_l1c_fdhsi | true_color | eurol |

    Examples: GAC data
      | format | composite | area |
      | avhrr_l1b_gaclac | overview | eurol |
      | avhrr_l1b_gaclac | cloudtop | eurol |

    # Examples: Generic Images

    # Examples: GEOCAT data
    #   | format | composite | area |
    #   | geocat | overview | - |
    #   | geocat | true_color | - |
    #   | geocat | true_color | north_america |

    # Examples: GHRSST OSISAF data
    #   | format | composite | area |
    #   | ghrsst_osisaf | overview | - |
    #   | ghrsst_osisaf | true_color | - |
    #   | ghrsst_osisaf | true_color | north_america |

    # Examples: Caliop v3 data
    #   | format | composite | area |
    #   | hdf4_caliopv3 | overview | - |
    #   | hdf4_caliopv3 | true_color | - |
    #   | hdf4_caliopv3 | true_color | north_america |

    Examples: MODIS HDF4-EOS data
      | format | composite | area |
      | modis_l1b | overview | eurol |
      | modis_l1b | true_color_lowres | eurol |
      | modis_l1b | true_color | eurol |

    Examples: Electro-L N2 HRIT data
      | format | composite | area |
      | electrol_hrit | overview | india |
      | electrol_hrit | cloudtop | india |

    Examples: GOES HRIT data
      | format | composite | area |
      | goes-imager_hrit | overview | usa |
      | goes-imager_hrit | cloudtop | usa |

    Examples: Himawari HRIT data
      | format | composite | area |
      | ahi_hrit | overview | australia |
      | ahi_hrit | cloudtop | australia |

    Examples: MSG HRIT data
      | format | composite | area |
      | seviri_l1b_hrit | overview | eurol |
      | seviri_l1b_hrit | cloudtop | eurol |

    Examples: HRPT data
      | format | composite | area |
      | avhrr_l1b_hrpt | overview | eurol |
      | avhrr_l1b_hrpt | cloudtop | eurol |

    # Examples: IASI L2 data
    # Examples: Lightning Imager L2
    # Examples: MAIA data

    Examples: MSG Native data
      | format | composite | area |
      | seviri_l1b_native | overview | eurol |
      | seviri_l1b_native | snow | eurol |
      | seviri_l1b_native | HRV | - |
      | seviri_l1b_native | overview | - |

    Examples: NWCSAF GEO data
      | format | composite | area |
      | nwcsaf-geo | cloudtype | eurol |
      | nwcsaf-geo | ctth | eurol |

    Examples: NWCSAF PPS data
      | format | composite | area |
      | nwcsaf-pps_nc | cloudtype | eurol |
      | nwcsaf-pps_nc | ctth | eurol |

    Examples: MSG Native data
      | format | composite | area |
      | seviri_l1b_native | overview | eurol |
      | seviri_l1b_native | cloudtop | eurol |

    Examples: OLCI L1 data
      | format | composite | area |
      | olci_l1b | true_color | eurol |

    Examples: OLCI L2 data
      | format | composite | area |
      | olci_l2 | karo | eurol |

    Examples: SLSTR L1 data
      | format | composite | area |
      | slstr_l1b | true_color | eurol |

    # Examples: NUCAPS data
    # Examples: OMPS EDR

    Examples: SAFE MSI L1 data
      | format | composite | area |
      | msi_safe | true_color | eurol |

    Examples: SAR-C L1 data
      | format | composite | area |
      | sar-c_safe | sar-ice | euron1 |
      | sar-c_safe | sar-rgb | euron1 |
      | sar-c_safe | sar-quick | euron1 |

    # Examples: SCATSAT 1 data
    #   | format | composite | area |
    #   | sar_c | ice | eurol |

    Examples: VIIRS compact data
      | format | composite | area |
      | viirs_compact | overview | eurol |
      | viirs_compact | true_color | eurol |

    Examples: VIIRS L1B data
      | format | composite | area |
      | viirs_l1b | overview | eurol |
      | viirs_l1b | true_color | eurol |

    Examples: VIIRS SDR data
      | format | composite | area |
      | viirs_sdr | overview | eurol |
      | viirs_sdr | true_color_lowres | eurol |
      | viirs_sdr | fog | eurol |
      | viirs_sdr | dust | eurol |
      | viirs_sdr | ash | eurol |
      | viirs_sdr | natural_sun_lowres | eurol |
      | viirs_sdr | snow_age | eurol |
      | viirs_sdr | fire_temperature | eurol |
      | viirs_sdr | fire_temperature_awips | eurol |
      | viirs_sdr | fire_temperature_eumetsat | eurol |
      | viirs_sdr | fire_temperature_39refl | eurol |
satpy-0.20.0/satpy/tests/features/steps/000077500000000000000000000000001362525524100202145ustar00rootroot00000000000000
satpy-0.20.0/satpy/tests/features/steps/steps-load.py000066400000000000000000000115311362525524100226420ustar00rootroot00000000000000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2018 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
""" """ import os import sys from behave import use_step_matcher, given, when, then if sys.version_info < (3, 0): from urllib2 import urlopen else: from urllib.request import urlopen use_step_matcher("re") @given(u'data is available') def step_impl_data_available(context): if not os.path.exists('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5'): response = urlopen('https://zenodo.org/record/16355/files/' 'SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5') with open('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5', mode="w") as fp: fp.write(response.read()) if not os.path.exists('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5'): response = urlopen('https://zenodo.org/record/16355/files/' 'GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5') with open('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5', mode="w") as fp: fp.write(response.read()) @when(u'user loads the data without providing a config file') def step_impl_user_loads_no_config(context): from satpy import Scene, find_files_and_readers from datetime import datetime os.chdir("/tmp/") readers_files = find_files_and_readers(sensor='viirs', start_time=datetime(2015, 3, 11, 11, 20), end_time=datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=readers_files) scn.load(["M02"]) context.scene = scn @then(u'the data is available in a scene object') def step_impl_data_available_in_scene(context): assert (context.scene["M02"] is not None) try: context.scene["M01"] is None assert False except KeyError: assert True @when(u'some items are not available') def step_impl_items_not_available(context): context.scene.load(["M01"]) @when(u'user wants to know what data is available') def step_impl_user_checks_availability(context): from satpy import Scene, find_files_and_readers from datetime import datetime os.chdir("/tmp/") reader_files = find_files_and_readers(sensor="viirs", start_time=datetime(2015, 3, 11, 11, 20), end_time=datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=reader_files) context.available_dataset_ids = scn.available_dataset_ids() @then(u'available datasets are returned') def step_impl_available_datasets_are_returned(context): assert (len(context.available_dataset_ids) >= 5) @given("datasets with the same name") def step_impl_datasets_with_same_name(context): """Datasets with the same name but different other ID parameters.""" from satpy import Scene from xarray import DataArray from satpy.dataset import DatasetID scn = Scene() scn[DatasetID('ds1', calibration='radiance')] = DataArray([[1, 2], [3, 4]]) scn[DatasetID('ds1', resolution=500, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) scn[DatasetID('ds1', resolution=250, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) scn[DatasetID('ds1', resolution=1000, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) scn[DatasetID('ds1', resolution=500, calibration='radiance', modifiers=('mod1',))] = DataArray([[5, 6], [7, 8]]) ds_id = DatasetID('ds1', resolution=1000, calibration='radiance', modifiers=('mod1', 'mod2')) scn[ds_id] = DataArray([[5, 6], [7, 8]]) context.scene = scn @when("a dataset is retrieved by name") def step_impl_dataset_retrieved_by_name(context): """Use the Scene's getitem method to get a dataset.""" context.returned_dataset = context.scene['ds1'] @then("the least modified version of the dataset is returned") def 
step_impl_least_modified_dataset_returned(context): """The dataset should be one of the least modified datasets.""" assert(len(context.returned_dataset.attrs['modifiers']) == 0) satpy-0.20.0/satpy/tests/features/steps/steps-real-load-process-write.py000066400000000000000000000101431362525524100263650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Step for the real load-process-write tests. """ import os import fnmatch from behave import given, when, then from tempfile import NamedTemporaryFile import numpy as np from PIL import Image def fft_proj_rms(a1, a2): """Compute the RMS of differences between two images. Compute the RMS of differences between two FFT vectors of a1 and projection of FFT vectors of a2. This metric is sensitive to large scale changes and image noise but insensitive to small rendering differences. """ ms = 0 # for i in range(a1.shape[-1]): fr1 = np.fft.rfftn(a1) fr2 = np.fft.rfftn(a2) ps1 = np.log10(fr1 * fr1.conj()).real ps2 = np.log10(fr2 * fr2.conj()).real p1 = np.arctan2(fr1.imag, fr1.real) p2 = np.arctan2(fr2.imag, fr2.real) theta = p2 - p1 l_factor = ps2 * np.cos(theta) ms += np.sum(((l_factor - ps1) ** 2)) / float(ps1.size) rms = np.sqrt(ms) return rms def assert_images_match(image1, image2, threshold=0.1): """Assert that images are matching.""" img1 = np.asarray(Image.open(image1)) img2 = np.asarray(Image.open(image2)) rms = fft_proj_rms(img1, img2) assert rms <= threshold, "Images {0} and {1} don't match: {2}".format( image1, image2, rms) def get_all_files(directory, pattern): """Find all files matching *pattern* under *directory*.""" matches = [] for root, dirnames, filenames in os.walk(directory): for filename in fnmatch.filter(filenames, pattern): matches.append(os.path.join(root, filename)) return matches def before_all(context): if not context.config.log_capture: from satpy.utils import debug_on debug_on() @given(u'{dformat} data is available') # noqa def step_impl(context, dformat): data_path = os.path.join('test_data', dformat) data_available = os.path.exists(data_path) if not data_available: context.scenario.skip(reason="No test data available for " + dformat) else: context.dformat = dformat context.data_path = data_path @when(u'the user loads the {composite} composite') # noqa def step_impl(context, composite): from satpy import Scene scn = Scene(reader=context.dformat, filenames=get_all_files(os.path.join(context.data_path, 'data'), '*')) scn.load([composite]) context.scn = scn context.composite = composite @when(u'the user resamples the data to {area}') # noqa def step_impl(context, area): if area != '-': context.lscn = context.scn.resample(area) else: context.lscn = context.scn.resample(resampler='native') context.area = area @when(u'the user saves the composite to disk') # noqa def step_impl(context): with NamedTemporaryFile(suffix='.png', delete=False) as tmp_file: 
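        # delete=False keeps the temporary file on disk after the handle is
        # closed (an open NamedTemporaryFile cannot be reopened by name on
        # Windows); the "then" step below is responsible for removing it.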
context.lscn.save_dataset(context.composite, filename=tmp_file.name) context.new_filename = tmp_file.name @then(u'the resulting image should match the reference image') # noqa def step_impl(context): if context.area == '-': ref_filename = context.composite + ".png" else: ref_filename = context.composite + "_" + context.area + ".png" ref_filename = os.path.join(context.data_path, "ref", ref_filename) assert os.path.exists(ref_filename), "Missing reference file." assert_images_match(ref_filename, context.new_filename) os.remove(context.new_filename) satpy-0.20.0/satpy/tests/features/steps/steps-save.py000066400000000000000000000056551362525524100226730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . from behave import given, when, then, use_step_matcher try: from unittest.mock import patch except ImportError: from mock import patch use_step_matcher("re") @given("a dataset is available") # noqa: F811 def step_impl(context): """ :type context: behave.runner.Context """ from satpy import Scene from xarray import DataArray scn = Scene() scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) context.scene = scn @when("the show command is called") # noqa: F811 def step_impl(context): """ :type context: behave.runner.Context """ with patch('trollimage.xrimage.XRImage.show') as mock_show: context.scene.show("MyDataset") mock_show.assert_called_once_with() @then("an image should pop up") # noqa: F811 def step_impl(context): """ :type context: behave.runner.Context """ pass @when("the save_dataset command is called") # noqa: F811 def step_impl(context): """ :type context: behave.runner.Context """ context.filename = "/tmp/test_dataset.png" context.scene.save_dataset("MyDataset", context.filename) @then("a file should be saved on disk") # noqa: F811 def step_impl(context): """ :type context: behave.runner.Context """ import os assert(os.path.exists(context.filename)) os.remove(context.filename) @given("a bunch of datasets are available") # noqa: F811 def step_impl(context): """ :type context: behave.runner.Context """ from satpy import Scene from xarray import DataArray scn = Scene() scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=['y', 'x']) context.scene = scn @when("the save_datasets command is called") # noqa: F811 def step_impl(context): """ :type context: behave.runner.Context """ context.scene.save_datasets(writer="simple_image", filename="{name}.png") @then("a bunch of files should be saved on disk") # noqa: F811 def step_impl(context): """ :type context: behave.runner.Context """ import os for filename in ["MyDataset.png", "MyDataset2.png"]: assert(os.path.exists(filename)) os.remove(filename) 
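# A minimal end-to-end sketch of what the save steps above exercise, guarded so
# it only runs if this module is executed directly (behave never does this; the
# Scene/DataArray usage simply mirrors the steps):
if __name__ == "__main__":
    from satpy import Scene
    from xarray import DataArray

    scn = Scene()
    scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x'])
    scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=['y', 'x'])
    # one PNG per dataset, named after the dataset as in the steps above
    scn.save_datasets(writer="simple_image", filename="{name}.png")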
satpy-0.20.0/satpy/tests/reader_tests/000077500000000000000000000000001362525524100177245ustar00rootroot00000000000000satpy-0.20.0/satpy/tests/reader_tests/__init__.py000066400000000000000000000127651362525524100220500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The reader tests package.""" import sys from satpy.tests.reader_tests import (test_abi_l1b, test_agri_l1, test_hrit_base, test_viirs_sdr, test_viirs_l1b, test_virr_l1b, test_seviri_l1b_native, test_seviri_base, test_hdf5_utils, test_netcdf_utils, test_hdf4_utils, test_utils, test_acspo, test_amsr2_l1b, test_omps_edr, test_nucaps, test_geocat, test_seviri_l1b_calibration, test_clavrx, test_grib, test_goes_imager_hrit, test_ahi_hsd, test_iasi_l2, test_generic_image, test_scmi, test_ahi_hrit, test_goes_imager_nc, test_nc_slstr, test_olci_nc, test_viirs_edr_flood, test_nwcsaf_nc, test_seviri_l1b_hrit, test_sar_c_safe, test_safe_sar_l2_ocn, test_viirs_edr_active_fires, test_hdfeos_base, test_modis_l2, test_electrol_hrit, test_mersi2_l1b, test_avhrr_l1b_gaclac, test_vaisala_gld360, test_fci_l1c_fdhsi, test_tropomi_l2, test_hsaf_grib, test_abi_l2_nc, test_eum_base, test_ami_l1b, test_viirs_compact, test_seviri_l2_bufr, test_geos_area, test_nwcsaf_msg, test_glm_l2, test_seviri_l1b_icare, test_mimic_TPW2_nc, test_slstr_l2, test_aapp_l1b, test_eps_l1b, test_iasi_l2_so2_bufr) if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest def suite(): """Test suite for all reader tests.""" mysuite = unittest.TestSuite() mysuite.addTests(test_abi_l1b.suite()) mysuite.addTests(test_agri_l1.suite()) mysuite.addTests(test_viirs_sdr.suite()) mysuite.addTests(test_viirs_l1b.suite()) mysuite.addTests(test_virr_l1b.suite()) mysuite.addTests(test_hrit_base.suite()) mysuite.addTests(test_seviri_l1b_native.suite()) mysuite.addTests(test_seviri_base.suite()) mysuite.addTests(test_hdf4_utils.suite()) mysuite.addTests(test_hdf5_utils.suite()) mysuite.addTests(test_netcdf_utils.suite()) mysuite.addTests(test_utils.suite()) mysuite.addTests(test_acspo.suite()) mysuite.addTests(test_amsr2_l1b.suite()) mysuite.addTests(test_omps_edr.suite()) mysuite.addTests(test_nucaps.suite()) mysuite.addTests(test_geocat.suite()) mysuite.addTests(test_olci_nc.suite()) mysuite.addTests(test_seviri_l1b_calibration.suite()) mysuite.addTests(test_clavrx.suite()) mysuite.addTests(test_grib.suite()) mysuite.addTests(test_goes_imager_hrit.suite()) mysuite.addTests(test_ahi_hsd.suite()) mysuite.addTests(test_iasi_l2.suite()) mysuite.addTests(test_generic_image.suite()) mysuite.addTests(test_scmi.suite()) mysuite.addTests(test_viirs_edr_flood.suite()) mysuite.addTests(test_ahi_hrit.suite()) mysuite.addTests(test_goes_imager_nc.suite()) mysuite.addTests(test_nc_slstr.suite()) mysuite.addTests(test_nwcsaf_nc.suite()) mysuite.addTests(test_seviri_l1b_hrit.suite()) 
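    # Every reader test module exposes a suite() function; a new module must be
    # imported at the top of this file and registered here to be collected.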
mysuite.addTests(test_sar_c_safe.suite()) mysuite.addTests(test_viirs_edr_active_fires.suite()) mysuite.addTests(test_safe_sar_l2_ocn.suite()) mysuite.addTests(test_hdfeos_base.suite()) mysuite.addTests(test_modis_l2.suite()) mysuite.addTests(test_electrol_hrit.suite()) mysuite.addTests(test_mersi2_l1b.suite()) mysuite.addTests(test_avhrr_l1b_gaclac.suite()) mysuite.addTests(test_vaisala_gld360.suite()) mysuite.addTests(test_fci_l1c_fdhsi.suite()) mysuite.addTests(test_abi_l2_nc.suite()) mysuite.addTests(test_tropomi_l2.suite()) mysuite.addTests(test_hsaf_grib.suite()) mysuite.addTests(test_eum_base.suite()) mysuite.addTests(test_viirs_compact.suite()) mysuite.addTests(test_ami_l1b.suite()) mysuite.addTests(test_geos_area.suite()) mysuite.addTests(test_seviri_l2_bufr.suite()) mysuite.addTests(test_nwcsaf_msg.suite()) mysuite.addTests(test_mimic_TPW2_nc.suite()) mysuite.addTests(test_glm_l2.suite()) mysuite.addTests(test_seviri_l1b_icare.suite()) mysuite.addTests(test_slstr_l2.suite()) mysuite.addTests(test_aapp_l1b.suite()) mysuite.addTests(test_eps_l1b.suite()) mysuite.addTests(test_iasi_l2_so2_bufr.suite()) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_aapp_l1b.py000066400000000000000000000145361362525524100230250ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
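# For context, the reader exercised below is normally driven through the Scene
# API; a sketch (not executed here, with the filename built from the pattern
# used in the tests below):
#
#     from satpy import Scene
#     scn = Scene(reader='avhrr_l1b_aapp',
#                 filenames=['hrpt_metop03_20200108_0819_06071.l1b'])
#     scn.load(['4'])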
"""Test module for the avhrr aapp l1b reader.""" import unittest import numpy as np from satpy.readers.aapp_l1b import _HEADERTYPE, _SCANTYPE, AVHRRAAPPL1BFile import tempfile import datetime from satpy import DatasetID class TestAAPPL1B(unittest.TestCase): """Test the filehandler.""" def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) self._data = np.zeros(3, dtype=_SCANTYPE) self._header['satid'][0] = 13 self._header['radtempcnv'][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] self._data['scnlinyr'][:] = 2020 self._data['scnlindy'][:] = 8 self._data['scnlintime'][0] = 30195225 self._data['scnlintime'][1] = 30195389 self._data['scnlintime'][2] = 30195556 self._data['scnlinbit'][0] = -16383 self._data['scnlinbit'][1] = -16383 self._data['scnlinbit'][2] = -16384 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [540780032, -22145690, 1584350080, -543935616, 500]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) self._data['calvis'][:] = calvis self._data['calir'] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], [[13869, -249508, 234624768], [0, 0, 0]]], [[[0, -2675, 2655265], [0, 0, 0]], [[33609, -260810, 226837328], [0, 0, 0]], [[13870, -249520, 234638704], [0, 0, 0]]], [[[0, 0, 0], [0, 0, 0]], [[33614, -260833, 226855664], [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] self.filename_info = {'platform_shortname': 'metop03', 'start_time': datetime.datetime(2020, 1, 8, 8, 19), 'orbit_number': 6071} self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, 'file_patterns': ['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], # noqa 'file_type': 'avhrr_aapp_l1b'} def test_read(self): """Test the reading.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} mins = [] maxs = [] for name in ['1', '2', '3a']: key = DatasetID(name=name, calibration='reflectance') res = fh.get_dataset(key, info) assert(res.min() == 0) assert(res.max() >= 100) mins.append(res.min().values) maxs.append(res.max().values) if name == '3a': assert(np.all(np.isnan(res[:2, :]))) for name in ['3b', '4', '5']: key = DatasetID(name=name, calibration='reflectance') res = fh.get_dataset(key, info) mins.append(res.min().values) maxs.append(res.max().values) if name == '3b': assert(np.all(np.isnan(res[2:, :]))) np.testing.assert_allclose(mins, [0., 0., 0., 204.10106939, 103.23477235, 106.42609758]) np.testing.assert_allclose(maxs, [108.40391775, 107.68545158, 106.80061233, 337.71416096, 355.15898219, 350.87182166]) def test_angles(self): """Test reading the angles.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = DatasetID(name='solar_zenith_angle') res = fh.get_dataset(key, info) assert(np.all(res == 0)) def test_navigation(self): """Test reading the lon and lats.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, 
self.filetype_info) info = {} key = DatasetID(name='longitude') res = fh.get_dataset(key, info) assert(np.all(res == 0)) key = DatasetID(name='latitude') res = fh.get_dataset(key, info) assert(np.all(res == 0)) def suite(): """Test suite.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestAAPPL1B)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_abi_l1b.py000066400000000000000000000237111362525524100226320ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The abi_l1b reader tests package.""" import numpy as np import xarray as xr import unittest try: from unittest import mock except ImportError: import mock class Test_NC_ABI_L1B_Base(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_, rad=None): """Create a fake dataset using the given radiance data.""" from satpy.readers.abi_l1b import NC_ABI_L1B x_image = xr.DataArray(0.) y_image = xr.DataArray(0.) time = xr.DataArray(0.) if rad is None: rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. rad_data = (rad_data + 1.) 
/ 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 1002, 'units': 'W m-2 um-1 sr-1' } ) rad.coords['t'] = time rad.coords['x_image'] = x_image rad.coords['y_image'] = y_image x__ = xr.DataArray( range(5), attrs={'scale_factor': 2., 'add_offset': -1.}, dims=('x',) ) y__ = xr.DataArray( range(2), attrs={'scale_factor': -2., 'add_offset': 1.}, dims=('y',) ) proj = xr.DataArray( [], attrs={ 'semi_major_axis': 1., 'semi_minor_axis': 1., 'perspective_point_height': 1., 'longitude_of_projection_origin': -90., 'latitude_of_projection_origin': 0., 'sweep_angle_axis': u'x' } ) fake_dataset = xr.Dataset( data_vars={ 'Rad': rad, 'band_id': np.array(8), # 'x': x__, # 'y': y__, 'x_image': x_image, 'y_image': y_image, 'goes_imager_projection': proj, 'yaw_flip_flag': np.array([1]), "planck_fk1": np.array(13432.1), "planck_fk2": np.array(1497.61), "planck_bc1": np.array(0.09102), "planck_bc2": np.array(0.99971), "esun": np.array(2017), "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), "earth_sun_distance_anomaly_in_AU": np.array(0.99) }, coords={ 't': rad.coords['t'], 'x': x__, 'y': y__, }, attrs={ "time_coverage_start": "2017-09-20T17:30:40.8Z", "time_coverage_end": "2017-09-20T17:41:17.5Z", }, ) xr_.open_dataset.return_value = fake_dataset self.reader = NC_ABI_L1B('filename', {'platform_shortname': 'G16', 'observation_type': 'Rad', 'scene_abbr': 'C', 'scan_mode': 'M3'}, {'filetype': 'info'}) class Test_NC_ABI_L1B(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader.""" def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime self.assertEqual(self.reader.start_time, datetime(2017, 9, 20, 17, 30, 40, 800000)) self.assertEqual(self.reader.end_time, datetime(2017, 9, 20, 17, 41, 17, 500000)) def test_get_dataset(self): """Test the get_dataset method.""" from satpy import DatasetID key = DatasetID(name='Rad', calibration='radiance') res = self.reader.get_dataset(key, {'info': 'info'}) exp = {'calibration': 'radiance', 'instrument_ID': None, 'modifiers': (), 'name': 'Rad', 'observation_type': 'Rad', 'orbital_parameters': {'projection_altitude': 1.0, 'projection_latitude': 0.0, 'projection_longitude': -90.0, 'satellite_nominal_altitude': 35786020., 'satellite_nominal_latitude': 0.0, 'satellite_nominal_longitude': -89.5, 'yaw_flip': True}, 'orbital_slot': None, 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, 'scan_mode': 'M3', 'scene_abbr': 'C', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, 'units': 'W m-2 um-1 sr-1'} self.assertDictEqual(res.attrs, exp) # we remove any time dimension information self.assertNotIn('t', res.coords) self.assertNotIn('t', res.dims) self.assertNotIn('time', res.coords) self.assertNotIn('time', res.dims) def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy import DatasetID self.assertRaises(ValueError, self.reader.get_dataset, DatasetID(name='C05', calibration='_bad_'), {}) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], {'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) self.assertEqual(call_args[4], 
self.reader.ncols) self.assertEqual(call_args[5], self.reader.nlines) np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) class Test_NC_ABI_L1B_ir_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader's IR calibration.""" def setUp(self): """Create fake data for the tests.""" rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 1002, } ) super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad) def test_ir_calibrate(self): """Test IR calibration.""" from satpy import DatasetID res = self.reader.get_dataset( DatasetID(name='C05', calibration='brightness_temperature'), {}) expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) # make sure the attributes from the file are in the data array self.assertNotIn('scale_factor', res.attrs) self.assertNotIn('_FillValue', res.attrs) self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') self.assertEqual(res.attrs['long_name'], 'Brightness Temperature') class Test_NC_ABI_L1B_vis_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader.""" def setUp(self): """Create fake data for the tests.""" rad_data = (np.arange(10.).reshape((2, 5)) + 1.) rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 20, } ) super(Test_NC_ABI_L1B_vis_cal, self).setUp(rad=rad) def test_vis_calibrate(self): """Test VIS calibration.""" from satpy import DatasetID res = self.reader.get_dataset( DatasetID(name='C05', calibration='reflectance'), {}) expected = np.array([[0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171]]) self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) self.assertNotIn('scale_factor', res.attrs) self.assertNotIn('_FillValue', res.attrs) self.assertEqual(res.attrs['standard_name'], 'toa_bidirectional_reflectance') self.assertEqual(res.attrs['long_name'], 'Bidirectional Reflectance') def suite(): """Create test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(Test_NC_ABI_L1B)) mysuite.addTest(loader.loadTestsFromTestCase(Test_NC_ABI_L1B_ir_cal)) mysuite.addTest(loader.loadTestsFromTestCase(Test_NC_ABI_L1B_vis_cal)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_abi_l2_nc.py000066400000000000000000000176461362525524100231630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
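# Note on the fake 'HT' variable created below: it holds int16 values with the
# CF attribute _Unsigned='True', so the reader is expected to reinterpret the
# raw values as uint16 before scaling (-32768 becomes 32768, 32767 stays 32767,
# and the _FillValue of -1 reads back as 65535 and is masked to NaN).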
"""The abi_l2_nc reader tests package.""" import sys import numpy as np import xarray as xr if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class Test_NC_ABI_L2_base(unittest.TestCase): """Test the NC_ABI_L2 reader.""" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_): """Create fake data for the tests.""" from satpy.readers.abi_l2_nc import NC_ABI_L2 proj = xr.DataArray( [], attrs={ 'semi_major_axis': 1., 'semi_minor_axis': 1., 'perspective_point_height': 1., 'longitude_of_projection_origin': -90., 'sweep_angle_axis': u'x' } ) x__ = xr.DataArray( [0, 1], attrs={'scale_factor': 2., 'add_offset': -1.}, dims=('x',), ) y__ = xr.DataArray( [0, 1], attrs={'scale_factor': -2., 'add_offset': 1.}, dims=('y',), ) ht_da = xr.DataArray(np.array([2, -1, -32768, 32767]).astype(np.int16).reshape((2, 2)), dims=('y', 'x'), attrs={'scale_factor': 0.3052037, 'add_offset': 0., '_FillValue': np.array(-1).astype(np.int16), '_Unsigned': 'True', 'units': 'm'},) fake_dataset = xr.Dataset( data_vars={ 'goes_imager_projection': proj, 'x': x__, 'y': y__, 'HT': ht_da, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786020.), "spatial_resolution": "10km at nadir", }, attrs={ "time_coverage_start": "2017-09-20T17:30:40.8Z", "time_coverage_end": "2017-09-20T17:41:17.5Z", } ) xr_.open_dataset.return_value = fake_dataset self.reader = NC_ABI_L2('filename', {'platform_shortname': 'G16', 'observation_type': 'HT', 'scan_mode': 'M3'}, {'filetype': 'info'}) class Test_NC_ABI_L2_get_dataset(Test_NC_ABI_L2_base): """Test get dataset function of the NC_ABI_L2 reader.""" def test_get_dataset(self): """Test basic L2 load.""" from satpy import DatasetID key = DatasetID(name='HT') res = self.reader.get_dataset(key, {'file_key': 'HT'}) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) exp_attrs = {'instrument_ID': None, 'modifiers': (), 'name': 'HT', 'orbital_slot': None, 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, 'satellite_altitude': 35786020., 'satellite_latitude': 0.0, 'satellite_longitude': -89.5, 'scan_mode': 'M3', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, 'units': 'm'} self.assertTrue(np.allclose(res.data, exp_data, equal_nan=True)) self.assertDictEqual(dict(res.attrs), exp_attrs) class Test_NC_ABI_L2_area_fixedgrid(Test_NC_ABI_L2_base): """Test the NC_ABI_L2 reader.""" @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_fixedgrid(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], {'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) self.assertEqual(call_args[4], self.reader.ncols) self.assertEqual(call_args[5], self.reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2., 2.)) class Test_NC_ABI_L2_area_latlon(unittest.TestCase): """Test the NC_ABI_L2 reader.""" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_): """Create fake data for the tests.""" from satpy.readers.abi_l2_nc import NC_ABI_L2 proj = xr.DataArray( [], attrs={'semi_major_axis': 1., 'semi_minor_axis': 1., 'inverse_flattening': 1., 'longitude_of_prime_meridian': 0.0, } ) proj_ext = xr.DataArray( [], attrs={'geospatial_westbound_longitude': -85.0, 
'geospatial_eastbound_longitude': -65.0, 'geospatial_northbound_latitude': 20.0, 'geospatial_southbound_latitude': -20.0, 'geospatial_lat_center': 0.0, 'geospatial_lon_center': -75.0, }) x__ = xr.DataArray( [0, 1], attrs={'scale_factor': 2., 'add_offset': -1.}, dims=('lon',), ) y__ = xr.DataArray( [0, 1], attrs={'scale_factor': -2., 'add_offset': 1.}, dims=('lat',), ) fake_dataset = xr.Dataset( data_vars={ 'goes_lat_lon_projection': proj, 'geospatial_lat_lon_extent': proj_ext, 'lon': x__, 'lat': y__, 'RSR': xr.DataArray(np.ones((2, 2)), dims=('lat', 'lon')), }, ) xr_.open_dataset.return_value = fake_dataset self.reader = NC_ABI_L2('filename', {'platform_shortname': 'G16', 'observation_type': 'RSR', 'scene_abbr': 'C', 'scan_mode': 'M3'}, {'filetype': 'info'}) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_latlon(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, 'lon_0': -75.0, 'lat_0': 0.0}) self.assertEqual(call_args[4], self.reader.ncols) self.assertEqual(call_args[5], self.reader.nlines) np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) def suite(): """Create test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(Test_NC_ABI_L2_area_latlon)) mysuite.addTest(loader.loadTestsFromTestCase(Test_NC_ABI_L2_area_fixedgrid)) mysuite.addTest(loader.loadTestsFromTestCase(Test_NC_ABI_L2_get_dataset)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_acspo.py000066400000000000000000000147461362525524100224560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.acspo module. 
""" import os import sys from datetime import datetime, timedelta import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) sat, inst = { 'VIIRS_NPP': ('NPP', 'VIIRS'), }[filename_info['sensor_id']] file_content = { '/attr/platform': sat, '/attr/sensor': inst, '/attr/spatial_resolution': '742 m at nadir', '/attr/time_coverage_start': dt.strftime('%Y%m%dT%H%M%SZ'), '/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y%m%dT%H%M%SZ'), } file_content['lat'] = DEFAULT_LAT_DATA file_content['lat/attr/comment'] = 'Latitude of retrievals' file_content['lat/attr/long_name'] = 'latitude' file_content['lat/attr/standard_name'] = 'latitude' file_content['lat/attr/units'] = 'degrees_north' file_content['lat/attr/valid_min'] = -90. file_content['lat/attr/valid_max'] = 90. file_content['lat/shape'] = DEFAULT_FILE_SHAPE file_content['lon'] = DEFAULT_LON_DATA file_content['lon/attr/comment'] = 'Longitude of retrievals' file_content['lon/attr/long_name'] = 'longitude' file_content['lon/attr/standard_name'] = 'longitude' file_content['lon/attr/units'] = 'degrees_east' file_content['lon/attr/valid_min'] = -180. file_content['lon/attr/valid_max'] = 180. file_content['lon/shape'] = DEFAULT_FILE_SHAPE for k in ['sea_surface_temperature', 'satellite_zenith_angle', 'sea_ice_fraction', 'wind_speed']: file_content[k] = DEFAULT_FILE_DATA[None, ...] 
file_content[k + '/attr/scale_factor'] = 1.1 file_content[k + '/attr/add_offset'] = 0.1 file_content[k + '/attr/units'] = 'some_units' file_content[k + '/attr/comment'] = 'comment' file_content[k + '/attr/standard_name'] = 'standard_name' file_content[k + '/attr/long_name'] = 'long_name' file_content[k + '/attr/valid_min'] = 0 file_content[k + '/attr/valid_max'] = 65534 file_content[k + '/attr/_FillValue'] = 65534 file_content[k + '/shape'] = (1, DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content['l2p_flags'] = np.zeros( (1, DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]), dtype=np.uint16) convert_file_content_to_data_array(file_content) return file_content class TestACSPOReader(unittest.TestCase): """Test ACSPO Reader""" yaml_file = "acspo.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.acspo import ACSPOFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(ACSPOFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ '20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_every_dataset(self): """Test loading all datasets""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ '20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['sst', 'satellite_zenith_angle', 'sea_ice_fraction', 'wind_speed']) self.assertEqual(len(datasets), 4) for d in datasets.values(): self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) def suite(): """The test suite for test_acspo. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestACSPOReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_agri_l1.py000066400000000000000000000647001362525524100226620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The agri_l1 reader tests package. 
""" from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler import numpy as np import dask.array as da import xarray as xr import os import unittest try: from unittest import mock except ImportError: import mock class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def make_test_data(self, cwl, ch, prefix, dims, file_type): if prefix == 'CAL': data = xr.DataArray( da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), attrs={ 'Slope': 1., 'Intercept': 0., 'FillValue': -65535.0, 'units': 'NUL', 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), 'band_names': 'band{}(band number is range from 1 to 14)' .format(ch).encode('utf-8'), 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), 'valid_range': [0, 1.5], }, dims=('_const')) elif prefix == 'NOM': data = xr.DataArray( da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ 'Slope': 1., 'Intercept': 0., 'FillValue': 65535, 'units': 'DN', 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), 'band_names': 'band{}(band number is range from 1 to 14)' .format(ch).encode('utf-8'), 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), 'valid_range': [0, 4095], }, dims=('_RegLength', '_RegWidth')) elif prefix == 'COEF': if file_type == '500': data = xr.DataArray( da.from_array((np.arange(2.).reshape((1, 2)) + 1.) / np.array([1E4, 1E2]), [1, 2]), attrs={ 'Slope': 1., 'Intercept': 0., 'FillValue': 0, 'units': 'NUL', 'band_names': 'NUL'.format(ch).encode('utf-8'), 'long_name': b'Calibration coefficient (SCALE and OFFSET)', 'valid_range': [-500, 500], }, dims=('_num_channel', '_coefs')) elif file_type == '1000': data = xr.DataArray( da.from_array((np.arange(6.).reshape((3, 2)) + 1.) / np.array([1E4, 1E2]), [3, 2]), attrs={ 'Slope': 1., 'Intercept': 0., 'FillValue': 0, 'units': 'NUL', 'band_names': 'NUL'.format(ch).encode('utf-8'), 'long_name': b'Calibration coefficient (SCALE and OFFSET)', 'valid_range': [-500, 500], }, dims=('_num_channel', '_coefs')) elif file_type == '2000': data = xr.DataArray( da.from_array((np.arange(14.).reshape((7, 2)) + 1.) / np.array([1E4, 1E2]), [7, 2]), attrs={ 'Slope': 1., 'Intercept': 0., 'FillValue': 0, 'units': 'NUL', 'band_names': 'NUL'.format(ch).encode('utf-8'), 'long_name': b'Calibration coefficient (SCALE and OFFSET)', 'valid_range': [-500, 500], }, dims=('_num_channel', '_coefs')) elif file_type == '4000': data = xr.DataArray( da.from_array((np.arange(28.).reshape((14, 2)) + 1.) 
/ np.array([1E4, 1E2]), [14, 2]), attrs={ 'Slope': 1., 'Intercept': 0., 'FillValue': 0, 'units': 'NUL', 'band_names': 'NUL'.format(ch).encode('utf-8'), 'long_name': b'Calibration coefficient (SCALE and OFFSET)', 'valid_range': [-500, 500], }, dims=('_num_channel', '_coefs')) return data def _get_500m_data(self, file_type): dim_0 = 2 dim_1 = 5 chs = [2] cwls = [0.65] data = {} for index, cwl in enumerate(cwls): data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL', [dim_0, dim_1], file_type) data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM', [dim_0, dim_1], file_type) data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF', [dim_0, dim_1], file_type) return data def _get_1km_data(self, file_type): dim_0 = 2 dim_1 = 5 chs = np.linspace(1, 3, 3) cwls = [0.47, 0.65, 0.83] data = {} for index, cwl in enumerate(cwls): data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL', [dim_0, dim_1], file_type) data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM', [dim_0, dim_1], file_type) data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF', [dim_0, dim_1], file_type) return data def _get_2km_data(self, file_type): dim_0 = 2 dim_1 = 5 chs = np.linspace(1, 7, 7) cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72] data = {} for index, cwl in enumerate(cwls): data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL', [dim_0, dim_1], file_type) data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM', [dim_0, dim_1], file_type) data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF', [dim_0, dim_1], file_type) return data def _get_4km_data(self, file_type): dim_0 = 2 dim_1 = 5 chs = np.linspace(1, 14, 14) cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72, 3.72, 6.25, 7.10, 8.50, 10.8, 12, 13.5] data = {} for index, cwl in enumerate(cwls): data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL', [dim_0, dim_1], file_type) data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM', [dim_0, dim_1], file_type) data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF', [dim_0, dim_1], file_type) return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { '/attr/NOMCenterLat': 0.0, '/attr/NOMCenterLon': 104.7, '/attr/NOMSatHeight': 3.5786E7, '/attr/dEA': 6378.14, '/attr/dObRecFlat': 298.257223563, '/attr/OBIType': 'REGC', '/attr/RegLength': 2.0, '/attr/RegWidth': 5.0, '/attr/Begin Line Number': 0, '/attr/End Line Number': 1, '/attr/Observing Beginning Date': '2019-06-03', '/attr/Observing Beginning Time': '00:30:01.807', '/attr/Observing Ending Date': '2019-06-03', '/attr/Observing Ending Time': '00:34:07.572', '/attr/Satellite Name': 'FY4A', '/attr/Sensor Identification Code': 'AGRI', '/attr/Sensor Name': 'AGRI', } data = {} if self.filetype_info['file_type'] == 'agri_l1_0500m': data = self._get_500m_data('500') elif self.filetype_info['file_type'] == 'agri_l1_1000m': data = self._get_1km_data('1000') elif self.filetype_info['file_type'] == 'agri_l1_2000m': data = self._get_2km_data('2000') elif self.filetype_info['file_type'] == 'agri_l1_4000m': data = self._get_4km_data('4000') test_content 
= {} test_content.update(global_attrs) test_content.update(data) return test_content class Test_HDF_AGRI_L1_cal(unittest.TestCase): """Test VIRR L1B Reader.""" yaml_file = "agri_l1.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.agri_l1 import HDF_AGRI_L1 from satpy.config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(HDF_AGRI_L1, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_fy4a_all_resolutions(self): """Test loading data when all resolutions are available.""" from satpy import DatasetID from satpy.readers import load_reader, get_key filenames = [ 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_0500M_V0001.HDF', 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF', 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_2000M_V0001.HDF', 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) available_datasets = reader.available_dataset_ids # 500m band_names = ['C' + '%02d' % ch for ch in np.linspace(2, 2, 1)] for band_name in band_names: ds_id = DatasetID(name=band_name, resolution=500) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(2, len(res)) # 1km band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 3, 3)] for band_name in band_names: ds_id = DatasetID(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(2, len(res)) # 2km band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 7, 7)] for band_name in band_names: ds_id = DatasetID(name=band_name, resolution=2000) res = get_key(ds_id, available_datasets, num_results=0, best=False) if band_name < 'C07': self.assertEqual(2, len(res)) else: self.assertEqual(3, len(res)) band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 14, 14)] res = reader.load(band_names) self.assertEqual(14, len(res)) for band_name in band_names: self.assertEqual((2, 5), res[band_name].shape) if band_name < 'C07': self.assertEqual('reflectance', res[band_name].attrs['calibration']) else: self.assertEqual('brightness_temperature', res[band_name].attrs['calibration']) if band_name < 'C07': self.assertEqual('%', res[band_name].attrs['units']) else: self.assertEqual('K', res[band_name].attrs['units']) def test_fy4a_counts_calib(self): """Test loading data at counts calibration.""" from satpy import DatasetID from satpy.readers import load_reader filenames = [ 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_0500M_V0001.HDF', 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF', 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_2000M_V0001.HDF', 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(4, 
len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) ds_ids = [] band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 14, 14)] for band_name in band_names: ds_ids.append(DatasetID(name=band_name, calibration='counts')) res = reader.load(ds_ids) self.assertEqual(14, len(res)) for band_name in band_names: self.assertEqual((2, 5), res[band_name].shape) self.assertEqual('counts', res[band_name].attrs['calibration']) self.assertEqual(res[band_name].dtype, np.uint16) self.assertEqual('1', res[band_name].attrs['units']) def test_fy4a_4km_resolutions(self): """Test loading data when only 4km resolutions are available.""" from satpy import DatasetID from satpy.readers import load_reader, get_key filenames = [ 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that the resolution is only 4km available_datasets = reader.available_dataset_ids band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 14, 14)] for band_name in band_names: ds_id = DatasetID(name=band_name, resolution=500) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=2000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=4000) res = get_key(ds_id, available_datasets, num_results=0, best=False) if band_name < 'C07': self.assertEqual(2, len(res)) else: self.assertEqual(3, len(res)) res = reader.load(band_names) self.assertEqual(14, len(res)) expected = { 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), 3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]) } for i in range(7, 15): expected[i] = np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]) for index, band_name in enumerate(band_names): self.assertEqual((2, 5), res[band_name].shape) if band_name < 'C07': self.assertEqual('reflectance', res[band_name].attrs['calibration']) else: self.assertEqual('brightness_temperature', res[band_name].attrs['calibration']) if band_name < 'C07': self.assertEqual('%', res[band_name].attrs['units']) else: self.assertEqual('K', res[band_name].attrs['units']) self.assertTrue(np.allclose(res[band_name].values, expected[index + 1], equal_nan=True)) def test_fy4a_2km_resolutions(self): """Test loading data when only 2km resolutions are available.""" from satpy import DatasetID from satpy.readers import load_reader, get_key filenames = [ 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_2000M_V0001.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(1, len(files)) reader.create_filehandlers(files) # Make 
sure we have some files self.assertTrue(reader.file_handlers) # Verify that the resolution is only 2km available_datasets = reader.available_dataset_ids band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 7, 7)] for band_name in band_names: ds_id = DatasetID(name=band_name, resolution=500) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=2000) res = get_key(ds_id, available_datasets, num_results=0, best=False) if band_name < 'C07': self.assertEqual(2, len(res)) else: self.assertEqual(3, len(res)) ds_id = DatasetID(name=band_name, resolution=4000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) res = reader.load(band_names) self.assertEqual(7, len(res)) expected = { 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), 3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]) } for index, band_name in enumerate(band_names): self.assertEqual((2, 5), res[band_name].shape) if band_name < 'C07': self.assertEqual('reflectance', res[band_name].attrs['calibration']) else: self.assertEqual('brightness_temperature', res[band_name].attrs['calibration']) if band_name < 'C07': self.assertEqual('%', res[band_name].attrs['units']) else: self.assertEqual('K', res[band_name].attrs['units']) self.assertTrue(np.allclose(res[band_name].values, expected[index + 1], equal_nan=True)) def test_fy4a_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" from satpy import DatasetID from satpy.readers import load_reader, get_key filenames = [ 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that the resolution is only 1km available_datasets = reader.available_dataset_ids band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 3, 3)] for band_name in band_names: ds_id = DatasetID(name=band_name, resolution=500) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(2, len(res)) ds_id = DatasetID(name=band_name, resolution=2000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=4000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) res = reader.load(band_names) self.assertEqual(3, len(res)) expected = { 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), 3: 
np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]) } for index, band_name in enumerate(band_names): self.assertEqual(1, res[band_name].attrs['sensor'].islower()) self.assertEqual((2, 5), res[band_name].shape) self.assertEqual('reflectance', res[band_name].attrs['calibration']) self.assertEqual('%', res[band_name].attrs['units']) self.assertTrue(np.allclose(res[band_name].values, expected[index + 1], equal_nan=True)) def test_fy4a_500m_resolutions(self): """Test loading data when only 500m resolutions are available.""" from satpy import DatasetID from satpy.readers import load_reader, get_key filenames = [ 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_0500M_V0001.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(1, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that the resolution is only 500m available_datasets = reader.available_dataset_ids band_names = ['C' + '%02d' % ch for ch in np.linspace(2, 2, 1)] for band_name in band_names: ds_id = DatasetID(name=band_name, resolution=500) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(2, len(res)) ds_id = DatasetID(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=2000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=4000) res = get_key(ds_id, available_datasets, num_results=0, best=False) self.assertEqual(0, len(res)) res = reader.load(band_names) self.assertEqual(1, len(res)) expected = np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]) for band_name in band_names: self.assertEqual((2, 5), res[band_name].shape) self.assertEqual('reflectance', res[band_name].attrs['calibration']) self.assertEqual('%', res[band_name].attrs['units']) self.assertTrue(np.allclose(res[band_name].values, expected, equal_nan=True)) def suite(): """The test suite for test_agri_l1.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(Test_HDF_AGRI_L1_cal)) return mysuite if __name__ == "__main__": # So you can run tests from this module individually. unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_ahi_hrit.py000066400000000000000000000301101362525524100231170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
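# The HRIT/JMA tests below construct HRITJMAFileHandler instances from
# hand-built metadata dictionaries instead of real HRIT segments.  A rough
# sketch of the pattern (illustrative only; _get_mda() and _get_reader()
# are the helpers defined inside the test case below):
#
#     mda = self._get_mda(nlines=5, ncols=5)   # fake HRIT header metadata
#     reader = self._get_reader(mda=mda)       # handler with mocked base init
#     area = reader.get_area_def('some_id')    # geometry from cfac/lfac/coff/loff
#
# The 'image_data_function' bytes in _get_mda() mimic the JMA calibration
# block ('$HALFTONE:=16' followed by 'count:=value' pairs), from which the
# handler builds its count-to-albedo or count-to-temperature table.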
"""The hrit ahi reader tests package.""" import sys import numpy as np import dask.array as da from xarray import DataArray if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class TestHRITJMAFileHandler(unittest.TestCase): """Test the HRITJMAFileHandler.""" @mock.patch('satpy.readers.hrit_jma.HRITFileHandler.__init__') def _get_reader(self, mocked_init, mda, filename_info=None): from satpy.readers.hrit_jma import HRITJMAFileHandler if not filename_info: filename_info = {} HRITJMAFileHandler.filename = 'filename' HRITJMAFileHandler.mda = mda return HRITJMAFileHandler('filename', filename_info, {}) def _get_mda(self, loff=5500.0, coff=5500.0, nlines=11000, ncols=11000, segno=0, numseg=1, vis=True): """Create metadata dict like HRITFileHandler would do it.""" if vis: idf = b'$HALFTONE:=16\r_NAME:=VISIBLE\r_UNIT:=ALBEDO(%)\r' \ b'0:=-0.10\r1023:=100.00\r65535:=100.00\r' else: idf = b'$HALFTONE:=16\r_NAME:=INFRARED\r_UNIT:=KELVIN\r' \ b'0:=329.98\r1023:=130.02\r65535:=130.02\r' return {'image_segm_seq_no': segno, 'total_no_image_segm': numseg, 'projection_name': b'GEOS(140.70) ', 'projection_parameters': { 'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, }, 'cfac': 10233128, 'lfac': 10233128, 'coff': coff, 'loff': loff, 'number_of_columns': ncols, 'number_of_lines': nlines, 'image_data_function': idf} def test_init(self): """Test creating the file handler.""" from satpy.readers.hrit_jma import UNKNOWN_AREA, HIMAWARI8 # Test addition of extra metadata mda = self._get_mda() mda_expected = mda.copy() mda_expected.update( {'planned_end_segment_number': 1, 'planned_start_segment_number': 1, 'segment_sequence_number': 0, 'unit': 'ALBEDO(%)'}) mda_expected['projection_parameters']['SSP_longitude'] = 140.7 reader = self._get_reader(mda=mda) self.assertEqual(reader.mda, mda_expected) # Check projection name self.assertEqual(reader.projection_name, 'GEOS(140.70)') # Check calibration table cal_expected = np.array([[0, -0.1], [1023, 100], [65535, 100]]) self.assertTrue(np.all(reader.calibration_table == cal_expected)) # Check platform self.assertEqual(reader.platform, HIMAWARI8) # Check is_segmented attribute expected = {0: False, 1: True, 8: True} for segno, is_segmented in expected.items(): mda = self._get_mda(segno=segno) reader = self._get_reader(mda=mda) self.assertEqual(reader.is_segmented, is_segmented) # Check area IDs expected = [ ({'area': 1}, 1), ({'area': 1234}, UNKNOWN_AREA), ({}, UNKNOWN_AREA) ] mda = self._get_mda() for filename_info, area_id in expected: reader = self._get_reader(mda=mda, filename_info=filename_info) self.assertEqual(reader.area_id, area_id) @mock.patch('satpy.readers.hrit_jma.HRITJMAFileHandler.__init__') def test_get_platform(self, mocked_init): """Test platform identification.""" from satpy.readers.hrit_jma import HRITJMAFileHandler from satpy.readers.hrit_jma import PLATFORMS, UNKNOWN_PLATFORM mocked_init.return_value = None reader = HRITJMAFileHandler() for proj_name, platform in PLATFORMS.items(): reader.projection_name = proj_name self.assertEqual(reader._get_platform(), platform) with mock.patch('logging.Logger.error') as mocked_log: reader.projection_name = 'invalid' self.assertEqual(reader._get_platform(), UNKNOWN_PLATFORM) mocked_log.assert_called() def test_get_area_def(self): """Test getting an AreaDefinition.""" from satpy.readers.hrit_jma import (FULL_DISK, NORTH_HEMIS, SOUTH_HEMIS, AREA_NAMES) cases = [ # Non-segmented, full disk {'loff': 1375.0, 'coff': 1375.0, 
'nlines': 2750, 'ncols': 2750, 'segno': 0, 'numseg': 1, 'area': FULL_DISK, 'extent': (-5498000.088960204, -5498000.088960204, 5502000.089024927, 5502000.089024927)}, # Non-segmented, northern hemisphere {'loff': 1325.0, 'coff': 1375.0, 'nlines': 1375, 'ncols': 2750, 'segno': 0, 'numseg': 1, 'area': NORTH_HEMIS, 'extent': (-5498000.088960204, -198000.00320373234, 5502000.089024927, 5302000.085788833)}, # Non-segmented, southern hemisphere {'loff': 50, 'coff': 1375.0, 'nlines': 1375, 'ncols': 2750, 'segno': 0, 'numseg': 1, 'area': SOUTH_HEMIS, 'extent': (-5498000.088960204, -5298000.085724112, 5502000.089024927, 202000.0032684542)}, # Segmented, segment #1 {'loff': 1375.0, 'coff': 1375.0, 'nlines': 275, 'ncols': 2750, 'segno': 1, 'numseg': 10, 'area': FULL_DISK, 'extent': (-5498000.088960204, 4402000.071226413, 5502000.089024927, 5502000.089024927)}, # Segmented, segment #7 {'loff': 1375.0, 'coff': 1375.0, 'nlines': 275, 'ncols': 2750, 'segno': 7, 'numseg': 10, 'area': FULL_DISK, 'extent': (-5498000.088960204, -2198000.035564665, 5502000.089024927, -1098000.0177661523)}, ] for case in cases: mda = self._get_mda(loff=case['loff'], coff=case['coff'], nlines=case['nlines'], ncols=case['ncols'], segno=case['segno'], numseg=case['numseg']) reader = self._get_reader(mda=mda, filename_info={'area': case['area']}) area = reader.get_area_def('some_id') self.assertTupleEqual(area.area_extent, case['extent']) self.assertEqual(area.description, AREA_NAMES[case['area']]['long']) def test_calibrate(self): """Test calibration.""" # Generate test data counts = np.linspace(0, 1200, 25).reshape(5, 5) counts[-1, -1] = 65535 counts = DataArray(da.from_array(counts, chunks=5)) refl = np.array( [[-0.1, 4.79247312, 9.68494624, 14.57741935, 19.46989247], [24.36236559, 29.25483871, 34.14731183, 39.03978495, 43.93225806], [48.82473118, 53.7172043, 58.60967742, 63.50215054, 68.39462366], [73.28709677, 78.17956989, 83.07204301, 87.96451613, 92.85698925], [97.74946237, 100., 100., 100., np.nan]] ) bt = np.array( [[329.98, 320.20678397, 310.43356794, 300.66035191, 290.88713587], [281.11391984, 271.34070381, 261.56748778, 251.79427175, 242.02105572], [232.24783969, 222.47462366, 212.70140762, 202.92819159, 193.15497556], [183.38175953, 173.6085435, 163.83532747, 154.06211144, 144.28889541], [134.51567937, 130.02, 130.02, 130.02, np.nan]] ) # Choose an area near the subsatellite point to avoid masking # of space pixels mda = self._get_mda(nlines=5, ncols=5, loff=1375.0, coff=1375.0, segno=0) reader = self._get_reader(mda=mda) # 1. Counts res = reader.calibrate(data=counts, calibration='counts') self.assertTrue(np.all(counts.values == res.values)) # 2. Reflectance res = reader.calibrate(data=counts, calibration='reflectance') np.testing.assert_allclose(refl, res.values) # also compares NaN # 3. 
Brightness temperature mda_bt = self._get_mda(nlines=5, ncols=5, loff=1375.0, coff=1375.0, segno=0, vis=False) reader_bt = self._get_reader(mda=mda_bt) res = reader_bt.calibrate(data=counts, calibration='brightness_temperature') np.testing.assert_allclose(bt, res.values) # also compares NaN def test_mask_space(self): """Test masking of space pixels.""" mda = self._get_mda(loff=1375.0, coff=1375.0, nlines=275, ncols=1375, segno=1, numseg=10) reader = self._get_reader(mda=mda) data = DataArray(da.ones((275, 1375), chunks=1024)) masked = reader._mask_space(data) # First line of the segment should be space, in the middle of the # last line there should be some valid pixels np.testing.assert_allclose(masked.values[0, :], np.nan) self.assertTrue(np.all(masked.values[-1, 588:788] == 1)) @mock.patch('satpy.readers.hrit_jma.HRITFileHandler.get_dataset') def test_get_dataset(self, base_get_dataset): """Test getting a dataset.""" from satpy.readers.hrit_jma import HIMAWARI8 mda = self._get_mda(loff=1375.0, coff=1375.0, nlines=275, ncols=1375, segno=1, numseg=10) reader = self._get_reader(mda=mda) key = mock.MagicMock() key.calibration = 'reflectance' base_get_dataset.return_value = DataArray(da.ones((275, 1375), chunks=1024)) # Check attributes res = reader.get_dataset(key, {'units': '%', 'sensor': 'ahi'}) self.assertEqual(res.attrs['units'], '%') self.assertEqual(res.attrs['sensor'], 'ahi') self.assertEqual(res.attrs['platform_name'], HIMAWARI8) self.assertEqual(res.attrs['satellite_longitude'], 140.7) self.assertEqual(res.attrs['satellite_latitude'], 0.) self.assertEqual(res.attrs['satellite_altitude'], 35785831.0) self.assertDictEqual(res.attrs['orbital_parameters'], {'projection_longitude': 140.7, 'projection_latitude': 0., 'projection_altitude': 35785831.0}) # Check called methods with mock.patch.object(reader, '_mask_space') as mask_space: with mock.patch.object(reader, 'calibrate') as calibrate: reader.get_dataset(key, {'units': '%', 'sensor': 'ahi'}) mask_space.assert_called() calibrate.assert_called() with mock.patch('logging.Logger.error') as log_mock: reader.get_dataset(key, {'units': '%', 'sensor': 'jami'}) log_mock.assert_called() def suite(): """Test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestHRITJMAFileHandler)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_ahi_hsd.py000066400000000000000000000415211362525524100227370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
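# The AHI HSD navigation tests that follow feed CGMS-style scaled header
# fields (CFAC/LFAC/COFF/LOFF) into the file handler and check the
# resulting geostationary AreaDefinition.  As a sketch of the arithmetic
# being exercised (not the reader's actual code), the per-pixel angular
# step follows the CGMS convention:
#
#     import numpy as np
#     cfac = 40932549                       # scaled column coefficient
#     step = np.deg2rad(2.**16 / cfac)      # ~2.79e-5 rad per column
#     metres = step * 35785863.0            # ~1000 m per pixel at nadir
#
# which is why a 1000-column region below spans roughly 1,000,000 m of
# extent in the 'geos' projection.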
"""The abi_l1b reader tests package.""" import unittest try: from unittest import mock except ImportError: import mock import warnings import numpy as np import dask.array as da from datetime import datetime from pyresample.geometry import AreaDefinition from satpy.readers.ahi_hsd import AHIHSDFileHandler from satpy.readers.utils import get_geostationary_mask class TestAHIHSDNavigation(unittest.TestCase): """Test the AHI HSD reader navigation.""" @mock.patch('satpy.readers.ahi_hsd.np2str') @mock.patch('satpy.readers.ahi_hsd.np.fromfile') def test_region(self, fromfile, np2str): """Test region navigation.""" np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): fh = AHIHSDFileHandler('somefile', {'segment': 1, 'total_segments': 1}, None) fh.proj_info = {'CFAC': 40932549, 'COFF': -591.5, 'LFAC': 40932549, 'LOFF': 5132.5, 'blocklength': 127, 'coeff_for_sd': 1737122264.0, 'distance_from_earth_center': 42164.0, 'earth_equatorial_radius': 6378.137, 'earth_polar_radius': 6356.7523, 'hblock_number': 3, 'req2_rpol2': 1.006739501, 'req2_rpol2_req2': 0.0066943844, 'resampling_size': 4, 'resampling_types': 0, 'rpol2_req2': 0.993305616, 'spare': '', 'sub_lon': 140.7} fh.data_info = {'blocklength': 50, 'compression_flag_for_data': 0, 'hblock_number': 2, 'number_of_bits_per_pixel': 16, 'number_of_columns': 1000, 'number_of_lines': 1000, 'spare': ''} area_def = fh.get_area_def(None) proj_dict = area_def.proj_dict self.assertEqual(proj_dict['a'], 6378137.0) self.assertEqual(proj_dict['b'], 6356752.3) self.assertEqual(proj_dict['h'], 35785863.0) self.assertEqual(proj_dict['lon_0'], 140.7) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') np.testing.assert_allclose(area_def.area_extent, (592000.0038256242, 4132000.0267018233, 1592000.0102878273, 5132000.033164027)) @mock.patch('satpy.readers.ahi_hsd.np2str') @mock.patch('satpy.readers.ahi_hsd.np.fromfile') def test_segment(self, fromfile, np2str): """Test segment navigation.""" np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): fh = AHIHSDFileHandler('somefile', {'segment': 8, 'total_segments': 10}, None) fh.proj_info = {'CFAC': 40932549, 'COFF': 5500.5, 'LFAC': 40932549, 'LOFF': 5500.5, 'blocklength': 127, 'coeff_for_sd': 1737122264.0, 'distance_from_earth_center': 42164.0, 'earth_equatorial_radius': 6378.137, 'earth_polar_radius': 6356.7523, 'hblock_number': 3, 'req2_rpol2': 1.006739501, 'req2_rpol2_req2': 0.0066943844, 'resampling_size': 4, 'resampling_types': 0, 'rpol2_req2': 0.993305616, 'spare': '', 'sub_lon': 140.7} fh.data_info = {'blocklength': 50, 'compression_flag_for_data': 0, 'hblock_number': 2, 'number_of_bits_per_pixel': 16, 'number_of_columns': 11000, 'number_of_lines': 1100, 'spare': ''} area_def = fh.get_area_def(None) proj_dict = area_def.proj_dict self.assertEqual(proj_dict['a'], 6378137.0) self.assertEqual(proj_dict['b'], 6356752.3) self.assertEqual(proj_dict['h'], 35785863.0) self.assertEqual(proj_dict['lon_0'], 140.7) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') np.testing.assert_allclose(area_def.area_extent, (-5500000.035542117, -3300000.021325271, 5500000.035542117, -2200000.0142168473)) class TestAHIHSDFileHandler(unittest.TestCase): """Test case for the file reading.""" def new_unzip(fname): """Fake unzipping.""" if(fname[-3:] == 'bz2'): return fname[:-4] return fname @mock.patch('satpy.readers.ahi_hsd.np2str') 
@mock.patch('satpy.readers.ahi_hsd.np.fromfile') @mock.patch('satpy.readers.ahi_hsd.unzip_file', mock.MagicMock(side_effect=new_unzip)) def setUp(self, fromfile, np2str): """Create a test file handler.""" np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): # Check if file handler raises exception for invalid calibration mode with self.assertRaises(ValueError): fh = AHIHSDFileHandler('somefile', {'segment': 8, 'total_segments': 10}, None, calib_mode='BAD_MODE') in_fname = 'test_file.bz2' fh = AHIHSDFileHandler(in_fname, {'segment': 8, 'total_segments': 10}, None) # Check that the filename is altered for bz2 format files self.assertNotEqual(in_fname, fh.filename) fh.proj_info = {'CFAC': 40932549, 'COFF': 5500.5, 'LFAC': 40932549, 'LOFF': 5500.5, 'blocklength': 127, 'coeff_for_sd': 1737122264.0, 'distance_from_earth_center': 42164.0, 'earth_equatorial_radius': 6378.137, 'earth_polar_radius': 6356.7523, 'hblock_number': 3, 'req2_rpol2': 1.006739501, 'req2_rpol2_req2': 0.0066943844, 'resampling_size': 4, 'resampling_types': 0, 'rpol2_req2': 0.993305616, 'spare': '', 'sub_lon': 140.7} fh.nav_info = {'SSP_longitude': 140.66, 'SSP_latitude': 0.03, 'distance_earth_center_to_satellite': 42165.04, 'nadir_longitude': 140.67, 'nadir_latitude': 0.04} fh.data_info = {'blocklength': 50, 'compression_flag_for_data': 0, 'hblock_number': 2, 'number_of_bits_per_pixel': 16, 'number_of_columns': 11000, 'number_of_lines': 1100, 'spare': ''} fh.basic_info = { 'observation_start_time': np.array([58413.12523839]), 'observation_end_time': np.array([58413.12562439]), 'observation_timeline': np.array([300]), } self.fh = fh def test_time_properties(self): """Test start/end/scheduled time properties.""" self.assertEqual(self.fh.start_time, datetime(2018, 10, 22, 3, 0, 20, 596896)) self.assertEqual(self.fh.end_time, datetime(2018, 10, 22, 3, 0, 53, 947296)) self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 0, 0, 0)) @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.__init__', return_value=None) def test_calibrate(self, *mocks): """Test calibration.""" def_cali = [-0.0037, 15.20] upd_cali = [-0.0074, 30.40] bad_cali = [0.0, 0.0] fh = AHIHSDFileHandler() fh.calib_mode = 'NOMINAL' fh.is_zipped = False fh._header = { 'block5': {'band_number': [5], 'gain_count2rad_conversion': [def_cali[0]], 'offset_count2rad_conversion': [def_cali[1]], 'central_wave_length': [10.4073], }, 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], 'speed_of_light': [299792458.0], 'planck_constant': [6.62606957e-34], 'boltzmann_constant': [1.3806488e-23], 'c0_rad2tb_conversion': [-0.116127314574], 'c1_rad2tb_conversion': [1.00099153832], 'c2_rad2tb_conversion': [-1.76961091571e-06], 'cali_gain_count2rad_conversion': [upd_cali[0]], 'cali_offset_count2rad_conversion': [upd_cali[1]]}, } # Counts self.assertEqual(fh.calibrate(data=123, calibration='counts'), 123) # Radiance counts = da.array(np.array([[0., 1000.], [2000., 5000.]])) rad_exp = np.array([[15.2, 11.5], [7.8, 0]]) rad = fh.calibrate(data=counts, calibration='radiance') self.assertTrue(np.allclose(rad, rad_exp)) # Brightness Temperature bt_exp = np.array([[330.978979, 310.524688], [285.845017, np.nan]]) bt = fh.calibrate(data=counts, calibration='brightness_temperature') np.testing.assert_allclose(bt, bt_exp) # Reflectance refl_exp = np.array([[2.92676, 2.214325], [1.50189, 0.]]) refl = fh.calibrate(data=counts, calibration='reflectance') self.assertTrue(np.allclose(refl, refl_exp)) # Updated 
calibration # Standard operation fh.calib_mode = 'UPDATE' rad_exp = np.array([[30.4, 23.0], [15.6, 0.]]) rad = fh.calibrate(data=counts, calibration='radiance') self.assertTrue(np.allclose(rad, rad_exp)) # Case for no updated calibration available (older data) fh._header = { 'block5': {'band_number': [5], 'gain_count2rad_conversion': [def_cali[0]], 'offset_count2rad_conversion': [def_cali[1]], 'central_wave_length': [10.4073], }, 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], 'speed_of_light': [299792458.0], 'planck_constant': [6.62606957e-34], 'boltzmann_constant': [1.3806488e-23], 'c0_rad2tb_conversion': [-0.116127314574], 'c1_rad2tb_conversion': [1.00099153832], 'c2_rad2tb_conversion': [-1.76961091571e-06], 'cali_gain_count2rad_conversion': [bad_cali[0]], 'cali_offset_count2rad_conversion': [bad_cali[1]]}, } rad = fh.calibrate(data=counts, calibration='radiance') rad_exp = np.array([[15.2, 11.5], [7.8, 0]]) self.assertTrue(np.allclose(rad, rad_exp)) @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_header') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.calibrate') def test_read_band(self, calibrate, *mocks): """Test masking of space pixels.""" nrows = 25 ncols = 100 self.fh.area = AreaDefinition('test', 'test', 'test', {'a': '6378137.0', 'b': '6356752.3', 'h': '35785863.0', 'lon_0': '140.7', 'proj': 'geos', 'units': 'm'}, ncols, nrows, [-5499999.901174725, -4399999.92093978, 5499999.901174725, -3299999.9407048346]) calibrate.return_value = np.ones((nrows, ncols)) m = mock.mock_open() with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): im = self.fh.read_band(info=mock.MagicMock(), key=mock.MagicMock()) # Note: Within the earth's shape get_geostationary_mask() is True but the numpy.ma mask # is False mask = im.to_masked_array().mask ref_mask = np.logical_not(get_geostationary_mask(self.fh.area).compute()) self.assertTrue(np.all(mask == ref_mask)) # Test attributes orb_params_exp = {'projection_longitude': 140.7, 'projection_latitude': 0., 'projection_altitude': 35785863.0, 'satellite_actual_longitude': 140.66, 'satellite_actual_latitude': 0.03, 'nadir_longitude': 140.67, 'nadir_latitude': 0.04} self.assertTrue(set(orb_params_exp.items()).issubset(set(im.attrs['orbital_parameters'].items()))) self.assertTrue(np.isclose(im.attrs['orbital_parameters']['satellite_actual_altitude'], 35786903.00581372)) # Test if masking space pixels disables with appropriate flag self.fh.mask_space = False with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_space') as mask_space: self.fh.read_band(info=mock.MagicMock(), key=mock.MagicMock()) mask_space.assert_not_called() def test_blocklen_error(self, *mocks): """Test erraneous blocklength.""" open_name = '%s.open' % __name__ fpos = 50 with mock.patch(open_name, create=True) as mock_open: with mock_open(mock.MagicMock(), 'r') as fp_: # Expected and actual blocklength match fp_.tell.return_value = 50 with warnings.catch_warnings(record=True) as w: self.fh._check_fpos(fp_, fpos, 0, 'header 1') self.assertTrue(len(w) == 0) # Expected and actual blocklength do not match fp_.tell.return_value = 100 with warnings.catch_warnings(record=True) as w: self.fh._check_fpos(fp_, fpos, 0, 'header 1') self.assertTrue(len(w) > 0) @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._check_fpos') def test_read_header(self, *mocks): """Test header reading.""" nhdr = [ {'blocklength': 0}, 
{'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0, 'band_number': [4]}, {'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0}, {'blocklength': 0, 'numof_correction_info_data': [1]}, {'blocklength': 0}, {'blocklength': 0, 'number_of_observation_times': [1]}, {'blocklength': 0}, {'blocklength': 0, 'number_of_error_info_data': [1]}, {'blocklength': 0}, {'blocklength': 0}] with mock.patch('numpy.fromfile', side_effect=nhdr): self.fh._read_header(mock.MagicMock()) def suite(): """Test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestAHIHSDNavigation)) mysuite.addTest(loader.loadTestsFromTestCase(TestAHIHSDFileHandler)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_ami_l1b.py000066400000000000000000000333221362525524100226440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The ami_l1b reader tests package.""" import numpy as np import xarray as xr import dask.array as da import unittest from unittest import mock class FakeDataset(object): """Mimic xarray Dataset object.""" def __init__(self, info, attrs): """Initialize test data.""" for var_name, var_data in list(info.items()): if isinstance(var_data, np.ndarray): info[var_name] = xr.DataArray(var_data) self.info = info self.attrs = attrs def __getitem__(self, key): """Mimic getitem method.""" return self.info[key] def __contains__(self, key): """Mimic contains method.""" return key in self.info def rename(self, *args, **kwargs): """Mimic rename method.""" return self def close(self): """Act like close method.""" return class TestAMIL1bNetCDFBase(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" @mock.patch('satpy.readers.ami_l1b.xr') def setUp(self, xr_, counts=None): """Create a fake dataset using the given counts data.""" from satpy.readers.ami_l1b import AMIL1bNetCDF if counts is None: rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. rad_data = (rad_data + 1.) 
/ 0.5 rad_data = rad_data.astype(np.int16) counts = xr.DataArray( da.from_array(rad_data, chunks='auto'), dims=('y', 'x'), attrs={ 'channel_name': "VI006", 'detector_side': 2, 'number_of_total_pixels': 484000000, 'number_of_error_pixels': 113892451, 'max_pixel_value': 32768, 'min_pixel_value': 6, 'average_pixel_value': 8228.98770845248, 'stddev_pixel_value': 13621.130386551, 'number_of_total_bits_per_pixel': 16, 'number_of_data_quality_flag_bits_per_pixel': 2, 'number_of_valid_bits_per_pixel': 12, 'data_quality_flag_meaning': "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", 'ground_sample_distance_ew': 1.4e-05, 'ground_sample_distance_ns': 1.4e-05, } ) sc_position = xr.DataArray(0., attrs={ 'sc_position_center_pixel': [-26113466.1974016, 33100139.1630508, 3943.75470244799], }) xr_.open_dataset.return_value = FakeDataset( { 'image_pixel_values': counts, 'sc_position': sc_position, }, { "satellite_name": "GK-2A", "observation_start_time": 623084431.957882, "observation_end_time": 623084975.606133, "projection_type": "GEOS", "sub_longitude": 2.23751210105673, "cfac": 81701355.6133574, "lfac": -81701355.6133574, "coff": 11000.5, "loff": 11000.5, "nominal_satellite_height": 42164000., "earth_equatorial_radius": 6378137., "earth_polar_radius": 6356752.3, "number_of_columns": 22000, "number_of_lines": 22000, "observation_mode": "FD", "channel_spatial_resolution": "0.5", "Radiance_to_Albedo_c": 1, "DN_to_Radiance_Gain": -0.0144806550815701, "DN_to_Radiance_Offset": 118.050903320312, "Teff_to_Tbb_c0": -0.141418528203155, "Teff_to_Tbb_c1": 1.00052232906885, "Teff_to_Tbb_c2": -0.00000036287276076109, "light_speed": 2.9979245800E+08, "Boltzmann_constant_k": 1.3806488000E-23, "Plank_constant_h": 6.6260695700E-34, } ) self.reader = AMIL1bNetCDF('filename', {'platform_shortname': 'gk2a'}, {'filetype': 'info'}) class TestAMIL1bNetCDF(TestAMIL1bNetCDFBase): """Test the AMI L1b reader.""" def _check_orbital_parameters(self, orb_params): """Check that orbital parameters match expected values.""" exp_params = { 'projection_altitude': 35785863.0, 'projection_latitude': 0.0, 'projection_longitude': 128.2, 'satellite_actual_altitude': 35782654.56070405, 'satellite_actual_latitude': 0.005364927, 'satellite_actual_longitude': 128.2707, } for key, val in exp_params.items(): self.assertAlmostEqual(val, orb_params[key], places=3) def test_filename_grouping(self): """Test that filenames are grouped properly.""" from satpy.readers import group_files filenames = [ 'gk2a_ami_le1b_ir087_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir096_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir105_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir112_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir123_fd020ge_201909300300.nc', 'gk2a_ami_le1b_ir133_fd020ge_201909300300.nc', 'gk2a_ami_le1b_nr013_fd020ge_201909300300.nc', 'gk2a_ami_le1b_nr016_fd020ge_201909300300.nc', 'gk2a_ami_le1b_sw038_fd020ge_201909300300.nc', 'gk2a_ami_le1b_vi004_fd010ge_201909300300.nc', 'gk2a_ami_le1b_vi005_fd010ge_201909300300.nc', 'gk2a_ami_le1b_vi006_fd005ge_201909300300.nc', 'gk2a_ami_le1b_vi008_fd010ge_201909300300.nc', 'gk2a_ami_le1b_wv063_fd020ge_201909300300.nc', 'gk2a_ami_le1b_wv069_fd020ge_201909300300.nc', 'gk2a_ami_le1b_wv073_fd020ge_201909300300.nc'] groups = group_files(filenames, reader='ami_l1b') self.assertEqual(len(groups), 1) self.assertEqual(len(groups[0]['ami_l1b']), 16) def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime self.assertEqual(self.reader.start_time, 
datetime(2019, 9, 30, 3, 0, 31, 957882)) self.assertEqual(self.reader.end_time, datetime(2019, 9, 30, 3, 9, 35, 606133)) def test_get_dataset(self): """Test gettting radiance data.""" from satpy import DatasetID key = DatasetID(name='VI006', calibration='radiance') res = self.reader.get_dataset(key, { 'file_key': 'image_pixel_values', 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', 'units': 'W m-2 um-1 sr-1', }) exp = {'calibration': 'radiance', 'modifiers': (), 'platform_name': 'GEO-KOMPSAT-2A', 'sensor': 'ami', 'units': 'W m-2 um-1 sr-1'} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) self._check_orbital_parameters(res.attrs['orbital_parameters']) def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy import DatasetID self.assertRaises(ValueError, self.reader.get_dataset, DatasetID(name='VI006', calibration='_bad_'), {'file_key': 'image_pixel_values', 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', 'units': 'W m-2 um-1 sr-1', }) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] exp = {'a': 6378137.0, 'b': 6356752.3, 'h': 35785863.0, 'lon_0': 128.2, 'proj': 'geos', 'units': 'm'} for key, val in exp.items(): self.assertIn(key, call_args[3]) self.assertAlmostEqual(val, call_args[3][key]) self.assertEqual(call_args[4], self.reader.nc.attrs['number_of_columns']) self.assertEqual(call_args[5], self.reader.nc.attrs['number_of_lines']) np.testing.assert_allclose(call_args[6], [-5511022.902, -5511022.902, 5511022.902, 5511022.902]) def test_get_dataset_vis(self): """Test get visible calibrated data.""" from satpy import DatasetID key = DatasetID(name='VI006', calibration='reflectance') res = self.reader.get_dataset(key, { 'file_key': 'image_pixel_values', 'standard_name': 'toa_bidirectional_reflectance', 'units': '%', }) exp = {'calibration': 'reflectance', 'modifiers': (), 'platform_name': 'GEO-KOMPSAT-2A', 'sensor': 'ami', 'units': '%'} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) self._check_orbital_parameters(res.attrs['orbital_parameters']) def test_get_dataset_counts(self): """Test get counts data.""" from satpy import DatasetID key = DatasetID(name='VI006', calibration='counts') res = self.reader.get_dataset(key, { 'file_key': 'image_pixel_values', 'standard_name': 'counts', 'units': '1', }) exp = {'calibration': 'counts', 'modifiers': (), 'platform_name': 'GEO-KOMPSAT-2A', 'sensor': 'ami', 'units': '1'} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) self._check_orbital_parameters(res.attrs['orbital_parameters']) class TestAMIL1bNetCDFIRCal(TestAMIL1bNetCDFBase): """Test IR specific things about the AMI reader.""" def setUp(self): """Create test data for IR calibration tests.""" count_data = (np.arange(10).reshape((2, 5))) + 7000 count_data = count_data.astype(np.uint16) count = xr.DataArray( da.from_array(count_data, chunks='auto'), dims=('y', 'x'), attrs={ 'channel_name': "IR087", 'detector_side': 2, 'number_of_total_pixels': 484000000, 'number_of_error_pixels': 113892451, 'max_pixel_value': 32768, 'min_pixel_value': 6, 'average_pixel_value': 8228.98770845248, 'stddev_pixel_value': 13621.130386551, 'number_of_total_bits_per_pixel': 16, 'number_of_data_quality_flag_bits_per_pixel': 2, 'number_of_valid_bits_per_pixel': 13, 'data_quality_flag_meaning': "0:good_pixel, 
1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", 'ground_sample_distance_ew': 1.4e-05, 'ground_sample_distance_ns': 1.4e-05, } ) super(TestAMIL1bNetCDFIRCal, self).setUp(counts=count) def test_ir_calibrate(self): """Test IR calibration.""" from satpy import DatasetID from satpy.readers.ami_l1b import rad2temp ds_id = DatasetID(name='IR087', wavelength=[8.415, 8.59, 8.765], calibration='brightness_temperature') ds_info = { 'file_key': 'image_pixel_values', 'wavelength': [8.415, 8.59, 8.765], 'standard_name': 'toa_brightness_temperature', 'units': 'K', } with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(ds_id, ds_info) r2t_mock.assert_called_once() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True) # make sure the attributes from the file are in the data array self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') # test builtin coefficients self.reader.calib_mode = 'FILE' with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(ds_id, ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.04) # make sure the attributes from the file are in the data array self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') def suite(): """Create the test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestAMIL1bNetCDF)) mysuite.addTest(loader.loadTestsFromTestCase(TestAMIL1bNetCDFIRCal)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_amsr2_l1b.py000066400000000000000000000175001362525524100231220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.amsr2_l1b module. 
""" import os import sys import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import convert_file_content_to_data_array if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" file_content = { '/attr/PlatformShortName': 'GCOM-W1', '/attr/SensorShortName': 'AMSR2', '/attr/StartOrbitNumber': '22210', '/attr/StopOrbitNumber': '22210', } for bt_chan in [ '(10.7GHz,H)', '(10.7GHz,V)', '(18.7GHz,H)', '(18.7GHz,V)', '(23.8GHz,H)', '(23.8GHz,V)', '(36.5GHz,H)', '(36.5GHz,V)', '(6.9GHz,H)', '(6.9GHz,V)', '(7.3GHz,H)', '(7.3GHz,V)', '(89.0GHz-A,H)', '(89.0GHz-A,V)', '(89.0GHz-B,H)', '(89.0GHz-B,V)', ]: k = 'Brightness Temperature {}'.format(bt_chan) file_content[k] = DEFAULT_FILE_DATA[:, ::2] file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) file_content[k + '/attr/UNIT'] = 'K' file_content[k + '/attr/SCALE FACTOR'] = 0.01 for bt_chan in [ '(89.0GHz-A,H)', '(89.0GHz-A,V)', '(89.0GHz-B,H)', '(89.0GHz-B,V)', ]: k = 'Brightness Temperature {}'.format(bt_chan) file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/UNIT'] = 'K' file_content[k + '/attr/SCALE FACTOR'] = 0.01 for nav_chan in ['89A', '89B']: lon_k = 'Longitude of Observation Point for ' + nav_chan lat_k = 'Latitude of Observation Point for ' + nav_chan file_content[lon_k] = DEFAULT_LON_DATA file_content[lon_k + '/shape'] = DEFAULT_FILE_SHAPE file_content[lon_k + '/attr/SCALE FACTOR'] = 1 file_content[lon_k + '/attr/UNIT'] = 'deg' file_content[lat_k] = DEFAULT_LAT_DATA file_content[lat_k + '/shape'] = DEFAULT_FILE_SHAPE file_content[lat_k + '/attr/SCALE FACTOR'] = 1 file_content[lat_k + '/attr/UNIT'] = 'deg' convert_file_content_to_data_array(file_content) return file_content class TestAMSR2L1BReader(unittest.TestCase): """Test AMSR2 L1B Reader""" yaml_file = "amsr2_l1b.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(AMSR2L1BFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 
'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_basic(self): """Test loading of basic channels""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load([ 'btemp_10.7v', 'btemp_10.7h', 'btemp_6.9v', 'btemp_6.9h', 'btemp_7.3v', 'btemp_7.3h', 'btemp_18.7v', 'btemp_18.7h', 'btemp_23.8v', 'btemp_23.8h', 'btemp_36.5v', 'btemp_36.5h', ]) self.assertEqual(len(ds), 12) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'brightness_temperature') self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2))) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertTupleEqual(d.attrs['area'].lons.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) self.assertTupleEqual(d.attrs['area'].lats.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) def test_load_89ghz(self): """Test loading of 89GHz channels""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load([ 'btemp_89.0av', 'btemp_89.0ah', 'btemp_89.0bv', 'btemp_89.0bh', ]) self.assertEqual(len(ds), 4) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'brightness_temperature') self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertTupleEqual(d.attrs['area'].lons.shape, DEFAULT_FILE_SHAPE) self.assertTupleEqual(d.attrs['area'].lats.shape, DEFAULT_FILE_SHAPE) def suite(): """The test suite for test_amsr2_l1b. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestAMSR2L1BReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py000066400000000000000000000405711362525524100245160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
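# ---------------------------------------------------------------------------
# Illustration only (hypothetical helper, not part of the test suite): the
# GACLAC tests below parse filenames with trollsift against GAC_PATTERN. A
# self-contained sketch of that parsing, using a name from GAC_POD_FILENAMES:
def _demo_parse_gac_filename():
    """Return the metadata parsed from a GAC filename (sketch)."""
    from trollsift import parse
    pattern = ('{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.'
               'D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.'
               'B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}')
    info = parse(pattern, 'NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI')
    # info['platform_id'] == 'NA'; info['start_time'] is a datetime built
    # from the two-digit year and day-of-year fields.
    return info
# ---------------------------------------------------------------------------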
"""Pygac interface.""" from datetime import datetime from unittest import TestCase, main, TestLoader, TestSuite import numpy as np try: from unittest import mock except ImportError: # python 2 import mock GAC_PATTERN = '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' # noqa GAC_POD_FILENAMES = ['NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI', 'NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI', 'NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI', 'NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI', 'NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC', 'NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI', 'NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', 'NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC', 'NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC', 'NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC', 'NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC'] GAC_KLM_FILENAMES = ['NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC', 'NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC', 'NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC', 'NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI', 'NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV', 'NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV', 'NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV'] LAC_POD_FILENAMES = ['BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB', 'BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB', 'BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB', 'BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB'] LAC_KLM_FILENAMES = ['BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB', 'BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB', 'BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB', 'BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB', 'BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB', 'BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB', 'BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB', 'BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB', 'BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB', 'BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB', 'BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB', 'BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB', 'BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB', 'NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV', 'NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI'] class TestGACLACFile(TestCase): """Test the GACLAC file handler.""" def setUp(self): """Patch pygac imports.""" self.pygac = mock.MagicMock() self.fhs = mock.MagicMock() modules = { 'pygac': self.pygac, 'pygac.gac_klm': self.pygac.gac_klm, 'pygac.gac_pod': self.pygac.gac_pod, 'pygac.lac_klm': self.pygac.lac_klm, 'pygac.lac_pod': self.pygac.lac_pod, 'pygac.utils': self.pygac.utils } self.module_patcher = mock.patch.dict('sys.modules', modules) self.module_patcher.start() # Import GACLACFile here to make it patchable. Otherwise self._get_fh # might import it first which would prevent a successful patch. 
from satpy.readers.avhrr_l1b_gaclac import GACLACFile self.GACLACFile = GACLACFile def tearDown(self): """Unpatch the pygac imports.""" self.module_patcher.stop() def _get_fh(self, filename='NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', **kwargs): """Create a file handler.""" from trollsift import parse filename_info = parse(GAC_PATTERN, filename) return self.GACLACFile(filename, filename_info, {}, **kwargs) def test_init(self): """Test GACLACFile initialization.""" from pygac.gac_klm import GACKLMReader from pygac.gac_pod import GACPODReader from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader for filenames, reader_cls in zip([GAC_POD_FILENAMES, GAC_KLM_FILENAMES, LAC_POD_FILENAMES, LAC_KLM_FILENAMES], [GACPODReader, GACKLMReader, LACPODReader, LACKLMReader]): for filename in filenames: fh = self._get_fh(filename) self.assertLess(fh.start_time, fh.end_time, "Start time must precede end time.") self.assertIs(fh.reader_class, reader_cls, 'Wrong reader class assigned to {}'.format(filename)) def test_get_dataset(self): """Test getting the dataset.""" from pygac.gac_pod import GACPODReader from satpy.dataset import DatasetID fh = self._get_fh(strip_invalid_coords=False) lon_ones = np.ones((10, 10)) lat_ones = np.ones((10, 10)) ch_ones = np.ones((10, 10)) acq_ones = np.ones((10, ), dtype='datetime64[us]') angle_ones = np.ones((10, 10)) qualflags_ones = np.ones((10, 7)) miss_lines = np.array([1, 2]) # Channel key = DatasetID('1') info = {'name': '1', 'standard_name': 'reflectance'} GACPODReader.return_value.get_calibrated_channels.return_value.__getitem__.return_value = ch_ones GACPODReader.return_value.get_times.return_value = acq_ones GACPODReader.return_value.get_lonlat.return_value = lon_ones, lat_ones GACPODReader.return_value.get_qual_flags.return_value = qualflags_ones GACPODReader.return_value.get_miss_lines.return_value = miss_lines GACPODReader.return_value.get_midnight_scanline.return_value = 'midn_line' GACPODReader.return_value.mask = [0] GACPODReader.return_value.meta_data = {'missing_scanlines': miss_lines, 'midnight_scanline': 'midn_line'} res = fh.get_dataset(key, info) np.testing.assert_allclose(res.data, ch_ones) np.testing.assert_array_equal(res.coords['acq_time'].data, acq_ones) self.assertTupleEqual(res.dims, ('y', 'x')) self.assertEqual(fh.start_time, datetime(1970, 1, 1, 0, 0, 0, 1)) self.assertEqual(fh.end_time, datetime(1970, 1, 1, 0, 0, 0, 1)) np.testing.assert_array_equal(res.attrs['missing_scanlines'], miss_lines) self.assertEqual(res.attrs['midnight_scanline'], 'midn_line') # Angles for item in ['solar_zenith_angle', 'sensor_zenith_angle', 'solar_azimuth_angle', 'sensor_azimuth_angle', 'sun_sensor_azimuth_difference_angle']: key = DatasetID(item) info = {'name': item} GACPODReader.return_value.get_angles.return_value = (angle_ones, ) * 5 GACPODReader.return_value.get_tle_lines.return_value = 'tle1', 'tle2' res = fh.get_dataset(key, info) np.testing.assert_allclose(res.data, angle_ones) np.testing.assert_array_equal(res.coords['acq_time'].data, acq_ones) self.assertDictEqual(res.attrs['orbital_parameters'], {'tle': ('tle1', 'tle2')}) # Longitude key = DatasetID('longitude') info = {'name': 'longitude', 'unit': 'degrees_east'} res = fh.get_dataset(key, info) np.testing.assert_allclose(res.data, lon_ones) self.assertEqual(res.attrs['unit'], 'degrees_east') np.testing.assert_array_equal(res.coords['acq_time'].data, acq_ones) # Latitude key = DatasetID('latitude') info = {'name': 'latitude', 'unit': 'degrees_north'} res = fh.get_dataset(key, 
info) np.testing.assert_allclose(res.data, lat_ones) self.assertEqual(res.attrs['unit'], 'degrees_north') np.testing.assert_array_equal(res.coords['acq_time'].data, acq_ones) # Quality flags key = DatasetID('qual_flags') info = {'name': 'qual_flags', 'long_name': 'My long name'} res = fh.get_dataset(key, info) np.testing.assert_allclose(res.data, qualflags_ones) self.assertTupleEqual(res.dims, ('y', 'num_flags')) np.testing.assert_array_equal(res.coords['acq_time'].data, acq_ones) self.assertEqual(res.attrs['long_name'], 'My long name') # Buffering GACPODReader.return_value.get_calibrated_channels.assert_called_once() GACPODReader.return_value.get_qual_flags.assert_called_once() @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel') def test_get_dataset_extras(self, get_channel, get_angle, slc): """Test getting the dataset with extra options.""" from satpy.dataset import DatasetID # Define test data lons = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) lats = lons.copy() angles = lons.copy() ch = np.array([[0.1, 0.2], [0.3, 0.4], [0.5, 0.6], [0.7, 0.8]]) acq = np.array([1, 2, 3, 4], dtype='datetime64[us]') # Mock reading reader = mock.MagicMock() reader.mask = [0] reader.get_lonlat.return_value = lons, lats reader.get_times.return_value = acq get_channel.return_value = ch get_angle.return_value = angles # Test slicing/stripping def slice_patched(data, times): if len(data.shape) == 2: return data[1:3, :], times[1:3] return data[1:3], times[1:3] slc.side_effect = slice_patched kwargs_list = [{'strip_invalid_coords': False, 'start_line': 123, 'end_line': 456}, {'strip_invalid_coords': True, 'start_line': None, 'end_line': None}, {'strip_invalid_coords': True, 'start_line': 123, 'end_line': 456}] for kwargs in kwargs_list: fh = self._get_fh(**kwargs) fh.reader = reader key = DatasetID('1') info = {'name': '1', 'standard_name': 'reflectance'} res = fh.get_dataset(key, info) np.testing.assert_array_equal(res.data, ch[1:3, :]) np.testing.assert_array_equal(res.coords['acq_time'].data, acq[1:3]) slc.assert_called_with(data=ch, times=acq) # Renaming of coordinates if interpolation is switched off fh = self._get_fh(interpolate_coords=False) fh.reader = reader key = DatasetID('latitude') info = {'name': 'latitude', 'unit': 'degrees_north'} res = fh.get_dataset(key, info) self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) key = DatasetID('solar_zenith_angle') info = {'name': 'solar_zenith_angle', 'unit': 'degrees'} res = fh.get_dataset(key, info) self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) def test_get_angle(self): """Test getting the angle.""" reader = mock.MagicMock() reader.get_angles.return_value = 1, 2, 3, 4, 5 fh = self._get_fh() fh.reader = reader # Test angle readout res = fh._get_angle('sensor_zenith_angle') self.assertEqual(res, 2) self.assertDictEqual(fh.angles, {'sensor_zenith_angle': 2, 'sensor_azimuth_angle': 1, 'solar_zenith_angle': 4, 'solar_azimuth_angle': 3, 'sun_sensor_azimuth_difference_angle': 5}) # Test buffering fh._get_angle('sensor_azimuth_angle') reader.get_angles.assert_called_once() def test_strip_invalid_lat(self): """Test stripping invalid coordinates.""" import pygac.utils reader = mock.MagicMock() reader.get_lonlat.return_value = None, None fh = self._get_fh() fh.reader = reader # Test stripping pygac.utils.strip_invalid_lat.return_value = 1,
2 start, end = fh._strip_invalid_lat() self.assertTupleEqual((start, end), (1, 2)) # Test buffering fh._strip_invalid_lat() pygac.utils.strip_invalid_lat.assert_called_once() @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice') def test_slice(self, _slice): """Test slicing.""" def slice_patched(data): if len(data.shape) == 2: return data[1:3, :], 'midn_line', np.array([1., 2., 3.]) return data[1:3], 'foo', np.array([0, 0, 0]) _slice.side_effect = slice_patched data = np.zeros((4, 2)) times = np.array([1, 2, 3, 4], dtype='datetime64[us]') fh = self._get_fh() data_slc, times_slc = fh.slice(data, times) np.testing.assert_array_equal(data_slc, data[1:3]) np.testing.assert_array_equal(times_slc, times[1:3]) self.assertEqual(fh.start_time, datetime(1970, 1, 1, 0, 0, 0, 2)) self.assertEqual(fh.end_time, datetime(1970, 1, 1, 0, 0, 0, 3)) self.assertEqual(fh.midnight_scanline, 'midn_line') np.testing.assert_array_equal(fh.missing_scanlines, np.array([1, 2, 3])) self.assertEqual(fh.missing_scanlines.dtype, int) @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags') @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat') def test__slice(self, strip_invalid_lat, get_qual_flags): """Test slicing.""" import pygac.utils pygac.utils.check_user_scanlines.return_value = 1, 2 pygac.utils.slice_channel.return_value = 'sliced', 'miss_lines', 'midn_line' strip_invalid_lat.return_value = 3, 4 get_qual_flags.return_value = 'qual_flags' data = np.zeros((2, 2)) # a) Only start/end line given fh = self._get_fh(start_line=5, end_line=6, strip_invalid_coords=False) data_slc, midn_line, miss_lines = fh._slice(data) self.assertEqual(data_slc, 'sliced') self.assertEqual(midn_line, 'midn_line') self.assertEqual(miss_lines, 'miss_lines') pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=None, last_valid_lat=None, along_track=2) pygac.utils.slice_channel.assert_called_with( data, start_line=1, end_line=2, first_valid_lat=None, last_valid_lat=None, midnight_scanline=None, miss_lines=None, qual_flags='qual_flags') # b) Only strip_invalid_coords=True fh = self._get_fh(strip_invalid_coords=True) fh._slice(data) pygac.utils.check_user_scanlines.assert_called_with( start_line=0, end_line=0, first_valid_lat=3, last_valid_lat=4, along_track=2) # c) Both fh = self._get_fh(start_line=5, end_line=6, strip_invalid_coords=True) fh._slice(data) pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=3, last_valid_lat=4, along_track=2) def suite(): """Test suite.""" loader = TestLoader() mysuite = TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestGACLACFile)) return mysuite if __name__ == '__main__': main() satpy-0.20.0/satpy/tests/reader_tests/test_clavrx.py000066400000000000000000000415111362525524100226360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.clavrx module. """ import os import sys import numpy as np import dask.array as da import xarray as xr from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler from pyresample.geometry import AreaDefinition if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF4FileHandlerPolar(FakeHDF4FileHandler): """Swap-in HDF4 File Handler""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" file_content = { '/attr/platform': 'SNPP', '/attr/sensor': 'VIIRS', } file_content['longitude'] = xr.DataArray( da.from_array(DEFAULT_LON_DATA, chunks=4096), attrs={ '_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'longitude', }) file_content['longitude/shape'] = DEFAULT_FILE_SHAPE file_content['latitude'] = xr.DataArray( da.from_array(DEFAULT_LAT_DATA, chunks=4096), attrs={ '_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'latitude', }) file_content['latitude/shape'] = DEFAULT_FILE_SHAPE file_content['variable1'] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ '_FillValue': -1, 'scale_factor': 1., 'add_offset': 0., 'units': '1', }) file_content['variable1/shape'] = DEFAULT_FILE_SHAPE # data with fill values file_content['variable2'] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ '_FillValue': -1, 'scale_factor': 1., 'add_offset': 0., 'units': '1', }) file_content['variable2/shape'] = DEFAULT_FILE_SHAPE file_content['variable2'] = file_content['variable2'].where( file_content['variable2'] % 2 != 0) # category file_content['variable3'] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.byte), attrs={ '_FillValue': -128, 'flag_meanings': 'clear water supercooled mixed ice unknown', 'flag_values': [0, 1, 2, 3, 4, 5], 'units': '1', }) file_content['variable3/shape'] = DEFAULT_FILE_SHAPE return file_content class TestCLAVRXReaderPolar(unittest.TestCase): """Test CLAVR-X Reader with Polar files.""" yaml_file = "clavrx.yaml" def setUp(self): """Wrap HDF4 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.clavrx import CLAVRXFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(CLAVRXFileHandler, '__bases__', (FakeHDF4FileHandlerPolar,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from 
satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_available_datasets(self): """Test available_datasets with fake variables from YAML.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) # mimic the YAML file being configured for more datasets fake_dataset_info = [ (None, {'name': 'variable1', 'resolution': None, 'file_type': ['level2']}), (True, {'name': 'variable2', 'resolution': 742, 'file_type': ['level2']}), (True, {'name': 'variable2', 'resolution': 1, 'file_type': ['level2']}), (None, {'name': 'variable2', 'resolution': 1, 'file_type': ['level2']}), (None, {'name': '_fake1', 'file_type': ['level2']}), (None, {'name': 'variable1', 'file_type': ['level_fake']}), (True, {'name': 'variable3', 'file_type': ['level2']}), ] new_ds_infos = list(r.file_handlers['level2'][0].available_datasets( fake_dataset_info)) self.assertEqual(len(new_ds_infos), 9) # we have this and can provide the resolution self.assertTrue(new_ds_infos[0][0]) self.assertEqual(new_ds_infos[0][1]['resolution'], 742) # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have self.assertTrue(new_ds_infos[1][0]) self.assertEqual(new_ds_infos[1][1]['resolution'], 742) # we have this, but don't want to change the resolution # because a previous handler said it has it self.assertTrue(new_ds_infos[2][0]) self.assertEqual(new_ds_infos[2][1]['resolution'], 1) # even though the previous one was known we can still # produce it at our new resolution self.assertTrue(new_ds_infos[3][0]) self.assertEqual(new_ds_infos[3][1]['resolution'], 742) # we have this and can update the resolution since # no one else has claimed it self.assertTrue(new_ds_infos[4][0]) self.assertEqual(new_ds_infos[4][1]['resolution'], 742) # we don't have this variable, don't change it self.assertFalse(new_ds_infos[5][0]) self.assertIsNone(new_ds_infos[5][1].get('resolution')) # we have this, but it isn't supposed to come from our file type self.assertIsNone(new_ds_infos[6][0]) self.assertIsNone(new_ds_infos[6][1].get('resolution')) # we could have loaded this but some other file handler said it has this self.assertTrue(new_ds_infos[7][0]) self.assertIsNone(new_ds_infos[7][1].get('resolution')) # we can add resolution to the previous dataset, so we do self.assertTrue(new_ds_infos[8][0]) self.assertEqual(new_ds_infos[8][1]['resolution'], 742) def test_load_all(self): """Test loading all test datasets""" from satpy.readers import load_reader import xarray as xr r = load_reader(self.reader_configs) with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): loadables = r.select_files_from_pathnames([ 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', ]) r.create_filehandlers(loadables) datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertIs(v.attrs['calibration'], None) self.assertEqual(v.attrs['units'], '1') self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) class 
FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): """Swap-in HDF4 File Handler""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" file_content = { '/attr/platform': 'HIM8', '/attr/sensor': 'AHI', # this is a Level 2 file that came from a L1B file '/attr/L1B': 'clavrx_H08_20180806_1800', } file_content['longitude'] = xr.DataArray( DEFAULT_LON_DATA, dims=('y', 'x'), attrs={ '_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'longitude', }) file_content['longitude/shape'] = DEFAULT_FILE_SHAPE file_content['latitude'] = xr.DataArray( DEFAULT_LAT_DATA, dims=('y', 'x'), attrs={ '_FillValue': np.nan, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'latitude', }) file_content['latitude/shape'] = DEFAULT_FILE_SHAPE file_content['variable1'] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), dims=('y', 'x'), attrs={ '_FillValue': -1, 'scale_factor': 1., 'add_offset': 0., 'units': '1', 'valid_range': (-32767, 32767), }) file_content['variable1/shape'] = DEFAULT_FILE_SHAPE # data with fill values file_content['variable2'] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), dims=('y', 'x'), attrs={ '_FillValue': -1, 'scale_factor': 1., 'add_offset': 0., 'units': '1', }) file_content['variable2/shape'] = DEFAULT_FILE_SHAPE file_content['variable2'] = file_content['variable2'].where( file_content['variable2'] % 2 != 0) # category file_content['variable3'] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.byte), dims=('y', 'x'), attrs={ '_FillValue': -128, 'flag_meanings': 'clear water supercooled mixed ice unknown', 'flag_values': [0, 1, 2, 3, 4, 5], 'units': '1', }) file_content['variable3/shape'] = DEFAULT_FILE_SHAPE return file_content class TestCLAVRXReaderGeo(unittest.TestCase): """Test CLAVR-X Reader with Geo files.""" yaml_file = "clavrx.yaml" def setUp(self): """Wrap HDF4 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.clavrx import CLAVRXFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(CLAVRXFileHandler, '__bases__', (FakeHDF4FileHandlerGeo,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'clavrx_H08_20180806_1800.level2.hdf', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_no_nav_donor(self): """Test exception raised when no donor file is available.""" from satpy.readers import load_reader import xarray as xr r = load_reader(self.reader_configs) with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): loadables = r.select_files_from_pathnames([ 'clavrx_H08_20180806_1800.level2.hdf', ]) r.create_filehandlers(loadables) self.assertRaises(IOError, r.load, ['variable1', 'variable2', 'variable3']) def test_load_all_old_donor(self): """Test loading all test datasets with old donor.""" from satpy.readers import load_reader import xarray as xr r = load_reader(self.reader_configs) with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): loadables = r.select_files_from_pathnames([ 'clavrx_H08_20180806_1800.level2.hdf', ]) 
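# Geostationary CLAVR-X output carries no lat/lon arrays of its own: the file
# handler locates the matching L1B "donor" file (named by the /attr/L1B global
# attribute defined in FakeHDF4FileHandlerGeo above) and rebuilds an
# AreaDefinition from the donor's projection variable. The glob and
# netCDF4.Dataset mocks below stand in for that donor lookup.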
r.create_filehandlers(loadables) with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: g.return_value = ['fake_donor.nc'] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( semi_major_axis=6378.137, semi_minor_axis=6356.7523142, perspective_point_height=35791, longitude_of_projection_origin=140.7, sweep_angle_axis='y', ) d.return_value = fake_donor = mock.MagicMock( variables={'Projection': proj, 'x': x, 'y': y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertIs(v.attrs['calibration'], None) self.assertEqual(v.attrs['units'], '1') self.assertIsInstance(v.attrs['area'], AreaDefinition) self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) def test_load_all_new_donor(self): """Test loading all test datasets with new donor.""" from satpy.readers import load_reader import xarray as xr r = load_reader(self.reader_configs) with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): loadables = r.select_files_from_pathnames([ 'clavrx_H08_20180806_1800.level2.hdf', ]) r.create_filehandlers(loadables) with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: g.return_value = ['fake_donor.nc'] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( semi_major_axis=6378137, semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, sweep_angle_axis='y', ) d.return_value = fake_donor = mock.MagicMock( variables={'goes_imager_projection': proj, 'x': x, 'y': y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertIs(v.attrs['calibration'], None) self.assertEqual(v.attrs['units'], '1') self.assertIsInstance(v.attrs['area'], AreaDefinition) self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) def suite(): """The test suite for test_clavrx.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestCLAVRXReaderPolar)) mysuite.addTest(loader.loadTestsFromTestCase(TestCLAVRXReaderGeo)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_electrol_hrit.py000066400000000000000000000256321362525524100242040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """The HRIT electrol reader tests package.
""" import sys import datetime import numpy as np import dask.array as da from xarray import DataArray from satpy.readers.electrol_hrit import (recarray2dict, prologue, HRITGOMSPrologueFileHandler, HRITGOMSEpilogueFileHandler, HRITGOMSFileHandler, satellite_status, image_acquisition, epilogue) if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock # Simplify some type selections f64_t = np.float64 i32_t = np.int32 u32_t = np.uint32 class Testrecarray2dict(unittest.TestCase): """Test the function that converts numpy record arrays into dicts for use within SatPy.""" def test_fun(self): inner_st = np.dtype([('test_str', '. """Test the eps l1b format.""" import os from contextlib import suppress from tempfile import mkstemp from unittest import TestCase, TestLoader, TestSuite import numpy as np import xarray as xr from satpy import DatasetID from satpy.readers import eps_l1b as eps grh_dtype = np.dtype([("record_class", "|i1"), ("INSTRUMENT_GROUP", "|i1"), ("RECORD_SUBCLASS", "|i1"), ("RECORD_SUBCLASS_VERSION", "|i1"), ("RECORD_SIZE", ">u4"), ("RECORD_START_TIME", "S6"), ("RECORD_STOP_TIME", "S6")]) def create_sections(structure): """Create file sections.""" sections = {} form = eps.XMLFormat(os.path.join(eps.CONFIG_PATH, "eps_avhrrl1b_6.5.xml")) for count, (rec_class, sub_class) in structure: try: the_dtype = form.dtype((rec_class, sub_class)) except KeyError: continue item_size = the_dtype.itemsize + grh_dtype.itemsize the_dtype = np.dtype(grh_dtype.descr + the_dtype.descr) item = np.zeros(count, the_dtype) item['record_class'] = eps.record_class.index(rec_class) item['RECORD_SUBCLASS'] = sub_class item['RECORD_SIZE'] = item_size sections[(rec_class, sub_class)] = item return sections class TestEPSL1B(TestCase): """Test the filehandler.""" def setUp(self): """Set up the tests.""" # ipr is not present in the xml format ? 
structure = [(1, ('mphr', 0)), (1, ('sphr', 0)), (11, ('ipr', 0)), (1, ('geadr', 1)), (1, ('geadr', 2)), (1, ('geadr', 3)), (1, ('geadr', 4)), (1, ('geadr', 5)), (1, ('geadr', 6)), (1, ('geadr', 7)), (1, ('giadr', 1)), (1, ('giadr', 2)), (1, ('veadr', 1)), (1080, ('mdr', 2))] sections = create_sections(structure) sections[('mphr', 0)]['TOTAL_MDR'] = b'TOTAL_MDR = 1080\n' sections[('mphr', 0)]['SPACECRAFT_ID'] = b'SPACECRAFT_ID = M03\n' sections[('mphr', 0)]['INSTRUMENT_ID'] = b'INSTRUMENT_ID = AVHR\n' sections[('sphr', 0)]['EARTH_VIEWS_PER_SCANLINE'] = b'EARTH_VIEWS_PER_SCANLINE = 2048\n' _fd, fname = mkstemp() fd = open(_fd) self.filename = fname for _, arr in sections.items(): arr.tofile(fd) fd.close() self.fh = eps.EPSAVHRRFile(self.filename, {'start_time': 'now', 'end_time': 'later'}, {}) def test_read_all(self): """Test initialization.""" self.fh._read_all() assert(self.fh.scanlines == 1080) assert(self.fh.pixels == 2048) def test_dataset(self): """Test getting a dataset.""" did = DatasetID('1', calibration='reflectance') res = self.fh.get_dataset(did, {}) assert(isinstance(res, xr.DataArray)) assert(res.attrs['platform_name'] == 'Metop-C') assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == '1') assert(res.attrs['calibration'] == 'reflectance') did = DatasetID('4', calibration='brightness_temperature') res = self.fh.get_dataset(did, {}) assert(isinstance(res, xr.DataArray)) assert(res.attrs['platform_name'] == 'Metop-C') assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == '4') assert(res.attrs['calibration'] == 'brightness_temperature') def test_navigation(self): """Test the navigation.""" did = DatasetID('longitude') res = self.fh.get_dataset(did, {}) assert(isinstance(res, xr.DataArray)) assert(res.attrs['platform_name'] == 'Metop-C') assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == 'longitude') def test_angles(self): """Test the navigation.""" did = DatasetID('solar_zenith_angle') res = self.fh.get_dataset(did, {}) assert(isinstance(res, xr.DataArray)) assert(res.attrs['platform_name'] == 'Metop-C') assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == 'solar_zenith_angle') def tearDown(self): """Tear down the tests.""" with suppress(OSError): os.remove(self.filename) def suite(): """Test suite for test_scene.""" loader = TestLoader() mysuite = TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestEPSL1B)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_eum_base.py000066400000000000000000000073741362525524100231300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """EUMETSAT base reader tests package. 
""" import sys from datetime import datetime import numpy as np from satpy.readers.eum_base import (timecds2datetime, time_cds_short, time_cds, time_cds_expanded, recarray2dict) if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class TestMakeTimeCdsDictionary(unittest.TestCase): def test_fun(self): # time_cds_short tcds = {'Days': 1, 'Milliseconds': 2} expected = datetime(1958, 1, 2, 0, 0, 0, 2000) self.assertEqual(timecds2datetime(tcds), expected) # time_cds tcds = {'Days': 1, 'Milliseconds': 2, 'Microseconds': 3} expected = datetime(1958, 1, 2, 0, 0, 0, 2003) self.assertEqual(timecds2datetime(tcds), expected) # time_cds_expanded tcds = {'Days': 1, 'Milliseconds': 2, 'Microseconds': 3, 'Nanoseconds': 4} expected = datetime(1958, 1, 2, 0, 0, 0, 2003) self.assertEqual(timecds2datetime(tcds), expected) class TestMakeTimeCdsRecarray(unittest.TestCase): def test_fun(self): # time_cds_short tcds = np.array([(1, 2)], dtype=np.dtype(time_cds_short)) expected = datetime(1958, 1, 2, 0, 0, 0, 2000) self.assertEqual(timecds2datetime(tcds), expected) # time_cds tcds = np.array([(1, 2, 3)], dtype=np.dtype(time_cds)) expected = datetime(1958, 1, 2, 0, 0, 0, 2003) self.assertEqual(timecds2datetime(tcds), expected) # time_cds_expanded tcds = np.array([(1, 2, 3, 4)], dtype=np.dtype(time_cds_expanded)) expected = datetime(1958, 1, 2, 0, 0, 0, 2003) self.assertEqual(timecds2datetime(tcds), expected) class TestRecarray2Dict(unittest.TestCase): def test_fun(self): # datatype definition pat_dt = np.dtype([ ('TrueRepeatCycleStart', time_cds_expanded), ('PlanForwardScanEnd', time_cds_expanded), ('PlannedRepeatCycleEnd', time_cds_expanded) ]) # planned acquisition time, add extra dimensions # these should be removed by recarray2dict pat = np.array([[[( (21916, 41409544, 305, 262), (21916, 42160340, 659, 856), (21916, 42309417, 918, 443))]]], dtype=pat_dt) expected = { 'TrueRepeatCycleStart': datetime(2018, 1, 2, 11, 30, 9, 544305), 'PlanForwardScanEnd': datetime(2018, 1, 2, 11, 42, 40, 340660), 'PlannedRepeatCycleEnd': datetime(2018, 1, 2, 11, 45, 9, 417918) } self.assertEqual(recarray2dict(pat), expected) def suite(): """The test suite for EUMETSAT base reader. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestMakeTimeCdsDictionary)) mysuite.addTest(loader.loadTestsFromTestCase(TestMakeTimeCdsRecarray)) mysuite.addTest(loader.loadTestsFromTestCase(TestRecarray2Dict)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_fci_l1c_fdhsi.py000066400000000000000000000360731362525524100240230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the 'fci_l1c_fdhsi' reader.""" from __future__ import (division, absolute_import, print_function, unicode_literals) import sys import os import numpy as np import xarray as xr import dask.array as da import unittest import numpy.testing from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler try: from unittest import mock # Python 3.3 or newer except ImportError: import mock # Python 2.7 class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def _get_test_calib_for_channel_ir(self, chroot, meas): from pyspectral.blackbody import ( H_PLANCK as h, K_BOLTZMANN as k, C_SPEED as c) xrda = xr.DataArray data = {} data[meas + "/radiance_unit_conversion_coefficient"] = xrda(1) data[chroot + "/central_wavelength_actual"] = xrda(10) data[meas + "/radiance_to_bt_conversion_coefficient_a"] = xrda(1000) data[meas + "/radiance_to_bt_conversion_coefficient_b"] = xrda(1) data[meas + "/radiance_to_bt_conversion_constant_c1"] = xrda(2*h*c**2) data[meas + "/radiance_to_bt_conversion_constant_c2"] = xrda(h*c/k) return data def _get_test_calib_for_channel_vis(self, chroot, meas): xrda = xr.DataArray data = {} data[meas + "/channel_effective_solar_irradiance"] = xrda(50) return data def _get_test_content_for_channel(self, pat, ch): xrda = xr.DataArray nrows = 200 ncols = 11136 chroot = "data/{:s}" meas = chroot + "/measured" rad = meas + "/effective_radiance" pos = meas + "/{:s}_position_{:s}" shp = rad + "/shape" data = {} ch_str = pat.format(ch) ch_path = rad.format(ch_str) d = xr.DataArray( da.ones((nrows, ncols), dtype="uint16", chunks=1024), dims=("y", "x"), attrs={ "valid_range": [0, 4095], "scale_factor": 5, "add_offset": 10, "units": "mW.m-2.sr-1.(cm-1)-1", } ) data[ch_path] = d data[pos.format(ch_str, "start", "row")] = xrda(0) data[pos.format(ch_str, "start", "column")] = xrda(0) data[pos.format(ch_str, "end", "row")] = xrda(nrows) data[pos.format(ch_str, "end", "column")] = xrda(ncols) if pat.startswith("ir") or pat.startswith("wv"): data.update(self._get_test_calib_for_channel_ir(chroot.format(ch_str), meas.format(ch_str))) elif pat.startswith("vis") or pat.startswith("nir"): data.update(self._get_test_calib_for_channel_vis(chroot.format(ch_str), meas.format(ch_str))) data[shp.format(ch_str)] = (nrows, ncols) return data def _get_test_content_all_channels(self): chan_patterns = { "vis_{:>02d}": (4, 5, 6, 8, 9), "nir_{:>02d}": (13, 16, 22), "ir_{:>02d}": (38, 87, 97, 105, 123, 133), "wv_{:>02d}": (63, 73), } data = {} for pat in chan_patterns.keys(): for ch_num in chan_patterns[pat]: data.update(self._get_test_content_for_channel(pat, ch_num)) return data def _get_test_content_areadef(self): data = {} proc = "state/processor" for (lb, no) in ( ("earth_equatorial_radius", 6378137), ("earth_polar_radius", 6356752), ("reference_altitude", 35786000), ("projection_origin_longitude", 0)): data[proc + "/" + lb] = xr.DataArray(no) return data def get_test_content(self, filename, filename_info, filetype_info): # mock global attributes # - root groups global # - other groups global # mock data variables # mock dimensions # # ... but only what satpy is using ... 
D = {} D.update(self._get_test_content_all_channels()) D.update(self._get_test_content_areadef()) return D class FakeNetCDF4FileHandler3(FakeNetCDF4FileHandler2): """Mock bad data """ def _get_test_calib_for_channel_ir(self, chroot, meas): from netCDF4 import default_fillvals v = xr.DataArray(default_fillvals["f4"]) data = {} data[meas + "/radiance_unit_conversion_coefficient"] = v data[chroot + "/central_wavelength_actual"] = v data[meas + "/radiance_to_bt_conversion_coefficient_a"] = v data[meas + "/radiance_to_bt_conversion_coefficient_b"] = v data[meas + "/radiance_to_bt_conversion_constant_c1"] = v data[meas + "/radiance_to_bt_conversion_constant_c2"] = v return data class TestFCIL1CFDHSIReader(unittest.TestCase): yaml_file = "fci_l1c_fdhsi.yaml" _alt_handler = FakeNetCDF4FileHandler2 def setUp(self): """Wrap NetCDF4 FileHandler with our own fake handler """ # implementation strongly inspired by test_viirs_l1b.py from satpy.config import config_search_paths from satpy.readers.fci_l1c_fdhsi import FCIFDHSIFileHandler self.reader_configs = config_search_paths( os.path.join("readers", self.yaml_file)) self.p = mock.patch.object( FCIFDHSIFileHandler, "__bases__", (self._alt_handler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler """ # implementation strongly inspired by test_viirs_l1b.py self.p.stop() class TestFCIL1CFDHSIReaderGoodData(TestFCIL1CFDHSIReader): """Test FCI L1C FDHSI reader """ # TODO: # - test special case for extended range IR38 # - test geolocation _alt_handler = FakeNetCDF4FileHandler2 def test_file_pattern(self): """Test file pattern matching """ from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114442_GTT_DEV_" "20170410113934_20170410113942_N__C_0070_0068.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114451_GTT_DEV_" "20170410113942_20170410113951_N__C_0070_0069.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114500_GTT_DEV_" "20170410113951_20170410114000_N__C_0070_0070.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_19700101000000_GTT_DEV_" "19700000000000_19700000000000_N__C_0042_0070.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-TRAIL--L2P-NC4E_C_EUMT_20170410114600_GTT_DEV_" "20170410113000_20170410114000_N__C_0070_0071.nc", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) # only 4 out of 6 above should match self.assertTrue(4, len(files)) _chans = {"solar": ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", "nir_13", "nir_16", "nir_22"], "terran": ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", "ir_123", "ir_133"]} def test_load_counts(self): """Test loading with counts """ from satpy import DatasetID from satpy.readers import load_reader # testing two filenames to test correctly combined filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114442_GTT_DEV_" 
"20170410113934_20170410113942_N__C_0070_0068.nc", ] reader = load_reader(self.reader_configs) loadables = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(loadables) res = reader.load( [DatasetID(name=name, calibration="counts") for name in self._chans["solar"] + self._chans["terran"]]) self.assertEqual(16, len(res)) for ch in self._chans["solar"] + self._chans["terran"]: self.assertEqual(res[ch].shape, (200*2, 11136)) self.assertEqual(res[ch].dtype, np.uint16) self.assertEqual(res[ch].attrs["calibration"], "counts") self.assertEqual(res[ch].attrs["units"], "1") numpy.testing.assert_array_equal(res[ch], 1) def test_load_radiance(self): """Test loading with radiance """ from satpy import DatasetID from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = load_reader(self.reader_configs) loadables = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(loadables) res = reader.load( [DatasetID(name=name, calibration="radiance") for name in self._chans["solar"] + self._chans["terran"]]) self.assertEqual(16, len(res)) for ch in self._chans["solar"] + self._chans["terran"]: self.assertEqual(res[ch].shape, (200, 11136)) self.assertEqual(res[ch].dtype, np.float64) self.assertEqual(res[ch].attrs["calibration"], "radiance") self.assertEqual(res[ch].attrs["units"], 'mW.m-2.sr-1.(cm-1)-1') numpy.testing.assert_array_equal(res[ch], 15) def test_load_reflectance(self): """Test loading with reflectance """ from satpy import DatasetID from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = load_reader(self.reader_configs) loadables = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(loadables) res = reader.load( [DatasetID(name=name, calibration="reflectance") for name in self._chans["solar"]]) self.assertEqual(8, len(res)) for ch in self._chans["solar"]: self.assertEqual(res[ch].shape, (200, 11136)) self.assertEqual(res[ch].dtype, np.float64) self.assertEqual(res[ch].attrs["calibration"], "reflectance") self.assertEqual(res[ch].attrs["units"], "%") numpy.testing.assert_array_equal(res[ch], 15 / 50 * 100) def test_load_bt(self): """Test loading with bt """ from satpy import DatasetID from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = load_reader(self.reader_configs) loadables = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(loadables) res = reader.load( [DatasetID(name=name, calibration="brightness_temperature") for name in self._chans["terran"]]) self.assertEqual(8, len(res)) for ch in self._chans["terran"]: self.assertEqual(res[ch].shape, (200, 11136)) self.assertEqual(res[ch].dtype, np.float64) self.assertEqual(res[ch].attrs["calibration"], "brightness_temperature") self.assertEqual(res[ch].attrs["units"], "K") numpy.testing.assert_array_almost_equal( res[ch], 181.917084) def test_load_composite(self): """Test that composites are loadable """ # when dedicated composites for FCI FDHSI are implemented in satpy, # this method should probably move to a dedicated class and module # in the 
tests.compositor_tests package from satpy.composites import CompositorLoader cl = CompositorLoader() (comps, mods) = cl.load_compositors(["fci"]) self.assertGreater(len(comps["fci"]), 0) self.assertGreater(len(mods["fci"]), 0) class TestFCIL1CFDHSIReaderBadData(TestFCIL1CFDHSIReader): _alt_handler = FakeNetCDF4FileHandler3 @unittest.skipIf( sys.version_info < (3, 4), "skipping log message testing on old Python version " "that doesn't have TestCase.assertLogs") def test_handling_bad_data_ir(self): """Test handling of bad data """ from satpy import DatasetID from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc", ] reader = load_reader(self.reader_configs) loadables = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(loadables) with self.assertLogs( 'satpy.readers.fci_l1c_fdhsi', level="ERROR") as cm: reader.load([DatasetID( name="ir_123", calibration="brightness_temperature")]) self.assertRegex(cm.output[0], "cannot produce brightness temperatur") def suite(): """The test suite """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestFCIL1CFDHSIReaderGoodData)) mysuite.addTest(loader.loadTestsFromTestCase(TestFCIL1CFDHSIReaderBadData)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_generic_image.py000066400000000000000000000206221362525524100241150ustar00rootroot00000000000000#!/usr/bin/python # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittests for generic image reader. 
""" import os import unittest import xarray as xr import dask.array as da import numpy as np class TestGenericImage(unittest.TestCase): """Test generic image reader.""" def setUp(self): """Create temporary images to test on.""" import tempfile from datetime import datetime from pyresample.geometry import AreaDefinition from satpy.scene import Scene self.date = datetime(2018, 1, 1) # Create area definition pcs_id = 'ETRS89 / LAEA Europe' proj4_dict = {'init': 'epsg:3035'} self.x_size = 100 self.y_size = 100 area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) self.area_def = AreaDefinition('geotiff_area', pcs_id, pcs_id, proj4_dict, self.x_size, self.y_size, area_extent) # Create datasets for L, LA, RGB and RGBA mode images r__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint8) g__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint8) b__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint8) a__ = 255 * np.ones((self.y_size, self.x_size), dtype=np.uint8) a__[:10, :10] = 0 a__ = da.from_array(a__, chunks=(50, 50)) ds_l = xr.DataArray(da.stack([r__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_l', 'start_time': self.date}) ds_l['bands'] = ['L'] ds_la = xr.DataArray(da.stack([r__, a__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_la', 'start_time': self.date}) ds_la['bands'] = ['L', 'A'] ds_rgb = xr.DataArray(da.stack([r__, g__, b__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_rgb', 'start_time': self.date}) ds_rgb['bands'] = ['R', 'G', 'B'] ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]), dims=('bands', 'y', 'x'), attrs={'name': 'test_rgba', 'start_time': self.date}) ds_rgba['bands'] = ['R', 'G', 'B', 'A'] # Temp dir for the saved images self.base_dir = tempfile.mkdtemp() # Put the datasets to Scene for easy saving scn = Scene() scn['l'] = ds_l scn['l'].attrs['area'] = self.area_def scn['la'] = ds_la scn['la'].attrs['area'] = self.area_def scn['rgb'] = ds_rgb scn['rgb'].attrs['area'] = self.area_def scn['rgba'] = ds_rgba scn['rgba'].attrs['area'] = self.area_def # Save the images. 
Two images in PNG and two in GeoTIFF scn.save_dataset('l', os.path.join(self.base_dir, 'test_l.png'), writer='simple_image') scn.save_dataset('la', os.path.join(self.base_dir, '20180101_0000_test_la.png'), writer='simple_image') scn.save_dataset('rgb', os.path.join(self.base_dir, '20180101_0000_test_rgb.tif'), writer='geotiff') scn.save_dataset('rgba', os.path.join(self.base_dir, 'test_rgba.tif'), writer='geotiff') self.scn = scn def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_png_scene(self): """Test reading PNG images via satpy.Scene().""" from satpy import Scene fname = os.path.join(self.base_dir, 'test_l.png') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) self.assertEqual(scn.attrs['sensor'], set(['images'])) self.assertEqual(scn.attrs['start_time'], None) self.assertEqual(scn.attrs['end_time'], None) self.assertNotIn('area', scn['image'].attrs) fname = os.path.join(self.base_dir, '20180101_0000_test_la.png') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) data = da.compute(scn['image'].data) self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) self.assertEqual(scn.attrs['sensor'], set(['images'])) self.assertEqual(scn.attrs['start_time'], self.date) self.assertEqual(scn.attrs['end_time'], self.date) self.assertNotIn('area', scn['image'].attrs) self.assertEqual(np.sum(np.isnan(data)), 100) def test_geotiff_scene(self): """Test reading GeoTIFF images via satpy.Scene().""" from satpy import Scene fname = os.path.join(self.base_dir, '20180101_0000_test_rgb.tif') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) self.assertEqual(scn.attrs['sensor'], set(['images'])) self.assertEqual(scn.attrs['start_time'], self.date) self.assertEqual(scn.attrs['end_time'], self.date) self.assertEqual(scn['image'].area, self.area_def) fname = os.path.join(self.base_dir, 'test_rgba.tif') scn = Scene(reader='generic_image', filenames=[fname]) scn.load(['image']) self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) self.assertEqual(scn.attrs['sensor'], set(['images'])) self.assertEqual(scn.attrs['start_time'], None) self.assertEqual(scn.attrs['end_time'], None) self.assertEqual(scn['image'].area, self.area_def) def test_GenericImageFileHandler(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler from satpy.readers.generic_image import mask_image_data fname = os.path.join(self.base_dir, 'test_rgba.tif') fname_info = {'start_time': self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) class Foo(object): """Mock class for dataset id.""" def __init__(self): self.name = 'image' foo = Foo() self.assertTrue(reader.file_content) self.assertEqual(reader.finfo['filename'], fname) self.assertEqual(reader.finfo['start_time'], self.date) self.assertEqual(reader.finfo['end_time'], self.date) self.assertEqual(reader.area, self.area_def) self.assertEqual(reader.get_area_def(None), self.area_def) self.assertEqual(reader.start_time, self.date) self.assertEqual(reader.end_time, self.date) dataset = reader.get_dataset(foo, None) self.assertTrue(isinstance(dataset, xr.DataArray)) self.assertTrue('crs' in dataset.attrs) self.assertTrue('transform' in dataset.attrs)
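# The alpha band created in setUp() zeroes the top-left 10x10 corner, so the
# reader is expected to mask exactly those pixels to NaN: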
self.assertTrue(np.all(np.isnan(dataset.data[:, :10, :10].compute()))) # Test masking of floats data = self.scn['rgba'] self.assertRaises(ValueError, mask_image_data, data / 255.) data = data.astype(np.uint32) self.assertTrue(data.bands.size == 4) data = mask_image_data(data) self.assertTrue(data.bands.size == 3) def suite(): """The test suite for test_writers.""" loader = unittest.TestLoader() my_suite = unittest.TestSuite() my_suite.addTest(loader.loadTestsFromTestCase(TestGenericImage)) return my_suite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_geocat.py000066400000000000000000000214261362525524100226040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.geocat module.""" import os import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array import unittest from unittest import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { '/attr/Platform_Name': filename_info['platform_shortname'], '/attr/Element_Resolution': 2., '/attr/Line_Resolution': 2., '/attr/Subsatellite_Longitude': -70.2 if 'GOES' in filename_info['platform_shortname'] else 140.65, 'pixel_longitude': DEFAULT_LON_DATA, 'pixel_longitude/attr/scale_factor': 1., 'pixel_longitude/attr/add_offset': 0., 'pixel_longitude/shape': DEFAULT_FILE_SHAPE, 'pixel_longitude/attr/_FillValue': np.nan, 'pixel_latitude': DEFAULT_LAT_DATA, 'pixel_latitude/attr/scale_factor': 1., 'pixel_latitude/attr/add_offset': 0., 'pixel_latitude/shape': DEFAULT_FILE_SHAPE, 'pixel_latitude/attr/_FillValue': np.nan, } sensor = { 'HIMAWARI-8': 'himawari8', 'GOES-17': 'goesr', 'GOES-16': 'goesr', 'GOES-13': 'goes', 'GOES-14': 'goes', 'GOES-15': 'goes', }[filename_info['platform_shortname']] file_content['/attr/Sensor_Name'] = sensor if filename_info['platform_shortname'] == 'HIMAWARI-8': file_content['pixel_longitude'] = DEFAULT_LON_DATA + 130. 
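# The +130 degree shift above moves the default 5-45 degree longitudes close
# to the HIMAWARI-8 sub-satellite longitude (140.65 degrees east) declared in
# the file attributes.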
file_content['variable1'] = DEFAULT_FILE_DATA.astype(np.float32) file_content['variable1/attr/_FillValue'] = -1 file_content['variable1/attr/scale_factor'] = 1. file_content['variable1/attr/add_offset'] = 0. file_content['variable1/attr/units'] = '1' file_content['variable1/shape'] = DEFAULT_FILE_SHAPE # data with fill values file_content['variable2'] = np.ma.masked_array( DEFAULT_FILE_DATA.astype(np.float32), mask=np.zeros_like(DEFAULT_FILE_DATA)) file_content['variable2'].mask[::5, ::5] = True file_content['variable2/attr/_FillValue'] = -1 file_content['variable2/attr/scale_factor'] = 1. file_content['variable2/attr/add_offset'] = 0. file_content['variable2/attr/units'] = '1' file_content['variable2/shape'] = DEFAULT_FILE_SHAPE # category file_content['variable3'] = DEFAULT_FILE_DATA.astype(np.byte) file_content['variable3/attr/_FillValue'] = -128 file_content['variable3/attr/flag_meanings'] = "clear water supercooled mixed ice unknown" file_content['variable3/attr/flag_values'] = [0, 1, 2, 3, 4, 5] file_content['variable3/attr/units'] = '1' file_content['variable3/shape'] = DEFAULT_FILE_SHAPE attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') convert_file_content_to_data_array( file_content, attrs=attrs, dims=('z', 'lines', 'elements')) return file_content class TestGEOCATReader(unittest.TestCase): """Test GEOCAT Reader.""" yaml_file = "geocat.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy.config import config_search_paths from satpy.readers.geocat import GEOCATFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(GEOCATFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'geocatL2.GOES-13.2015143.234500.nc', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_all_old_goes(self): """Test loading all test datasets from old GOES files.""" from satpy.readers import load_reader import xarray as xr r = load_reader(self.reader_configs) with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'geocatL2.GOES-13.2015143.234500.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertIs(v.attrs['calibration'], None) self.assertEqual(v.attrs['units'], '1') self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) def test_load_all_himawari8(self): """Test loading all test datasets from H8 NetCDF file.""" from satpy.readers import load_reader from pyresample.geometry import AreaDefinition import xarray as xr r = load_reader(self.reader_configs) with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): 
self.assertIs(v.attrs['calibration'], None) self.assertEqual(v.attrs['units'], '1') self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) def test_load_all_goes17_hdf4(self): """Test loading all test datasets from GOES-17 HDF4 file.""" from satpy.readers import load_reader from pyresample.geometry import AreaDefinition import xarray as xr r = load_reader(self.reader_configs) with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'geocatL2.GOES-17.CONUS.2020041.163130.hdf', ]) r.create_filehandlers(loadables) datasets = r.load(['variable1', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertIs(v.attrs['calibration'], None) self.assertEqual(v.attrs['units'], '1') self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) def suite(): """Create test suite for test_geocat.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestGEOCATReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_geos_area.py000066400000000000000000000137631362525524100232740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
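# Background for the expected values in these tests: an informal sketch of the
# CGMS LRIT/HRIT convention that get_xy_from_linecol() is expected to follow.
# Scan angles in degrees are recovered from line/column numbers as
#     x = (col - coff) * 2**16 / cfac
#     y = (line - loff) * 2**16 / lfac
# e.g. (1800 - 1856) * 2**16 / -13642337 ~= 0.26902, which matches good_xy
# in test_get_xy_from_linecol() below.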
"""Geostationary project utility module tests package.""" import sys from satpy.readers._geos_area import (get_xy_from_linecol, get_area_extent, get_area_definition) import numpy as np if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class TestGEOSProjectionUtil(unittest.TestCase): """Tests for the area utilities.""" def make_pdict_ext(self, typ, scan): """Create a dictionary and extents to use in testing.""" if typ == 1: # Fulldisk pdict = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'ssp_lon': 0.0, 'nlines': 3712, 'ncols': 3712, 'a_name': 'geostest', 'a_desc': 'test area', 'p_id': 'test_area', 'cfac': -13642337, 'lfac': -13642337, 'coff': 1856} if scan == 'N2S': pdict['scandir'] = 'N2S' pdict['loff'] = 1856 extent = (5567248.28340708, 5567248.28340708, -5570248.686685662, -5570248.686685662) if scan == 'S2N': pdict['scandir'] = 'S2N' pdict['loff'] = -1856 extent = (5567248.28340708, 5570248.686685662, -5570248.686685662, -5567248.28340708) if typ == 2: # One sector pdict = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'ssp_lon': 0.0, 'nlines': 464, 'ncols': 3712, 'a_name': 'geostest', 'a_desc': 'test area', 'p_id': 'test_area', 'cfac': -13642337, 'lfac': -13642337, 'coff': 1856} if scan == 'N2S': pdict['scandir'] = 'N2S' pdict['loff'] = 464 extent = (5567248.28340708, 1390686.9196223018, -5570248.686685662, -1500.2016392905093) if scan == 'S2N': pdict['scandir'] = 'S2N' pdict['loff'] = 464 extent = (5567248.28340708, -1390686.9196223018, -5570248.686685662, -2782874.0408838945) return pdict, extent def test_geos_area(self): """Test area extent calculation with N->S scan then S->N scan.""" # North -> South full disk pdict, extent = self.make_pdict_ext(1, 'N2S') aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North full disk pdict, extent = self.make_pdict_ext(1, 'S2N') aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # North -> South one sector pdict, extent = self.make_pdict_ext(2, 'N2S') aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North one sector pdict, extent = self.make_pdict_ext(2, 'S2N') aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) def test_get_xy_from_linecol(self): """Test the scan angle calculation.""" pdict, extent = self.make_pdict_ext(1, 'S2N') good_xy = [0.2690166648133674, -10.837528496767087] factors = (pdict['lfac'], pdict['cfac']) offsets = (pdict['loff'], pdict['coff']) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) pdict, extent = self.make_pdict_ext(2, 'N2S') good_xy = [0.2690166648133674, 0.30744761692956274] factors = (pdict['lfac'], pdict['cfac']) offsets = (pdict['loff'], pdict['coff']) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) def test_get_area_definition(self): """Test the retrieval of the area definition.""" pdict, extent = self.make_pdict_ext(1, 'N2S') good_res = (-3000.4032785810186, -3000.4032785810186) a_def = get_area_definition(pdict, extent) self.assertEqual(a_def.area_id, pdict['a_name']) self.assertEqual(a_def.resolution, good_res) self.assertEqual(a_def.proj_dict['proj'], 'geos') self.assertEqual(a_def.proj_dict['units'], 'm') self.assertEqual(a_def.proj_dict['a'], 6378169) self.assertEqual(a_def.proj_dict['b'], 6356583.8) self.assertEqual(a_def.proj_dict['h'], 35785831) def suite(): """The 
test suite for test_geos_area.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestGEOSProjectionUtil)) return mysuite if __name__ == "__main__": # So you can run tests from this module individually. unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_glm_l2.py000066400000000000000000000151371362525524100225200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The glm_l2 reader tests package.""" import os import numpy as np import xarray as xr import unittest try: from unittest import mock except ImportError: import mock def setup_fake_dataset(): """Create a fake dataset to avoid opening a file.""" # flash_extent_density fed = (np.arange(10.).reshape((2, 5)) + 1.) * 50. fed = (fed + 1.) / 0.5 fed = fed.astype(np.int16) fed = xr.DataArray( fed, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 0, 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min', 'grid_mapping': 'goes_imager_projection', 'standard_name': 'flash_extent_density', 'long_name': 'Flash extent density', } ) x__ = xr.DataArray( range(5), attrs={'scale_factor': 2., 'add_offset': -1.}, dims=('x',), ) y__ = xr.DataArray( range(2), attrs={'scale_factor': -2., 'add_offset': 1.}, dims=('y',), ) proj = xr.DataArray( [], attrs={ 'semi_major_axis': 1., 'semi_minor_axis': 1., 'perspective_point_height': 1., 'longitude_of_projection_origin': -90., 'latitude_of_projection_origin': 0., 'sweep_angle_axis': u'x' } ) fake_dataset = xr.Dataset( data_vars={ 'flash_extent_density': fed, 'x': x__, 'y': y__, 'goes_imager_projection': proj, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02) }, attrs={ "time_coverage_start": "2017-09-20T17:30:40Z", "time_coverage_end": "2017-09-20T17:41:17Z", "spatial_resolution": "2km at nadir", } ) return fake_dataset class TestGLML2FileHandler(unittest.TestCase): """Tests for the GLM L2 reader.""" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_): """Create a fake file handler to test.""" from satpy.readers.glm_l2 import NCGriddedGLML2 fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset self.reader = NCGriddedGLML2('filename', {'platform_shortname': 'G16', 'scene_abbr': 'C', 'scan_mode': 'M3'}, {'filetype': 'glm_l2_imagery'}) def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime self.assertEqual(self.reader.start_time, datetime(2017, 9, 20, 17, 30, 40)) self.assertEqual(self.reader.end_time, datetime(2017, 9, 20, 17, 41, 17)) def test_get_dataset(self): """Test the get_dataset method.""" from satpy import DatasetID key = DatasetID(name='flash_extent_density') res = self.reader.get_dataset(key, {'info': 'info'}) exp = {'instrument_ID': None, 'modifiers': 
(), 'name': 'flash_extent_density', 'orbital_parameters': {'projection_altitude': 1.0, 'projection_latitude': 0.0, 'projection_longitude': -90.0, # 'satellite_nominal_altitude': 35786.02, 'satellite_nominal_latitude': 0.0, 'satellite_nominal_longitude': -89.5}, 'orbital_slot': None, 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, 'scan_mode': 'M3', 'scene_abbr': 'C', 'scene_id': None, 'sensor': 'glm', 'timeline_ID': None, 'grid_mapping': 'goes_imager_projection', 'standard_name': 'flash_extent_density', 'long_name': 'Flash extent density', 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min'} self.assertDictEqual(res.attrs, exp) class TestGLML2Reader(unittest.TestCase): """Test high-level reading functionality of GLM L2 reader.""" yaml_file = "glm_l2.yaml" @mock.patch('satpy.readers.abi_base.xr') def setUp(self, xr_): """Create a fake reader to test.""" from satpy.readers import load_reader from satpy.config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) self.reader = r def test_available_datasets(self): """Test that resolution is added to YAML configured variables.""" # make sure we have some files self.assertTrue(self.reader.file_handlers) available_datasets = self.reader.available_dataset_ids # only flash_extent_density is available in our tests self.assertEqual(len(available_datasets), 1) for ds_id in available_datasets: self.assertEqual(ds_id.resolution, 2000) def suite(): """Create test suite for this module.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestGLML2FileHandler)) mysuite.addTest(loader.loadTestsFromTestCase(TestGLML2Reader)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_goes_imager_hrit.py000066400000000000000000000201061362525524100246430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The hrit msg reader tests package. 
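These tests cover GVAR float and SGS time decoding as well as the prologue and image file handlers of satpy.readers.goes_imager_hrit.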
""" import sys import datetime import numpy as np from xarray import DataArray from satpy.readers.goes_imager_hrit import (make_gvar_float, make_sgs_time, HRITGOESPrologueFileHandler, sgs_time, HRITGOESFileHandler, ALTITUDE) if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class TestGVARFloat(unittest.TestCase): def test_fun(self): test_data = [(-1.0, b"\xbe\xf0\x00\x00"), (-0.1640625, b"\xbf\xd6\x00\x00"), (0.0, b"\x00\x00\x00\x00"), (0.1640625, b"\x40\x2a\x00\x00"), (1.0, b"\x41\x10\x00\x00"), (100.1640625, b"\x42\x64\x2a\x00")] for expected, str_val in test_data: val = np.frombuffer(str_val, dtype='>i4') self.assertEqual(expected, make_gvar_float(val)) class TestMakeSGSTime(unittest.TestCase): def test_fun(self): # 2018-129 (may 9th), 21:33:27.999 tcds = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time) expected = datetime.datetime(2018, 5, 9, 21, 33, 27, 999000) self.assertEqual(make_sgs_time(tcds[0]), expected) test_pro = {'TISTR': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TCurr': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TCLMT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'SubSatLongitude': 100.1640625, 'TCHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TLTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TIPFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TISPC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'ReferenceLatitude': 0.0, 'TIIRT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TLHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TIVIT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'SubSatLatitude': 0.0, 'TIECL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'ReferenceLongitude': 100.1640625, 'TCTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TLRAN': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TINFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TIBBC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'TIONA': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), 'ReferenceDistance': 100.1640625, 'SatelliteID': 15} class TestHRITGOESPrologueFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch('satpy.readers.goes_imager_hrit.recarray2dict') @mock.patch('satpy.readers.goes_imager_hrit.np.fromfile') @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') def test_init(self, new_fh_init, fromfile, recarray2dict): """Setup the hrit file handler for testing.""" recarray2dict.side_effect = lambda x: x[0] new_fh_init.return_value.filename = 'filename' HRITGOESPrologueFileHandler.filename = 'filename' HRITGOESPrologueFileHandler.mda = {'total_header_length': 1} ret = {} the_time = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time)[0] for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: ret[key] = the_time ret['SubSatLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] ret['ReferenceLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] ret['SubSatLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] ret['ReferenceLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] ret['ReferenceDistance'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] ret['SatelliteID'] = 15 fromfile.return_value = [ret] m = mock.mock_open() with mock.patch('satpy.readers.goes_imager_hrit.open', m, 
create=True) as newopen: newopen.return_value.__enter__.return_value.seek.return_value = 1 self.reader = HRITGOESPrologueFileHandler( 'filename', {'platform_shortname': 'GOES15', 'start_time': datetime.datetime(2016, 3, 3, 0, 0), 'service': 'test_service'}, {'filetype': 'info'}) self.assertEqual(test_pro, self.reader.prologue) class TestHRITGOESFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') def setUp(self, new_fh_init): """Setup the hrit file handler for testing.""" blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() mda = {'projection_parameters': {'SSP_longitude': -123.0}, 'spectral_channel_id': 1, 'image_data_function': blob} HRITGOESFileHandler.filename = 'filename' HRITGOESFileHandler.mda = mda self.prologue = mock.MagicMock() self.prologue.prologue = test_pro self.reader = HRITGOESFileHandler('filename', {}, {}, self.prologue) def test_init(self): blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() mda = {'spectral_channel_id': 1, 'projection_parameters': {'SSP_longitude': 100.1640625}, 'image_data_function': blob} self.assertEqual(self.reader.mda, mda) @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset') def test_get_dataset(self, base_get_dataset): key = mock.MagicMock() key.calibration = 'reflectance' base_get_dataset.return_value = DataArray(np.arange(25).reshape(5, 5)) res = self.reader.get_dataset(key, {}) expected = np.array([[np.nan, 0.097752, 0.195503, 0.293255, 0.391007], [0.488759, 0.58651, 0.684262, 0.782014, 0.879765], [0.977517, 1.075269, 1.173021, 1.270772, 1.368524], [1.466276, 1.564027, 1.661779, 1.759531, 1.857283], [1.955034, 2.052786, 2.150538, 2.248289, 2.346041]]) self.assertTrue(np.allclose(res.values, expected, equal_nan=True)) self.assertEqual(res.attrs['units'], '%') self.assertDictEqual(res.attrs['orbital_parameters'], {'projection_longitude': self.reader.mda['projection_parameters']['SSP_longitude'], 'projection_latitude': 0.0, 'projection_altitude': ALTITUDE}) def suite(): """The test suite for test_scene. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestHRITGOESPrologueFileHandler)) mysuite.addTest(loader.loadTestsFromTestCase(TestHRITGOESFileHandler)) mysuite.addTest(loader.loadTestsFromTestCase(TestGVARFloat)) mysuite.addTest(loader.loadTestsFromTestCase(TestMakeSGSTime)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_goes_imager_nc.py000066400000000000000000000610461362525524100243050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
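"""Tests for the satpy.readers.goes_imager_nc module."""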
import datetime import sys import numpy as np import xarray as xr from satpy import DatasetID if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class GOESNCBaseFileHandlerTest(unittest.TestCase): longMessage = True @mock.patch('satpy.readers.goes_imager_nc.xr') @mock.patch.multiple('satpy.readers.goes_imager_nc.GOESNCBaseFileHandler', __abstractmethods__=set(), _get_sector=mock.MagicMock()) def setUp(self, xr_): from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCBaseFileHandler self.coefs = CALIB_COEFS['GOES-15'] # Mock file access to return a fake dataset. self.time = datetime.datetime(2018, 8, 16, 16, 7) self.dummy3d = np.zeros((1, 2, 2)) self.dummy2d = np.zeros((2, 2)) self.band = 1 self.nc = xr.Dataset( {'data': xr.DataArray(self.dummy3d, dims=('time', 'yc', 'xc')), 'lon': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), 'lat': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), 'time': xr.DataArray(data=np.array([self.time], dtype='datetime64[ms]'), dims=('time',)), 'bands': xr.DataArray(data=np.array([self.band]))}, attrs={'Satellite Sensor': 'G-15'}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. self.reader = GOESNCBaseFileHandler(filename='dummy', filename_info={}, filetype_info={}) def test_init(self): """Tests reader initialization""" self.assertEqual(self.reader.nlines, self.dummy2d.shape[0]) self.assertEqual(self.reader.ncols, self.dummy2d.shape[1]) self.assertEqual(self.reader.platform_name, 'GOES-15') self.assertEqual(self.reader.platform_shortname, 'goes15') self.assertEqual(self.reader.gvar_channel, self.band) self.assertIsInstance(self.reader.geo_data, xr.Dataset) def test_get_nadir_pixel(self): """Test identification of the nadir pixel""" from satpy.readers.goes_imager_nc import FULL_DISC earth_mask = np.array([[0, 0, 0, 0], [0, 1, 0, 0], [1, 1, 1, 0], [0, 1, 0, 0], [0, 0, 0, 0]]) nadir_row, nadir_col = self.reader._get_nadir_pixel( earth_mask=earth_mask, sector=FULL_DISC) self.assertEqual((nadir_row, nadir_col), (2, 1), msg='Incorrect nadir pixel') def test_get_earth_mask(self): """Test identification of earth/space pixels""" lat = xr.DataArray([-100, -90, -45, 0, 45, 90, 100]) expected = np.array([0, 1, 1, 1, 1, 1, 0]) mask = self.reader._get_earth_mask(lat) self.assertTrue(np.all(mask == expected), msg='Incorrect identification of earth/space pixel') def test_is_yaw_flip(self): """Test yaw flip identification""" lat_asc = xr.DataArray([[1, 1, 1], [2, 2, 2], [3, 3, 3]]) lat_dsc = xr.DataArray([[3, 3, 3], [2, 2, 3], [1, 1, 1]]) self.assertEqual(self.reader._is_yaw_flip(lat_asc, delta=1), True, msg='Yaw flip not identified') self.assertEqual(self.reader._is_yaw_flip(lat_dsc, delta=1), False, msg='Yaw flip false alarm') def test_viscounts2radiance(self): """Test conversion from VIS counts to radiance""" # Reference data is for detector #1 slope = self.coefs['00_7']['slope'][0] offset = self.coefs['00_7']['offset'][0] counts = xr.DataArray([0, 100, 200, 500, 1000, 1023]) rad_expected = xr.DataArray( [0., 41.54896, 100.06862, 275.6276, 568.2259, 581.685422]) rad = self.reader._viscounts2radiance(counts=counts, slope=slope, offset=offset) self.assertTrue(np.allclose(rad.data, rad_expected.data, atol=1E-6), msg='Incorrect conversion from VIS counts to ' 'radiance') def test_ircounts2radiance(self): """Test conversion from IR counts to 
radiance""" # Test counts counts = xr.DataArray([0, 100, 500, 1000, 1023]) # Reference Radiance from NOAA lookup tables (same for detectors 1 and # 2, see [IR]) rad_expected = { '03_9': np.array([0, 0.140, 1.899, 4.098, 4.199]), '06_5': np.array([0, 1.825, 12.124, 24.998, 25.590]), '10_7': np.array([0, 16.126, 92.630, 188.259, 192.658]), '13_3': np.array([0, 15.084, 87.421, 177.842, 182.001]) } # The input counts are exact, but the accuracy of the output radiance is # limited to 3 digits atol = 1E-3 for ch in sorted(rad_expected.keys()): coefs = self.coefs[ch] rad = self.reader._ircounts2radiance( counts=counts, scale=coefs['scale'], offset=coefs['offset']) self.assertTrue(np.allclose(rad.data, rad_expected[ch], atol=atol), msg='Incorrect conversion from IR counts to ' 'radiance in channel {}'.format(ch)) def test_calibrate_vis(self): """Test VIS calibration""" rad = xr.DataArray([0, 1, 10, 100, 500]) refl_expected = xr.DataArray([0., 0.188852, 1.88852, 18.8852, 94.426]) refl = self.reader._calibrate_vis(radiance=rad, k=self.coefs['00_7']['k']) self.assertTrue(np.allclose(refl.data, refl_expected.data, atol=1E-6), msg='Incorrect conversion from radiance to ' 'reflectance') def test_calibrate_ir(self): """Test IR calibration""" # Test radiance values and corresponding BT from NOAA lookup tables # rev. H (see [IR]). rad = { '03_9': xr.DataArray([0, 0.1, 2, 3.997, 4.199]), '06_5': xr.DataArray([0, 0.821, 12.201, 25.590, 100]), '10_7': xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), '13_3': xr.DataArray([0, 22.679, 90.133, 182.001, 500]) } bt_expected = { '03_9': np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], [np.nan, 253.213, 319.451, 339.983, np.nan]]), '06_5': np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], [np.nan, 200.308, 267.879, 295.008, np.nan]]), '10_7': np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], [np.nan, 200.097, 294.429, 339.953, np.nan]]), '13_3': np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], [np.nan, 200.014, 267.524, 321.990, np.nan]]) } # first row is for detector 1, second for detector 2. # The accuracy of the input radiance is limited to 3 digits so that # the results differ slightly. 
atol = {'03_9': 0.04, '06_5': 0.03, '10_7': 0.01, '13_3': 0.01} for ch in sorted(rad.keys()): coefs = self.coefs[ch] for det in [0, 1]: bt = self.reader._calibrate_ir(radiance=rad[ch], coefs={'a': coefs['a'][det], 'b': coefs['b'][det], 'n': coefs['n'][det], 'btmin': coefs['btmin'], 'btmax': coefs['btmax']}) self.assertTrue( np.allclose(bt.data, bt_expected[ch][det], equal_nan=True, atol=atol[ch]), msg='Incorrect conversion from radiance to brightness ' 'temperature in channel {} detector {}'.format(ch, det)) def test_start_time(self): """Test dataset start time stamp""" self.assertEqual(self.reader.start_time, self.time) def test_end_time(self): """Test dataset end time stamp""" from satpy.readers.goes_imager_nc import (SCAN_DURATION, FULL_DISC, UNKNOWN_SECTOR) expected = { UNKNOWN_SECTOR: self.time, FULL_DISC: self.time + SCAN_DURATION[FULL_DISC] } for sector, end_time in expected.items(): self.reader.sector = sector self.assertEqual(self.reader.end_time, end_time) class GOESNCFileHandlerTest(unittest.TestCase): longMessage = True @mock.patch('satpy.readers.goes_imager_nc.xr') def setUp(self, xr_): from satpy.readers.goes_imager_nc import GOESNCFileHandler, CALIB_COEFS self.coefs = CALIB_COEFS['GOES-15'] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not GOESNCFileHandler._is_vis(ch)]) self.vis_channels = sorted([ch for ch in self.channels if GOESNCFileHandler._is_vis(ch)]) # Mock file access to return a fake dataset. Choose a medium count value # (100) to avoid elements being masked due to invalid # radiance/reflectance/BT nrows = ncols = 300 self.counts = 100 * 32 * np.ones((1, nrows, ncols)) # emulate 10-bit self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {'data': xr.DataArray(data=self.counts, dims=('time', 'yc', 'xc')), 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc')), 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), dims=('time',)), 'bands': xr.DataArray(data=np.array([1]))}, attrs={'Satellite Sensor': 'G-15'}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESNCFileHandler(filename='dummy', filename_info={}, filetype_info={}) def test_get_dataset_coords(self): """Test whether coordinates returned by get_dataset() are correct""" lon = self.reader.get_dataset(key=DatasetID(name='longitude', calibration=None), info={}) lat = self.reader.get_dataset(key=DatasetID(name='latitude', calibration=None), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(lat.to_masked_array() == self.lat), msg='get_dataset() returns invalid latitude') self.assertTrue(np.all(lon.to_masked_array() == self.lon), msg='get_dataset() returns invalid longitude') def test_get_dataset_counts(self): """Test whether counts returned by get_dataset() are correct""" from satpy.readers.goes_imager_nc import ALTITUDE, UNKNOWN_SECTOR self.reader.meta.update({'lon0': -75.0, 'lat0': 0.0, 'sector': UNKNOWN_SECTOR, 'nadir_row': 1, 'nadir_col': 2, 'area_def_uni': 'some_area'}) attrs_exp = {'orbital_parameters': {'projection_longitude': -75.0, 'projection_latitude': 0.0, 'projection_altitude': ALTITUDE, 'yaw_flip': True}, 'satellite_longitude': -75.0, 'satellite_latitude': 0.0, 'satellite_altitude': ALTITUDE, 'platform_name': 'GOES-15', 'sensor': 'goes_imager', 'sector': UNKNOWN_SECTOR, 'nadir_row': 1, 'nadir_col': 2, 'area_def_uniform_sampling': 'some_area'} for ch in self.channels: counts = self.reader.get_dataset( key=DatasetID(name=ch, calibration='counts'), info={}) # ... this only compares the valid (unmasked) elements self.assertTrue(np.all(self.counts/32. == counts.to_masked_array()), msg='get_dataset() returns invalid counts for ' 'channel {}'.format(ch)) # Check attributes self.assertDictEqual(counts.attrs, attrs_exp) def test_get_dataset_masks(self): """Test whether data and coordinates are masked consistently""" # Requires that no element has been masked due to invalid # radiance/reflectance/BT (see setUp()). lon = self.reader.get_dataset(key=DatasetID(name='longitude', calibration=None), info={}) lon_mask = lon.to_masked_array().mask for ch in self.channels: for calib in ('counts', 'radiance', 'reflectance', 'brightness_temperature'): try: data = self.reader.get_dataset( key=DatasetID(name=ch, calibration=calib), info={}) except ValueError: continue data_mask = data.to_masked_array().mask self.assertTrue(np.all(data_mask == lon_mask), msg='get_dataset() returns inconsistently ' 'masked {} in channel {}'.format(calib, ch)) def test_get_dataset_invalid(self): """Test handling of invalid calibrations""" # VIS -> BT args = dict(key=DatasetID(name='00_7', calibration='brightness_temperature'), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # IR -> Reflectance args = dict(key=DatasetID(name='10_7', calibration='reflectance'), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # Unsupported calibration args = dict(key=DatasetID(name='10_7', calibration='invalid'), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) def test_calibrate(self): """Test whether the correct calibration methods are called""" for ch in self.channels: if self.reader._is_vis(ch): calibs = {'radiance': '_viscounts2radiance', 'reflectance': '_calibrate_vis'} else: calibs = {'radiance': '_ircounts2radiance', 'brightness_temperature': '_calibrate_ir'} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: self.reader.calibrate(counts=self.reader.nc['data'], calibration=calib, channel=ch) target_func.assert_called() def test_get_sector(self): """Test sector identification""" from satpy.readers.goes_imager_nc import (FULL_DISC, NORTH_HEMIS_EAST, SOUTH_HEMIS_EAST, NORTH_HEMIS_WEST, SOUTH_HEMIS_WEST, UNKNOWN_SECTOR) shapes_vis = { (10800, 20754): FULL_DISC, (7286, 13900): NORTH_HEMIS_EAST, (2301, 13840): SOUTH_HEMIS_EAST, (5400, 13200): NORTH_HEMIS_WEST, (4300, 11090): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } shapes_ir = { 
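# IR sector shapes are roughly a quarter of the VIS shapes above, consistent
# with the 4 km (IR) vs. 1 km (VIS) nominal pixel sizes of the GOES imager.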
(2700, 5200): FULL_DISC, (1850, 3450): NORTH_HEMIS_EAST, (600, 3500): SOUTH_HEMIS_EAST, (1310, 3300): NORTH_HEMIS_WEST, (1099, 2800): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } shapes = shapes_ir.copy() shapes.update(shapes_vis) for (nlines, ncols), sector_ref in shapes.items(): if (nlines, ncols) in shapes_vis: channel = '00_7' else: channel = '10_7' sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) self.assertEqual(sector, sector_ref, msg='Incorrect sector identification') class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase): longMessage = True @mock.patch('satpy.readers.goes_imager_nc.xr') def setUp(self, xr_): from satpy.readers.goes_imager_nc import GOESEUMNCFileHandler, CALIB_COEFS self.coefs = CALIB_COEFS['GOES-15'] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not GOESEUMNCFileHandler._is_vis(ch)]) self.vis_channels = sorted([ch for ch in self.channels if GOESEUMNCFileHandler._is_vis(ch)]) # Mock file access to return a fake dataset. nrows = ncols = 300 self.radiance = np.ones((1, nrows, ncols)) # IR channels self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {'data': xr.DataArray(data=self.radiance, dims=('time', 'yc', 'xc')), 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), dims=('time',)), 'bands': xr.DataArray(data=np.array([1]))}, attrs={'Satellite Sensor': 'G-15'}) geo_data = xr.Dataset( {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, attrs={'Satellite Sensor': 'G-15'}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_radiance(self): for ch in self.channels: if not self.reader._is_vis(ch): radiance = self.reader.get_dataset( key=DatasetID(name=ch, calibration='radiance'), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.radiance == radiance.to_masked_array()), msg='get_dataset() returns invalid radiance for ' 'channel {}'.format(ch)) def test_calibrate(self): """Test whether the correct calibration methods are called""" for ch in self.channels: if not self.reader._is_vis(ch): calibs = {'brightness_temperature': '_calibrate_ir'} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: self.reader.calibrate(data=self.reader.nc['data'], calibration=calib, channel=ch) target_func.assert_called() def test_get_sector(self): """Test sector identification""" from satpy.readers.goes_imager_nc import (FULL_DISC, NORTH_HEMIS_EAST, SOUTH_HEMIS_EAST, NORTH_HEMIS_WEST, SOUTH_HEMIS_WEST, UNKNOWN_SECTOR) shapes = { (2700, 5200): FULL_DISC, (1850, 3450): NORTH_HEMIS_EAST, (600, 3500): SOUTH_HEMIS_EAST, (1310, 3300): NORTH_HEMIS_WEST, (1099, 2800): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } for (nlines, ncols), sector_ref in shapes.items(): for channel in ('00_7', '10_7'): sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) self.assertEqual(sector, sector_ref, msg='Incorrect sector identification') class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): longMessage = True @mock.patch('satpy.readers.goes_imager_nc.xr') def setUp(self, xr_): from satpy.readers.goes_imager_nc import GOESEUMNCFileHandler, CALIB_COEFS self.coefs = CALIB_COEFS['GOES-15'] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not GOESEUMNCFileHandler._is_vis(ch)]) self.vis_channels = sorted([ch for ch in self.channels if GOESEUMNCFileHandler._is_vis(ch)]) # Mock file access to return a fake dataset. nrows = ncols = 300 self.reflectance = 50 * np.ones((1, nrows, ncols)) # Vis channel self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {'data': xr.DataArray(data=self.reflectance, dims=('time', 'yc', 'xc')), 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), dims=('time',)), 'bands': xr.DataArray(data=np.array([1]))}, attrs={'Satellite Sensor': 'G-15'}) geo_data = xr.Dataset( {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, attrs={'Satellite Sensor': 'G-15'}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_reflectance(self): for ch in self.channels: if self.reader._is_vis(ch): refl = self.reader.get_dataset( key=DatasetID(name=ch, calibration='reflectance'), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.reflectance == refl.to_masked_array()), msg='get_dataset() returns invalid reflectance for ' 'channel {}'.format(ch)) def suite(): """Test suite for GOES netCDF reader""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(GOESNCBaseFileHandlerTest)) mysuite.addTest(loader.loadTestsFromTestCase(GOESNCFileHandlerTest)) mysuite.addTest(loader.loadTestsFromTestCase(GOESNCEUMFileHandlerRadianceTest)) mysuite.addTest(loader.loadTestsFromTestCase(GOESNCEUMFileHandlerReflectanceTest)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_grib.py000066400000000000000000000205451362525524100222660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.grib module. """ import os import sys import numpy as np import xarray as xr if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class FakeMessage(object): """Fake message returned by pygrib.open().message(x).""" def __init__(self, values, proj_params=None, latlons=None, **attrs): super(FakeMessage, self).__init__() self.attrs = attrs self.values = values if proj_params is None: proj_params = {'a': 6371229, 'b': 6371229, 'proj': 'cyl'} self.projparams = proj_params self._latlons = latlons def latlons(self): return self._latlons def __getitem__(self, item): return self.attrs[item] def valid_key(self, key): return True class FakeGRIB(object): """Fake GRIB file returned by pygrib.open.""" def __init__(self, messages=None, proj_params=None, latlons=None): super(FakeGRIB, self).__init__() if messages is not None: self._messages = messages else: self._messages = [ FakeMessage( values=np.arange(25.).reshape((5, 5)), name='TEST', shortName='t', level=100, pressureUnits='hPa', cfName='air_temperature', units='K', dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, modelName='unknown', minimum=100., maximum=200., typeOfLevel='isobaricInhPa', jScansPositively=0, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=np.arange(25.).reshape((5, 5)), name='TEST', shortName='t', level=200, pressureUnits='hPa', cfName='air_temperature', units='K', dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, modelName='unknown', minimum=100., maximum=200., typeOfLevel='isobaricInhPa', jScansPositively=0, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=np.arange(25.).reshape((5, 5)), name='TEST', shortName='t', level=300, pressureUnits='hPa', cfName='air_temperature', 
units='K', dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, modelName='unknown', minimum=100., maximum=200., typeOfLevel='isobaricInhPa', jScansPositively=0, proj_params=proj_params, latlons=latlons, ), ] self.messages = len(self._messages) def message(self, msg_num): return self._messages[msg_num - 1] def seek(self, loc): return def __iter__(self): return iter(self._messages) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): pass class TestGRIBReader(unittest.TestCase): """Test GRIB Reader""" yaml_file = "grib.yaml" def setUp(self): """Wrap pygrib to read fake data""" from satpy.config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib sys.modules['pygrib'] = mock.MagicMock() def tearDown(self): """Re-enable pygrib import.""" sys.modules['pygrib'] = self.orig_pygrib @mock.patch('satpy.readers.grib.pygrib') def test_init(self, pg): """Test basic init with no extra parameters.""" pg.open.return_value = FakeGRIB() from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'gfs.t18z.sfluxgrbf106.grib2', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) @mock.patch('satpy.readers.grib.pygrib') def test_load_all(self, pg): """Test loading all test datasets""" pg.open.return_value = FakeGRIB() from satpy.readers import load_reader from satpy import DatasetID r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'gfs.t18z.sfluxgrbf106.grib2', ]) r.create_filehandlers(loadables) datasets = r.load([ DatasetID(name='t', level=100), DatasetID(name='t', level=200), DatasetID(name='t', level=300)]) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') self.assertIsInstance(v, xr.DataArray) @mock.patch('satpy.readers.grib.pygrib') def test_load_all_lcc(self, pg): """Test loading all test datasets with lcc projections""" lons = np.array([ [12.19, 0, 0, 0, 14.34208538], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [54.56534318, 0, 0, 0, 57.32843565]]) lats = np.array([ [-133.459, 0, 0, 0, -65.12555139], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [-152.8786225, 0, 0, 0, -49.41598659]]) pg.open.return_value = FakeGRIB( proj_params={ 'a': 6371229, 'b': 6371229, 'proj': 'lcc', 'lon_0': 265.0, 'lat_0': 25.0, 'lat_1': 25.0, 'lat_2': 25.0}, latlons=(lats, lons)) from satpy.readers import load_reader from satpy import DatasetID r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'gfs.t18z.sfluxgrbf106.grib2', ]) r.create_filehandlers(loadables) datasets = r.load([ DatasetID(name='t', level=100), DatasetID(name='t', level=200), DatasetID(name='t', level=300)]) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') self.assertIsInstance(v, xr.DataArray) def suite(): """The test suite for test_grib.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestGRIBReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_hdf4_utils.py000066400000000000000000000121121362525524100233770ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 
2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.hdf4_utils module.""" import os import sys import numpy as np import xarray as xr try: from satpy.readers.hdf4_utils import HDF4FileHandler except ImportError: # fake the import so we can at least run the tests in this file HDF4FileHandler = object if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class FakeHDF4FileHandler(HDF4FileHandler): """Swap-in NetCDF4 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" if HDF4FileHandler is object: raise ImportError("Base 'HDF4FileHandler' could not be " "imported.") super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) self.file_content.update(kwargs) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestHDF4FileHandler(unittest.TestCase): """Test HDF4 File Handler Utility class.""" def setUp(self): """Create a test HDF4 file.""" from pyhdf.SD import SD, SDC h = SD('test.hdf', SDC.WRITE | SDC.CREATE | SDC.TRUNC) data = np.arange(10. 
* 100, dtype=np.float32).reshape((10, 100)) v1 = h.create('ds1_f', SDC.FLOAT32, (10, 100)) v1[:] = data v2 = h.create('ds1_i', SDC.INT16, (10, 100)) v2[:] = data.astype(np.int16) # Add attributes h.test_attr_str = 'test_string' h.test_attr_int = 0 h.test_attr_float = 1.2 # h.test_attr_str_arr = np.array(b"test_string2") for d in [v1, v2]: d.test_attr_str = 'test_string' d.test_attr_int = 0 d.test_attr_float = 1.2 h.end() def tearDown(self): """Remove the previously created test file.""" os.remove('test.hdf') def test_all_basic(self): """Test everything about the HDF4 class.""" from satpy.readers.hdf4_utils import HDF4FileHandler file_handler = HDF4FileHandler('test.hdf', {}, {}) for ds in ('ds1_f', 'ds1_i'): self.assertEqual(file_handler[ds + '/dtype'], np.float32 if ds.endswith('f') else np.int16) self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) # make sure that the dtype is an instance, not the class self.assertEqual(file_handler[ds].dtype.itemsize, 4 if ds.endswith('f') else 2) attrs = file_handler[ds].attrs self.assertEqual(attrs.get('test_attr_str'), 'test_string') self.assertEqual(attrs.get('test_attr_int'), 0) self.assertEqual(attrs.get('test_attr_float'), 1.2) self.assertIsInstance(file_handler['/attr/test_attr_str'], str) self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') # self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') self.assertIsInstance(file_handler['/attr/test_attr_int'], int) self.assertEqual(file_handler['/attr/test_attr_int'], 0) self.assertIsInstance(file_handler['/attr/test_attr_float'], float) self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) self.assertIsInstance(file_handler.get('ds1_f'), xr.DataArray) self.assertIsNone(file_handler.get('fake_ds')) self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') self.assertTrue('ds1_f' in file_handler) self.assertFalse('fake_ds' in file_handler) def suite(): """Create the test suite for test_hdf4_utils.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestHDF4FileHandler)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_hdf5_utils.py000066400000000000000000000134351362525524100234110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
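# Informal usage sketch (illustrative only, mirroring the assertions below):
# HDF5FileHandler exposes file contents through path-like keys, e.g.
#     fh = HDF5FileHandler('test.h5', {}, {})
#     fh['test_group/ds1_f']       # -> xarray.DataArray with the data
#     fh['ds2_f/shape']            # -> (10, 100)
#     fh['/attr/test_attr_str']    # -> 'test_string'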
"""Module for testing the satpy.readers.hdf5_utils module.""" import os import numpy as np try: from satpy.readers.hdf5_utils import HDF5FileHandler except ImportError: # fake the import so we can at least run the tests in this file HDF5FileHandler = object import unittest class FakeHDF5FileHandler(HDF5FileHandler): """Swap HDF5 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" if HDF5FileHandler is object: raise ImportError("Base 'HDF5FileHandler' could not be " "imported.") filename = str(filename) super(HDF5FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) self.file_content.update(kwargs) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestHDF5FileHandler(unittest.TestCase): """Test HDF5 File Handler Utility class.""" def setUp(self): """Create a test HDF5 file.""" import h5py h = h5py.File('test.h5', 'w') # Create Group g1 = h.create_group('test_group') # Add datasets ds1_f = g1.create_dataset('ds1_f', shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) ds1_i = g1.create_dataset('ds1_i', shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) ds2_f = h.create_dataset('ds2_f', shape=(10, 100), dtype=np.float32, data=np.arange(10. 
* 100).reshape((10, 100))) ds2_i = h.create_dataset('ds2_i', shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) # Add attributes # shows up as a scalar array of bytes (shape=(), size=1) h.attrs['test_attr_str'] = 'test_string' h.attrs['test_attr_int'] = 0 h.attrs['test_attr_float'] = 1.2 # shows up as a numpy bytes object h.attrs['test_attr_str_arr'] = np.array(b"test_string2") g1.attrs['test_attr_str'] = 'test_string' g1.attrs['test_attr_int'] = 0 g1.attrs['test_attr_float'] = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: d.attrs['test_attr_str'] = 'test_string' d.attrs['test_attr_int'] = 0 d.attrs['test_attr_float'] = 1.2 d.attrs['test_ref'] = d.ref h.close() def tearDown(self): """Remove the previously created test file.""" os.remove('test.h5') def test_all_basic(self): """Test everything about the HDF5 class.""" from satpy.readers.hdf5_utils import HDF5FileHandler import xarray as xr file_handler = HDF5FileHandler('test.h5', {}, {}) for ds in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith('f') else np.int32) self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) self.assertEqual(file_handler[ds + '/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler[ds + '/attr/test_attr_int'], 0) self.assertEqual(file_handler[ds + '/attr/test_attr_float'], 1.2) self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') self.assertEqual(file_handler['/attr/test_attr_int'], 0) self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) self.assertIsInstance(file_handler.get('ds2_f'), xr.DataArray) self.assertIsNone(file_handler.get('fake_ds')) self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') self.assertTrue('ds2_f' in file_handler) self.assertFalse('fake_ds' in file_handler) self.assertIsInstance(file_handler['ds2_f/attr/test_ref'], np.ndarray) def suite(): """Test suite for test_hdf5_utils.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestHDF5FileHandler)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_hdfeos_base.py000066400000000000000000000553751362525524100236160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
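# The large string constants below are EOS ODL/PVL-style metadata blobs as
# found in MODIS HDF-EOS files; HDFEOSBaseFileReader.read_mda (exercised by
# TestReadMDA below) parses them into nested dicts.  An illustrative helper,
# not called by the test suite, showing how the parsed tree is navigated:
def _example_read_mda_usage():
    """Sketch: parse 'nrt_mda' (defined below) and pull one value out."""
    from satpy.readers.hdfeos_base import HDFEOSBaseFileReader
    tree = HDFEOSBaseFileReader.read_mda(nrt_mda)
    # Per 'nrt_mda_dict' below, this resolves to the string 'Day'.
    # (nrt_mda is a module-level name, so it is looked up at call time.)
    return tree['INVENTORYMETADATA']['ECSDATAGRANULE']['DAYNIGHTFLAG']['VALUE']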
import unittest nrt_mda = '''GROUP = INVENTORYMETADATA GROUPTYPE = MASTERGROUP GROUP = ECSDATAGRANULE OBJECT = REPROCESSINGPLANNED NUM_VAL = 1 VALUE = "further update is anticipated" END_OBJECT = REPROCESSINGPLANNED OBJECT = REPROCESSINGACTUAL NUM_VAL = 1 VALUE = "Near Real Time" END_OBJECT = REPROCESSINGACTUAL OBJECT = LOCALGRANULEID NUM_VAL = 1 VALUE = "MYD03.A2019051.1225.061.2019051131153.NRT.hdf" END_OBJECT = LOCALGRANULEID OBJECT = PRODUCTIONDATETIME NUM_VAL = 1 VALUE = "2019-02-20T13:11:53.000Z" END_OBJECT = PRODUCTIONDATETIME OBJECT = DAYNIGHTFLAG NUM_VAL = 1 VALUE = "Day" END_OBJECT = DAYNIGHTFLAG OBJECT = LOCALVERSIONID NUM_VAL = 1 VALUE = "6.0.4" END_OBJECT = LOCALVERSIONID END_GROUP = ECSDATAGRANULE GROUP = MEASUREDPARAMETER OBJECT = MEASUREDPARAMETERCONTAINER CLASS = "1" OBJECT = PARAMETERNAME CLASS = "1" NUM_VAL = 1 VALUE = "Geolocation" END_OBJECT = PARAMETERNAME GROUP = QAFLAGS CLASS = "1" OBJECT = AUTOMATICQUALITYFLAG NUM_VAL = 1 CLASS = "1" VALUE = "Passed" END_OBJECT = AUTOMATICQUALITYFLAG OBJECT = AUTOMATICQUALITYFLAGEXPLANATION NUM_VAL = 1 CLASS = "1" VALUE = "Set to 'Failed' if processing error occurred, set to 'Passed' otherwise" END_OBJECT = AUTOMATICQUALITYFLAGEXPLANATION OBJECT = SCIENCEQUALITYFLAG NUM_VAL = 1 VALUE = "Not Investigated" CLASS = "1" END_OBJECT = SCIENCEQUALITYFLAG END_GROUP = QAFLAGS GROUP = QASTATS CLASS = "1" OBJECT = QAPERCENTMISSINGDATA NUM_VAL = 1 CLASS = "1" VALUE = 0 END_OBJECT = QAPERCENTMISSINGDATA OBJECT = QAPERCENTOUTOFBOUNDSDATA NUM_VAL = 1 CLASS = "1" VALUE = 0 END_OBJECT = QAPERCENTOUTOFBOUNDSDATA END_GROUP = QASTATS END_OBJECT = MEASUREDPARAMETERCONTAINER END_GROUP = MEASUREDPARAMETER GROUP = ORBITCALCULATEDSPATIALDOMAIN OBJECT = ORBITCALCULATEDSPATIALDOMAINCONTAINER CLASS = "1" OBJECT = ORBITNUMBER CLASS = "1" NUM_VAL = 1 VALUE = 89393 END_OBJECT = ORBITNUMBER OBJECT = EQUATORCROSSINGLONGITUDE CLASS = "1" NUM_VAL = 1 VALUE = -151.260740805733 END_OBJECT = EQUATORCROSSINGLONGITUDE OBJECT = EQUATORCROSSINGTIME CLASS = "1" NUM_VAL = 1 VALUE = "12:49:52.965727" END_OBJECT = EQUATORCROSSINGTIME OBJECT = EQUATORCROSSINGDATE CLASS = "1" NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = EQUATORCROSSINGDATE END_OBJECT = ORBITCALCULATEDSPATIALDOMAINCONTAINER END_GROUP = ORBITCALCULATEDSPATIALDOMAIN GROUP = COLLECTIONDESCRIPTIONCLASS OBJECT = SHORTNAME NUM_VAL = 1 VALUE = "MYD03" END_OBJECT = SHORTNAME OBJECT = VERSIONID NUM_VAL = 1 VALUE = 61 END_OBJECT = VERSIONID END_GROUP = COLLECTIONDESCRIPTIONCLASS GROUP = INPUTGRANULE OBJECT = INPUTPOINTER NUM_VAL = 8 VALUE = ("MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf", "MYD03LUT.coeff_V6.1.4", "PM1EPHND_NRT.A2019051.1220.061.2019051125628", "PM1EPHND_NRT.A2019051.1225.061.2019051125628", "PM1EPHND_NRT.A2019051.1230.061.2019051125628", " PM1ATTNR_NRT.A2019051.1220.061.2019051125628", "PM1ATTNR_NRT.A2019051.1225.061.2019051125628", "PM1ATTNR_NRT.A2019051.1230.061.2019051125628") END_OBJECT = INPUTPOINTER END_GROUP = INPUTGRANULE GROUP = SPATIALDOMAINCONTAINER GROUP = HORIZONTALSPATIALDOMAINCONTAINER GROUP = GPOLYGON OBJECT = GPOLYGONCONTAINER CLASS = "1" GROUP = GRING CLASS = "1" OBJECT = EXCLUSIONGRINGFLAG NUM_VAL = 1 CLASS = "1" VALUE = "N" END_OBJECT = EXCLUSIONGRINGFLAG END_GROUP = GRING GROUP = GRINGPOINT CLASS = "1" OBJECT = GRINGPOINTLONGITUDE NUM_VAL = 4 CLASS = "1" VALUE = (25.3839329817764, 1.80418778807854, -6.50842421663422, 23.0260060198343) END_OBJECT = GRINGPOINTLONGITUDE OBJECT = GRINGPOINTLATITUDE NUM_VAL = 4 CLASS = "1" VALUE = (29.5170117594673, 26.1480434828114, 
43.2445462598877, 47.7959787025408) END_OBJECT = GRINGPOINTLATITUDE OBJECT = GRINGPOINTSEQUENCENO NUM_VAL = 4 CLASS = "1" VALUE = (1, 2, 3, 4) END_OBJECT = GRINGPOINTSEQUENCENO END_GROUP = GRINGPOINT END_OBJECT = GPOLYGONCONTAINER END_GROUP = GPOLYGON END_GROUP = HORIZONTALSPATIALDOMAINCONTAINER END_GROUP = SPATIALDOMAINCONTAINER GROUP = RANGEDATETIME OBJECT = RANGEBEGINNINGTIME NUM_VAL = 1 VALUE = "12:25:00.000000" END_OBJECT = RANGEBEGINNINGTIME OBJECT = RANGEENDINGTIME NUM_VAL = 1 VALUE = "12:30:00.000000" END_OBJECT = RANGEENDINGTIME OBJECT = RANGEBEGINNINGDATE NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = RANGEBEGINNINGDATE OBJECT = RANGEENDINGDATE NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = RANGEENDINGDATE END_GROUP = RANGEDATETIME GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER CLASS = "1" OBJECT = ASSOCIATEDSENSORSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "MODIS" END_OBJECT = ASSOCIATEDSENSORSHORTNAME OBJECT = ASSOCIATEDPLATFORMSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "Aqua" END_OBJECT = ASSOCIATEDPLATFORMSHORTNAME OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "MODIS" END_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR GROUP = PGEVERSIONCLASS OBJECT = PGEVERSION NUM_VAL = 1 VALUE = "6.1.4" END_OBJECT = PGEVERSION END_GROUP = PGEVERSIONCLASS GROUP = ADDITIONALATTRIBUTES OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "1" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "1" NUM_VAL = 1 VALUE = "GRANULENUMBER" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "1" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "1" VALUE = "151" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "2" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "2" NUM_VAL = 1 VALUE = "SCI_STATE" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "2" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "2" VALUE = "1" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "3" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "3" NUM_VAL = 1 VALUE = "SCI_ABNORM" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "3" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "3" VALUE = "1" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "5" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "5" NUM_VAL = 1 VALUE = "PROCESSVERSION" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "5" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "5" VALUE = "6.1.0" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "4" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "4" NUM_VAL = 1 VALUE = "GEO_EST_RMS_ERROR" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "4" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "4" VALUE = "75 " END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "6" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "6" NUM_VAL = 1 VALUE = "identifier_product_doi" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "6" OBJECT = 
PARAMETERVALUE NUM_VAL = 1 CLASS = "6" VALUE = "10.5067/MODIS/MYD03.NRT.061" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "7" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "7" NUM_VAL = 1 VALUE = "identifier_product_doi_authority" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "7" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "7" VALUE = "http://dx.doi.org" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER END_GROUP = ADDITIONALATTRIBUTES END_GROUP = INVENTORYMETADATA END''' # noqa: E501 nrt_mda_dict = { 'INVENTORYMETADATA': { 'ADDITIONALATTRIBUTES': { 'ADDITIONALATTRIBUTESCONTAINER': { 'ADDITIONALATTRIBUTENAME': { 'VALUE': 'identifier_product_doi_authority' }, 'INFORMATIONCONTENT': { 'PARAMETERVALUE': { 'VALUE': 'http://dx.doi.org' } } } }, 'ASSOCIATEDPLATFORMINSTRUMENTSENSOR': { 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER': { 'ASSOCIATEDINSTRUMENTSHORTNAME': { 'VALUE': 'MODIS' }, 'ASSOCIATEDPLATFORMSHORTNAME': { 'VALUE': 'Aqua' }, 'ASSOCIATEDSENSORSHORTNAME': { 'VALUE': 'MODIS' } } }, 'COLLECTIONDESCRIPTIONCLASS': { 'SHORTNAME': { 'VALUE': 'MYD03' }, 'VERSIONID': { 'VALUE': 61 } }, 'ECSDATAGRANULE': { 'DAYNIGHTFLAG': { 'VALUE': 'Day' }, 'LOCALGRANULEID': { 'VALUE': 'MYD03.A2019051.1225.061.2019051131153.NRT.hdf' }, 'LOCALVERSIONID': { 'VALUE': '6.0.4' }, 'PRODUCTIONDATETIME': { 'VALUE': '2019-02-20T13:11:53.000Z' }, 'REPROCESSINGACTUAL': { 'VALUE': 'Near ' 'Real ' 'Time' }, 'REPROCESSINGPLANNED': { 'VALUE': 'further ' 'update ' 'is ' 'anticipated' } }, 'GROUPTYPE': 'MASTERGROUP', 'INPUTGRANULE': { 'INPUTPOINTER': { 'VALUE': ('MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf', 'MYD03LUT.coeff_V6.1.4', 'PM1EPHND_NRT.A2019051.1220.061.2019051125628', 'PM1EPHND_NRT.A2019051.1225.061.2019051125628', 'PM1EPHND_NRT.A2019051.1230.061.2019051125628', ' ' 'PM1ATTNR_NRT.A2019051.1220.061.2019051125628', 'PM1ATTNR_NRT.A2019051.1225.061.2019051125628', 'PM1ATTNR_NRT.A2019051.1230.061.2019051125628') } }, 'MEASUREDPARAMETER': { 'MEASUREDPARAMETERCONTAINER': { 'PARAMETERNAME': { 'VALUE': 'Geolocation' }, 'QAFLAGS': { 'AUTOMATICQUALITYFLAG': { 'VALUE': 'Passed' }, 'AUTOMATICQUALITYFLAGEXPLANATION': { 'VALUE': 'Set ' 'to ' "'Failed' " 'if ' 'processing ' 'error ' 'occurred, ' 'set ' 'to ' "'Passed' " 'otherwise' }, 'SCIENCEQUALITYFLAG': { 'VALUE': 'Not ' 'Investigated' } }, 'QASTATS': { 'QAPERCENTMISSINGDATA': { 'VALUE': 0 }, 'QAPERCENTOUTOFBOUNDSDATA': { 'VALUE': 0 } } } }, 'ORBITCALCULATEDSPATIALDOMAIN': { 'ORBITCALCULATEDSPATIALDOMAINCONTAINER': { 'EQUATORCROSSINGDATE': { 'VALUE': '2019-02-20' }, 'EQUATORCROSSINGLONGITUDE': { 'VALUE': -151.260740805733 }, 'EQUATORCROSSINGTIME': { 'VALUE': '12:49:52.965727' }, 'ORBITNUMBER': { 'VALUE': 89393 } } }, 'PGEVERSIONCLASS': { 'PGEVERSION': { 'VALUE': '6.1.4' } }, 'RANGEDATETIME': { 'RANGEBEGINNINGDATE': { 'VALUE': '2019-02-20' }, 'RANGEBEGINNINGTIME': { 'VALUE': '12:25:00.000000' }, 'RANGEENDINGDATE': { 'VALUE': '2019-02-20' }, 'RANGEENDINGTIME': { 'VALUE': '12:30:00.000000' } }, 'SPATIALDOMAINCONTAINER': { 'HORIZONTALSPATIALDOMAINCONTAINER': { 'GPOLYGON': { 'GPOLYGONCONTAINER': { 'GRING': { 'EXCLUSIONGRINGFLAG': { 'VALUE': 'N' } }, 'GRINGPOINT': { 'GRINGPOINTLATITUDE': { 'VALUE': (29.5170117594673, 26.1480434828114, 43.2445462598877, 47.7959787025408) }, 'GRINGPOINTLONGITUDE': { 'VALUE': (25.3839329817764, 1.80418778807854, -6.50842421663422, 23.0260060198343) }, 
'GRINGPOINTSEQUENCENO': { 'VALUE': (1, 2, 3, 4) } } } } } } } } metadata_modisl1b = """ GROUP=SwathStructure GROUP=SWATH_1 SwathName="MODIS_SWATH_Type_L1B" GROUP=DimensionMap OBJECT=DimensionMap_1 GeoDimension="2*nscans" DataDimension="10*nscans" Offset=2 Increment=5 END_OBJECT=DimensionMap_1 OBJECT=DimensionMap_2 GeoDimension="1KM_geo_dim" DataDimension="Max_EV_frames" Offset=2 Increment=5 END_OBJECT=DimensionMap_2 END_GROUP=DimensionMap END_GROUP=SWATH_1 END_GROUP=SwathStructure END """ # noqa: E501 metadata_modisl2 = """ GROUP=SwathStructure GROUP=SWATH_1 SwathName="mod35" GROUP=DimensionMap OBJECT=DimensionMap_1 GeoDimension="Cell_Across_Swath_5km" DataDimension="Cell_Across_Swath_1km" Offset=2 Increment=5 END_OBJECT=DimensionMap_1 OBJECT=DimensionMap_2 GeoDimension="Cell_Along_Swath_5km" DataDimension="Cell_Along_Swath_1km" Offset=2 Increment=5 END_OBJECT=DimensionMap_2 END_GROUP=DimensionMap GROUP=IndexDimensionMap END_GROUP=IndexDimensionMap END_GROUP=SWATH_1 END_GROUP=SwathStructure END """ # noqa: E501 class TestReadMDA(unittest.TestCase): def test_read_mda(self): from satpy.readers.hdfeos_base import HDFEOSBaseFileReader res = HDFEOSBaseFileReader.read_mda(nrt_mda) self.assertDictEqual(res, nrt_mda_dict) def test_read_mda_geo_resolution(self): from satpy.readers.hdfeos_base import HDFEOSGeoReader resolution_l1b = HDFEOSGeoReader.read_geo_resolution( HDFEOSGeoReader.read_mda(metadata_modisl1b) ) self.assertEqual(resolution_l1b, 1000) resolution_l2 = HDFEOSGeoReader.read_geo_resolution( HDFEOSGeoReader.read_mda(metadata_modisl2) ) self.assertEqual(resolution_l2, 5000) def suite(): """The test suite.""" loader = unittest.TestLoader() my_suite = unittest.TestSuite() my_suite.addTest(loader.loadTestsFromTestCase(TestReadMDA)) return my_suite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_hrit_base.py000066400000000000000000000152351362525524100233030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The HRIT base reader tests package. 
""" import sys from datetime import datetime import os from tempfile import gettempdir, NamedTemporaryFile import numpy as np from satpy.readers.hrit_base import HRITFileHandler, get_xritdecompress_cmd, get_xritdecompress_outfile, decompress if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class TestHRITDecompress(unittest.TestCase): """Test the on-the-fly decompression.""" def test_xrit_cmd(self): old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) os.environ['XRIT_DECOMPRESS_PATH'] = '/path/to/my/bin' self.assertRaises(IOError, get_xritdecompress_cmd) os.environ['XRIT_DECOMPRESS_PATH'] = gettempdir() self.assertRaises(IOError, get_xritdecompress_cmd) with NamedTemporaryFile() as fd: os.environ['XRIT_DECOMPRESS_PATH'] = fd.name fname = fd.name res = get_xritdecompress_cmd() if old_env is not None: os.environ['XRIT_DECOMPRESS_PATH'] = old_env else: os.environ.pop('XRIT_DECOMPRESS_PATH') self.assertEqual(fname, res) def test_xrit_outfile(self): stdout = [b"Decompressed file: bla.__\n"] outfile = get_xritdecompress_outfile(stdout) self.assertEqual(outfile, b'bla.__') @mock.patch('satpy.readers.hrit_base.Popen') def test_decompress(self, popen): popen.return_value.returncode = 0 popen.return_value.communicate.return_value = [b"Decompressed file: bla.__\n"] old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) with NamedTemporaryFile() as fd: os.environ['XRIT_DECOMPRESS_PATH'] = fd.name res = decompress('bla.C_') if old_env is not None: os.environ['XRIT_DECOMPRESS_PATH'] = old_env else: os.environ.pop('XRIT_DECOMPRESS_PATH') self.assertEqual(res, os.path.join('.', 'bla.__')) class TestHRITFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch('satpy.readers.hrit_base.np.fromfile') def setUp(self, fromfile): """Setup the hrit file handler for testing.""" m = mock.mock_open() fromfile.return_value = np.array([(1, 2)], dtype=[('total_header_length', int), ('hdr_id', int)]) with mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen: newopen.return_value.__enter__.return_value.tell.return_value = 1 self.reader = HRITFileHandler('filename', {'platform_shortname': 'MSG3', 'start_time': datetime(2016, 3, 3, 0, 0)}, {'filetype': 'info'}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) ncols = 3712 nlines = 464 nbits = 10 self.reader.mda['number_of_bits_per_pixel'] = nbits self.reader.mda['number_of_lines'] = nlines self.reader.mda['number_of_columns'] = ncols self.reader.mda['data_field_length'] = nlines * ncols * nbits self.reader.mda['cfac'] = 5 self.reader.mda['lfac'] = 5 self.reader.mda['coff'] = 10 self.reader.mda['loff'] = 10 self.reader.mda['projection_parameters'] = {} self.reader.mda['projection_parameters']['a'] = 6378169.0 self.reader.mda['projection_parameters']['b'] = 6356583.8 self.reader.mda['projection_parameters']['h'] = 35785831.0 self.reader.mda['projection_parameters']['SSP_longitude'] = 44 def test_get_xy_from_linecol(self): """Test get_xy_from_linecol.""" x__, y__ = self.reader.get_xy_from_linecol(0, 0, (10, 10), (5, 5)) self.assertEqual(-131072, x__) self.assertEqual(-131072, y__) x__, y__ = self.reader.get_xy_from_linecol(10, 10, (10, 10), (5, 5)) self.assertEqual(0, x__) self.assertEqual(0, y__) x__, y__ = self.reader.get_xy_from_linecol(20, 20, (10, 10), (5, 5)) self.assertEqual(131072, x__) self.assertEqual(131072, y__) def test_get_area_extent(self): res = self.reader.get_area_extent((20, 20), (10, 10), (5, 5), 33) exp = 
(-71717.44995740513, -71717.44995740513, 79266.655216079365, 79266.655216079365) self.assertTupleEqual(res, exp) def test_get_area_def(self): area = self.reader.get_area_def('VIS06') proj_dict = area.proj_dict self.assertEqual(proj_dict['a'], 6378169.0) self.assertEqual(proj_dict['b'], 6356583.8) self.assertEqual(proj_dict['h'], 35785831.0) self.assertEqual(proj_dict['lon_0'], 44.0) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') self.assertEqual(area.area_extent, (-77771774058.38356, -77771774058.38356, 30310525626438.438, 3720765401003.719)) @mock.patch('satpy.readers.hrit_base.np.memmap') def test_read_band(self, memmap): nbits = self.reader.mda['number_of_bits_per_pixel'] memmap.return_value = np.random.randint(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band('VIS006', None) self.assertEqual(res.compute().shape, (464, 3712)) def suite(): """The test suite for test_scene. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestHRITFileHandler)) mysuite.addTest(loader.loadTestsFromTestCase(TestHRITDecompress)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_hsaf_grib.py000066400000000000000000000142071362525524100232650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.grib module. 
""" import sys import numpy as np from datetime import datetime if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class FakeMessage(object): """Fake message returned by pygrib.open().message(x).""" def __init__(self, values, proj_params=None, latlons=None, **attrs): super(FakeMessage, self).__init__() self.attrs = attrs self.values = values if proj_params is None: proj_params = {'a': 6378140.0, 'b': 6356755.0, 'lat_0': 0.0, 'lon_0': 0.0, 'proj': 'geos', 'h': 35785830.098} self.projparams = proj_params self._latlons = latlons def latlons(self): return self._latlons def __getitem__(self, item): return self.attrs[item] def valid_key(self, key): return True class FakeGRIB(object): """Fake GRIB file returned by pygrib.open.""" def __init__(self, messages=None, proj_params=None, latlons=None): super(FakeGRIB, self).__init__() if messages is not None: self._messages = messages else: self._messages = [ FakeMessage( values=np.arange(25.).reshape((5, 5)), name='Instantaneous rain rate', shortName='irrate', cfName='unknown', units='kg m**-2 s**-1', dataDate=20190603, dataTime=1645, missingValue=9999, modelName='unknown', centreDescription='Rome', minimum=0.0, maximum=0.01475, Nx=3712, Ny=3712, NrInRadiusOfEarth=6.6107, dx=3622, dy=3610, XpInGridLengths=1856.0, YpInGridLengths=1856.0, jScansPositively=0, proj_params=proj_params, latlons=latlons, ) ] self.messages = len(self._messages) def message(self, msg_num): return self._messages[msg_num - 1] def seek(self, loc): return def __iter__(self): return iter(self._messages) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): pass class TestHSAFFileHandler(unittest.TestCase): """Test HSAF Reader""" def setUp(self): """Wrap pygrib to read fake data""" try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib sys.modules['pygrib'] = mock.MagicMock() def tearDown(self): """Re-enable pygrib import.""" sys.modules['pygrib'] = self.orig_pygrib @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) def test_init(self, pg): """ Test the init function, ensure that the correct dates and metadata are returned """ pg.open.return_value = FakeGRIB() correct_dt = datetime(2019, 6, 3, 16, 45, 0) from satpy.readers.hsaf_grib import HSAFFileHandler fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) self.assertEqual(fh._analysis_time, correct_dt) self.assertEqual(fh.metadata['projparams']['lat_0'], 0.0) self.assertEqual(fh.metadata['shortName'], 'irrate') self.assertEqual(fh.metadata['nx'], 3712) @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) def test_get_area_def(self, pg): """ Test the area definition setup, checks the size and extent """ pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) area_def = HSAFFileHandler.get_area_def(fh, 'H03B') self.assertEqual(area_def.x_size, 3712) self.assertAlmostEqual(area_def.area_extent[0], -5569209.3026, places=3) self.assertAlmostEqual(area_def.area_extent[3], 5587721.9097, places=3) @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) def test_get_dataset(self, pg): """ Test reading the actual datasets from a grib file """ pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler # Instantaneous precipitation fh = HSAFFileHandler('filename', mock.MagicMock(), 
mock.MagicMock()) fh.filename = "H03B" ds_id = mock.Mock() ds_id.name = 'H03B' data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) # Accumulated precipitation fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) fh.filename = "H05B" ds_id = mock.Mock() ds_id.name = 'H05B' data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) def suite(): """The test suite for test_grib.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestHSAFFileHandler)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_iasi_l2.py000066400000000000000000000332231362525524100226620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for IASI L2 reader""" import os import unittest import numpy as np SCAN_WIDTH = 120 NUM_LEVELS = 138 NUM_SCANLINES = 1 FNAME = "W_XX-EUMETSAT-kan,iasi,metopb+kan_C_EUMS_20170920103559_IASI_PW3_02_M01_20170920102217Z_20170920102912Z.hdf" # Structure for the test data, to be written to HDF5 file TEST_DATA = { # Not implemented in the reader 'Amsu': { 'FLG_AMSUBAD': {'data': np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), 'attrs': {}} }, # Not implemented in the reader 'INFO': { 'OmC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'long_name': "Cloud signal. 
Predicted average window channel 'Obs minus Calc", 'units': 'K'}}, 'mdist': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}} }, 'L1C': { 'Latitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees_north'}}, 'Longitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees_north'}}, 'SatAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees'}}, 'SatZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees'}}, 'SensingTime_day': {'data': np.array([6472], dtype=np.uint16), 'attrs': {}}, 'SensingTime_msec': {'data': np.array([37337532], dtype=np.uint32), 'attrs': {}}, 'SunAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees'}}, 'SunZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'degrees'}}, }, # Not implemented in the reader 'Maps': { 'Height': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'm'}}, 'HeightStd': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'units': 'm'}}, }, # Not implemented in the reader 'Mhs': { 'FLG_MHSBAD': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), 'attrs': {}} }, 'PWLR': { 'E': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), 'attrs': {'emissivity_wavenumbers': np.array([699.3, 826.4, 925.9, 1075.2, 1204.8, 1315.7, 1724.1, 2000.0, 2325.5, 2702.7], dtype=np.float32)}}, 'O': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), 'attrs': {'long_name': 'Ozone mixing ratio vertical profile', 'units': 'kg/kg'}}, 'OC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'P': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), 'attrs': {'long_name': 'Atmospheric pressures at which the vertical profiles are given. 
' 'Last value is the surface pressure', 'units': 'hpa'}}, 'QE': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QO': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QP': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QT': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QTs': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'QW': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {}}, 'T': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), 'attrs': {'long_name': 'Temperature vertical profile', 'units': 'K'}}, 'Ts': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'long_name': 'Surface skin temperature', 'units': 'K'}}, 'W': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), 'attrs': {'long_name': 'Water vapour mixing ratio vertical profile', 'units': 'kg/kg'}}, 'WC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), 'attrs': {'long_name': 'Water vapour total columnar amount', 'units': 'mm'}}, } } def save_test_data(path): """Save the test to the indicated directory""" import h5py with h5py.File(os.path.join(path, FNAME), 'w') as fid: # Create groups for grp in TEST_DATA: fid.create_group(grp) # Write datasets for dset in TEST_DATA[grp]: fid[grp][dset] = TEST_DATA[grp][dset]['data'] # Write dataset attributes for attr in TEST_DATA[grp][dset]['attrs']: fid[grp][dset].attrs[attr] = \ TEST_DATA[grp][dset]['attrs'][attr] class TestIasiL2(unittest.TestCase): """Test IASI L2 reader.""" def setUp(self): """Create temporary data to test on.""" import tempfile import datetime as dt from satpy.readers.iasi_l2 import IASIL2HDF5 self.base_dir = tempfile.mkdtemp() save_test_data(self.base_dir) self.fname = os.path.join(self.base_dir, FNAME) self.fname_info = {'start_time': dt.datetime(2017, 9, 20, 10, 22, 17), 'end_time': dt.datetime(2017, 9, 20, 10, 29, 12), 'processing_time': dt.datetime(2017, 9, 20, 10, 35, 59), 'processing_location': 'kan', 'long_platform_id': 'metopb', 'instrument': 'iasi', 'platform_id': 'M01'} self.ftype_info = {'file_reader': IASIL2HDF5, 'file_patterns': ['{fname}.hdf'], 'file_type': 'iasi_l2_hdf5'} self.reader = IASIL2HDF5(self.fname, self.fname_info, self.ftype_info) def tearDown(self): """Remove the temporary directory created for a test""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_scene(self): """Test scene creation""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) self.assertTrue('start_time' in scn.attrs) self.assertTrue('end_time' in scn.attrs) self.assertTrue('sensor' in scn.attrs) self.assertTrue('iasi' in scn.attrs['sensor']) def test_scene_load_available_datasets(self): """Test that all datasets are available""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) scn.load(scn.available_dataset_names()) def test_scene_load_pressure(self): """Test loading pressure data""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) scn.load(['pressure']) pres = scn['pressure'].compute() self.check_pressure(pres, scn.attrs) def test_scene_load_emissivity(self): """Test loading emissivity data""" from satpy import Scene fname = 
os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) scn.load(['emissivity']) emis = scn['emissivity'].compute() self.check_emissivity(emis) def test_scene_load_sensing_times(self): """Test loading sensing times""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) scn = Scene(reader='iasi_l2', filenames=[fname]) scn.load(['sensing_time']) times = scn['sensing_time'].compute() self.check_sensing_times(times) def test_init(self): """Test reader initialization""" self.assertEqual(self.reader.filename, self.fname) self.assertEqual(self.reader.finfo, self.fname_info) self.assertTrue(self.reader.lons is None) self.assertTrue(self.reader.lats is None) self.assertEqual(self.reader.mda['platform_name'], 'Metop-B') self.assertEqual(self.reader.mda['sensor'], 'iasi') def test_time_properties(self): """Test time properties""" import datetime as dt self.assertTrue(isinstance(self.reader.start_time, dt.datetime)) self.assertTrue(isinstance(self.reader.end_time, dt.datetime)) def test_get_dataset(self): """Test get_dataset() for different datasets""" from satpy import DatasetID info = {'eggs': 'spam'} key = DatasetID(name='pressure') data = self.reader.get_dataset(key, info).compute() self.check_pressure(data) self.assertTrue('eggs' in data.attrs) self.assertEqual(data.attrs['eggs'], 'spam') key = DatasetID(name='emissivity') data = self.reader.get_dataset(key, info).compute() self.check_emissivity(data) key = DatasetID(name='sensing_time') data = self.reader.get_dataset(key, info).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) def check_pressure(self, pres, attrs=None): """Helper method for testing reading pressure dataset""" self.assertTrue(np.all(pres == 0.0)) self.assertEqual(pres.x.size, SCAN_WIDTH) self.assertEqual(pres.y.size, NUM_SCANLINES) self.assertEqual(pres.level.size, NUM_LEVELS) if attrs: self.assertEqual(pres.attrs['start_time'], attrs['start_time']) self.assertEqual(pres.attrs['end_time'], attrs['end_time']) self.assertTrue('long_name' in pres.attrs) self.assertTrue('units' in pres.attrs) def check_emissivity(self, emis): """Helper method for testing reading emissivity dataset.""" self.assertTrue(np.all(emis == 0.0)) self.assertEqual(emis.x.size, SCAN_WIDTH) self.assertEqual(emis.y.size, NUM_SCANLINES) self.assertTrue('emissivity_wavenumbers' in emis.attrs) def check_sensing_times(self, times): """Helper method for testing reading sensing times""" # Times should be equal in blocks of four, but not beyond, so # there should be SCAN_WIDTH/4 different values for i in range(int(SCAN_WIDTH / 4)): self.assertEqual(np.unique(times[0, i*4:i*4+4]).size, 1) self.assertEqual(np.unique(times[0, :]).size, SCAN_WIDTH / 4) def test_read_dataset(self): """Test read_dataset() function""" import h5py from satpy.readers.iasi_l2 import read_dataset from satpy import DatasetID with h5py.File(self.fname, 'r') as fid: key = DatasetID(name='pressure') data = read_dataset(fid, key).compute() self.check_pressure(data) key = DatasetID(name='emissivity') data = read_dataset(fid, key).compute() self.check_emissivity(data) # This dataset doesn't have any attributes key = DatasetID(name='ozone_total_column') data = read_dataset(fid, key).compute() self.assertEqual(len(data.attrs), 0) def test_read_geo(self): """Test read_geo() function""" import h5py from satpy.readers.iasi_l2 import read_geo from satpy import DatasetID with h5py.File(self.fname, 'r') as fid: key = DatasetID(name='sensing_time') data = read_geo(fid, key).compute() 
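            # read_geo returns geolocation/time arrays shaped
            # (scanlines, across-track positions); for the fake file written
            # in setUp that is (NUM_SCANLINES, SCAN_WIDTH) == (1, 120).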
self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) key = DatasetID(name='latitude') data = read_geo(fid, key).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) def test_form_datetimes(self): """Test _form_datetimes() function""" from satpy.readers.iasi_l2 import _form_datetimes days = TEST_DATA['L1C']['SensingTime_day']['data'] msecs = TEST_DATA['L1C']['SensingTime_msec']['data'] times = _form_datetimes(days, msecs) self.check_sensing_times(times) def suite(): """The test suite for test_iasi_l2.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestIasiL2)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py000066400000000000000000000557501362525524100244740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittesting the SEVIRI L2 BUFR reader.""" import os import sys import numpy as np import unittest # TDB: this test is based on test_seviri_l2_bufr.py and test_iasi_l2.py # This is a test IASI level 2 SO2 product message, take from a real # bufr file distributed over EUMETCAST msg = { 'unpack': 1, 'inputDelayedDescriptorReplicationFactor': 5, 'edition': 4, 'masterTableNumber': 0, 'bufrHeaderCentre': 254, 'bufrHeaderSubCentre': 0, 'updateSequenceNumber': 0, 'dataCategory': 3, 'internationalDataSubCategory': 255, 'dataSubCategory': 230, 'masterTablesVersionNumber': 31, 'localTablesVersionNumber': 0, 'typicalYear': 2020, 'typicalMonth': 2, 'typicalDay': 4, 'typicalHour': 8, 'typicalMinute': 59, 'typicalSecond': 0, 'numberOfSubsets': 120, 'observedData': 1, 'compressedData': 1, 'unexpandedDescriptors': np.array([ 1007, 1031, 25060, 2019, 2020, 4001, 4002, 4003, 4004, 4005, 4006, 5040, 201133, 5041, 201000, 5001, 6001, 5043, 7024, 5021, 7025, 5022, 7007, 40068, 7002, 15045, 12080, 102000, 31001, 7007, 15045], dtype=np.int), '#1#satelliteIdentifier': 4, '#1#centre': 254, '#1#softwareIdentification': 605, '#1#satelliteInstruments': 221, '#1#satelliteClassification': 61, '#1#year': 2020, '#1#month': 2, '#1#day': 4, '#1#hour': 9, '#1#minute': 1, '#1#second': 11, '#1#orbitNumber': 68984, '#1#scanLineNumber': 447, '#1#latitude': np.array([ -33.4055, -33.6659, -33.738, -33.4648, -33.263, -33.5027, -33.5673, -33.3172, -33.1332, -33.3564, -33.4151, -33.1832, -33.0132, -33.2232, -33.2771, -33.0596, -32.903, -33.1021, -33.1522, -32.9466, -32.7982, -32.9884, -33.0354, -32.8395, -32.7005, -32.8832, -32.9276, -32.7399, -32.6061, -32.7826, -32.8251, -32.644, -32.5168, -32.6883, -32.7292, -32.5537, -32.4261, -32.5934, -32.6331, -32.4621, -32.3397, -32.5036, -32.5425, -32.3752, -32.2537, -32.4151, -32.4534, -32.289, -32.1682, -32.3277, -32.3657, -32.2035, -32.0826, -32.2407, -32.2788, -32.1182, -31.9952, -32.1527, -32.1911, -32.0313, -31.9068, -32.0642, -32.1032, -31.9438, -31.8147, 
-31.9727, -32.0127, -31.8529, -31.7177, -31.8769, -31.9181, -31.7573, -31.6182, -31.7792, -31.8222, -31.6598, -31.5106, -31.674, -31.7191, -31.5545, -31.3962, -31.5628, -31.6107, -31.4431, -31.2727, -31.4434, -31.4947, -31.3233, -31.1375, -31.3131, -31.3686, -31.1926, -30.9867, -31.1684, -31.2293, -31.0476, -30.8201, -31.009, -31.0768, -30.8882, -30.6289, -30.8265, -30.9031, -30.7062, -30.4071, -30.6153, -30.7036, -30.4967, -30.146, -30.3672, -30.4712, -30.2521, -29.8276, -30.0649, -30.1911, -29.9569, -29.4268, -29.6844, -29.8436, -29.5903]), '#1#longitude': np.array([ 2.53790e+00, 2.49440e+00, 3.08690e+00, 3.12690e+00, 1.15600e+00, 1.11230e+00, 1.59640e+00, 1.63750e+00, -3.70000e-03, -4.73000e-02, 3.61900e-01, 4.03500e-01, -1.00010e+00, -1.04340e+00, -6.88300e-01, -6.46600e-01, -1.88040e+00, -1.92340e+00, -1.60890e+00, -1.56730e+00, -2.66750e+00, -2.71020e+00, -2.42680e+00, -2.38520e+00, -3.38640e+00, -3.42890e+00, -3.16970e+00, -3.12830e+00, -4.04920e+00, -4.09150e+00, -3.85140e+00, -3.81000e+00, -4.66850e+00, -4.71080e+00, -4.48590e+00, -4.44450e+00, -5.25210e+00, -5.29440e+00, -5.08140e+00, -5.03990e+00, -5.80970e+00, -5.85220e+00, -5.64840e+00, -5.60670e+00, -6.34640e+00, -6.38920e+00, -6.19250e+00, -6.15060e+00, -6.86700e+00, -6.91020e+00, -6.71870e+00, -6.67640e+00, -7.37770e+00, -7.42140e+00, -7.23330e+00, -7.19050e+00, -7.88100e+00, -7.92530e+00, -7.73920e+00, -7.69570e+00, -8.38370e+00, -8.42900e+00, -8.24320e+00, -8.19890e+00, -8.88730e+00, -8.93360e+00, -8.74660e+00, -8.70130e+00, -9.39480e+00, -9.44230e+00, -9.25260e+00, -9.20620e+00, -9.91570e+00, -9.96460e+00, -9.77050e+00, -9.72270e+00, -1.04496e+01, -1.05002e+01, -1.02999e+01, -1.02505e+01, -1.10049e+01, -1.10576e+01, -1.08489e+01, -1.07977e+01, -1.15859e+01, -1.16409e+01, -1.14216e+01, -1.13682e+01, -1.21993e+01, -1.22570e+01, -1.20240e+01, -1.19681e+01, -1.28575e+01, -1.29185e+01, -1.26682e+01, -1.26093e+01, -1.35688e+01, -1.36337e+01, -1.33615e+01, -1.32990e+01, -1.43504e+01, -1.44199e+01, -1.41196e+01, -1.40529e+01, -1.52201e+01, -1.52953e+01, -1.49585e+01, -1.48867e+01, -1.62074e+01, -1.62896e+01, -1.59045e+01, -1.58264e+01, -1.73549e+01, -1.74460e+01, -1.69944e+01, -1.69085e+01, -1.87277e+01, -1.88302e+01, -1.82832e+01, -1.81873e+01]), '#1#fieldOfViewNumber': np.array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120]), '#1#satelliteZenithAngle': np.array([ 56.64, 56.64, 58.38, 58.37, 52.15, 52.15, 53.8, 53.79, 47.84, 47.84, 49.42, 49.42, 43.67, 43.67, 45.21, 45.2, 39.59, 39.59, 41.1, 41.09, 35.59, 35.59, 37.08, 37.07, 31.65, 31.65, 33.11, 33.1, 27.75, 27.75, 29.2, 29.19, 23.89, 23.89, 25.33, 25.32, 20.07, 20.06, 21.49, 21.48, 16.26, 16.26, 17.67, 17.67, 12.47, 12.47, 13.88, 13.87, 8.7, 8.7, 10.1, 10.1, 4.95, 4.95, 6.34, 6.33, 1.33, 1.34, 2.64, 2.63, 2.72, 2.73, 1.43, 1.41, 6.44, 6.45, 5.05, 5.05, 10.19, 10.19, 8.79, 8.79, 13.97, 13.98, 12.57, 12.57, 17.77, 17.77, 16.35, 16.36, 21.58, 21.59, 20.16, 20.17, 25.42, 25.43, 23.99, 24., 29.29, 29.29, 27.84, 27.85, 33.21, 33.21, 31.75, 31.75, 37.16, 37.17, 35.68, 35.69, 41.19, 41.2, 39.69, 39.69, 45.3, 45.31, 43.76, 43.77, 49.52, 49.53, 
47.94, 47.94, 53.89, 53.9, 52.25, 52.25, 58.48, 58.48, 56.74, 56.75]), '#1#bearingOrAzimuth': np.array([ 276.93, 278.61, 278.27, 276.61, 277.64, 279.42, 279.14, 277.38, 278.22, 280.11, 279.88, 278.01, 278.69, 280.72, 280.51, 278.51, 279.09, 281.3, 281.11, 278.94, 279.41, 281.83, 281.64, 279.28, 279.68, 282.36, 282.18, 279.58, 279.88, 282.9, 282.71, 279.79, 280.02, 283.49, 283.29, 279.96, 279.98, 284.07, 283.84, 279.96, 279.84, 284.85, 284.57, 279.89, 279.4, 285.9, 285.49, 279.57, 278.31, 287.59, 286.87, 278.78, 275.22, 291.5, 289.61, 276.76, 252.48, 315.67, 299.21, 268.02, 117.92, 88.23, 72.78, 132.31, 109.86, 97.41, 95.43, 111.52, 108.02, 100.14, 99.35, 108.59, 107.2, 101.44, 100.97, 107.44, 106.92, 102.37, 102.04, 107.04, 106.84, 103.07, 102.81, 106.88, 106.87, 103.65, 103.42, 106.87, 107., 104.18, 103.97, 106.97, 107.2, 104.69, 104.49, 107.14, 107.44, 105.16, 104.97, 107.35, 107.74, 105.67, 105.47, 107.64, 108.11, 106.2, 105.99, 107.98, 108.54, 106.76, 106.53, 108.38, 109.06, 107.39, 107.14, 108.87, 109.7, 108.13, 107.83, 109.46]), '#1#solarZenithAngle': np.array([ 44.36, 44.44, 43.98, 43.89, 45.47, 45.54, 45.16, 45.08, 46.4, 46.47, 46.14, 46.07, 47.21, 47.27, 46.99, 46.92, 47.92, 47.98, 47.73, 47.67, 48.56, 48.62, 48.39, 48.33, 49.15, 49.21, 49., 48.94, 49.7, 49.75, 49.55, 49.5, 50.21, 50.26, 50.07, 50.02, 50.69, 50.74, 50.56, 50.51, 51.15, 51.2, 51.03, 50.98, 51.59, 51.64, 51.48, 51.43, 52.02, 52.07, 51.91, 51.87, 52.45, 52.5, 52.34, 52.29, 52.87, 52.92, 52.76, 52.71, 53.29, 53.34, 53.18, 53.14, 53.71, 53.76, 53.6, 53.56, 54.14, 54.18, 54.03, 53.98, 54.58, 54.62, 54.46, 54.41, 55.03, 55.08, 54.91, 54.86, 55.50, 55.55, 55.37, 55.32, 55.99, 56.04, 55.85, 55.81, 56.51, 56.56, 56.37, 56.32, 57.08, 57.13, 56.91, 56.86, 57.69, 57.74, 57.51, 57.46, 58.36, 58.42, 58.16, 58.1, 59.11, 59.17, 58.88, 58.82, 59.98, 60.04, 59.70, 59.64, 60.98, 61.05, 60.65, 60.59, 62.20, 62.27, 61.78, 61.72]), '#1#solarAzimuth': np.array([ 78.89, 78.66, 78.16, 78.41, 80.00, 79.80, 79.40, 79.62, 80.92, 80.74, 80.40, 80.6, 81.69, 81.53, 81.24, 81.42, 82.36, 82.21, 81.96, 82.12, 82.96, 82.82, 82.60, 82.74, 83.49, 83.36, 83.16, 83.3, 83.98, 83.86, 83.68, 83.80, 84.43, 84.32, 84.15, 84.27, 84.86, 84.75, 84.59, 84.7, 85.26, 85.15, 85., 85.11, 85.64, 85.54, 85.40, 85.5, 86.01, 85.91, 85.77, 85.88, 86.37, 86.28, 86.14, 86.24, 86.73, 86.63, 86.50, 86.59, 87.07, 86.98, 86.85, 86.94, 87.42, 87.33, 87.20, 87.29, 87.77, 87.68, 87.55, 87.64, 88.13, 88.04, 87.90, 87.99, 88.49, 88.41, 88.27, 88.36, 88.87, 88.78, 88.64, 88.73, 89.26, 89.17, 89.02, 89.11, 89.67, 89.59, 89.43, 89.51, 90.11, 90.02, 89.85, 89.94, 90.58, 90.49, 90.31, 90.4, 91.09, 91., 90.81, 90.89, 91.66, 91.57, 91.35, 91.44, 92.29, 92.20, 91.95, 92.04, 93.02, 92.93, 92.64, 92.73, 93.87, 93.79, 93.45, 93.54]), '#1#height': 83270, '#1#generalRetrievalQualityFlagForSo2': 9, '#2#height': -1e+100, '#1#sulphurDioxide': -1e+100, '#1#brightnessTemperatureRealPart': np.array([ 0.11, 0.11, -0.07, 0.08, 0.13, 0.15, 0.10, 0.06, -0.02, -0.03, 0.08, 0.17, -0.05, 0.12, 0.08, -0.06, 0.15, 0.08, -0.04, -0.01, 0.06, 0.17, -0.01, 0.15, 0.18, 0.05, 0.11, -0.03, 0.09, 0.02, 0.04, 0.10, 0.00, 0.00, 0.01, 0.18, -0.20, 0.10, 0.00, 0.13, -0.15, 0.09, 0.09, -0.10, 0.04, 0.06, -0.01, -0.03, -0.07, -0.05, -0.07, -0.09, -0.03, -0.13, -0.01, 0.10, -0.21, -0.23, -0.18, -0.08, -0.09, -0.19, -0.07, -0.08, -0.19, -0.24, -0.24, -0.05, -0.03, -0.08, -0.01, -0.07, -0.03, -0.38, -0.39, -0.22, -0.28, -0.15, -0.10, -0.26, -0.18, -0.11, -0.31, -0.18, -0.19, -0.26, -0.22, -0.19, 0.02, -0.19, -0.01, -0.38, 
-0.06, -0.34, -0.31, -0.19, 0.08, -0.05, -0.08, 0.41, -0.19, -0.22, -0.03, 0.11, -0.26, -0.33, -0.08, 0.03, -0.05, 0.02, 0.17, -0.10, 0.01, 0.01, 0.05, 0.01, 0.15, -0.06, -0.14, 0.38]), '#3#height': 7000, '#2#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 2.3e+000, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), '#4#height': 10000, '#3#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 8.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), '#5#height': 13000, '#4#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 
-1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 5.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), '#6#height': 16000, '#5#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 4.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), '#7#height': 25000, '#6#sulphurDioxide': np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 5.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]) } # the notional filename that would contain the above test 
message data FILENAME = 'W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin' # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { 'reception_location': 'EUMETSAT-Darmstadt', 'platform': 'METOPA', 'instrument': 'IASI', 'start_time': '20200204091455', 'perigee': '68977', 'species': 'so2', 'level': 'l2' } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { 'file_type': 'iasi_l2_so2_bufr', 'file_reader': 'IASIL2SO2BUFR' } # number of cross track samples in one IASI scan SCAN_WIDTH = 120 def save_test_data(path): """Save the test file to the indicated directory.""" import eccodes as ec with open(os.path.join(path, FILENAME), "wb") as f: for m in [msg]: buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') for key in m.keys(): val = m[key] if np.isscalar(val): ec.codes_set(buf, key, val) else: ec.codes_set_array(buf, key, val) ec.codes_set(buf, 'pack', 1) ec.codes_write(buf, f) ec.codes_release(buf) class TestIasiL2So2Bufr(unittest.TestCase): """Test IASI l2 SO2 loader.""" def setUp(self): """Create temporary file to perform tests with.""" import tempfile from satpy.readers.iasi_l2_so2_bufr import IASIL2SO2BUFR self.base_dir = tempfile.mkdtemp() save_test_data(self.base_dir) self.fname = os.path.join(self.base_dir, FILENAME) self.fname_info = FILENAME_INFO self.ftype_info = FILETYPE_INFO self.reader = IASIL2SO2BUFR(self.fname, self.fname_info, self.ftype_info) def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) self.assertTrue('start_time' in scn.attrs) self.assertTrue('end_time' in scn.attrs) self.assertTrue('sensor' in scn.attrs) self.assertTrue('iasi' in scn.attrs['sensor']) @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) scn.load(scn.available_dataset_names()) @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) for name in scn.available_dataset_names(): scn.load([name]) loaded_values = scn.datasets[name].values fill_value = scn.datasets[name].attrs['fill_value'] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) key = scn.datasets[name].attrs['key'] original_values = msg[key] # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): self.assertTrue(np.allclose(original_values, loaded_values_nan_filled)) @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def test_init(self): """Test reader 
initialization.""" self.assertTrue(True) def suite(): """Test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestIasiL2So2Bufr)) return mysuite if __name__ == "__main__": # So you can run tests from this module individually. unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_mersi2_l1b.py000066400000000000000000000615051362525524100233030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'mersi2_l1b' reader.""" from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler import sys import numpy as np import dask.array as da import xarray as xr import os if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def make_test_data(self, dims): return xr.DataArray(da.from_array(np.ones([dim for dim in dims], dtype=np.float32) * 10, [dim for dim in dims])) def _get_calibration(self, num_scans, rows_per_scan): calibration = { 'Calibration/VIS_Cal_Coeff': xr.DataArray( da.ones((19, 3), chunks=1024), attrs={'Slope': [1.] * 19, 'Intercept': [0.] * 19}, dims=('_bands', '_coeffs')), 'Calibration/IR_Cal_Coeff': xr.DataArray( da.ones((6, 4, num_scans), chunks=1024), attrs={'Slope': [1.] * 6, 'Intercept': [0.] * 6}, dims=('_bands', '_coeffs', '_scans')), } return calibration def _get_1km_data(self, num_scans, rows_per_scan, num_cols): data = { 'Data/EV_1KM_RefSB': xr.DataArray( da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 15, 'Intercept': [0.] * 15, 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], 'long_name': b'1km Earth View Science Data', }, dims=('_ref_bands', '_rows', '_cols')), 'Data/EV_1KM_Emissive': xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 4, 'Intercept': [0.] * 4, 'FillValue': 65535, 'units': 'mW/ (m2 cm-1 sr)', 'valid_range': [0, 25000], 'long_name': b'1km Emissive Bands Earth View ' b'Science Data', }, dims=('_ir_bands', '_rows', '_cols')), 'Data/EV_250_Aggr.1KM_RefSB': xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 4, 'Intercept': [0.] * 4, 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], 'long_name': b'250m Reflective Bands Earth View ' b'Science Data Aggregated to 1 km' }, dims=('_ref250_bands', '_rows', '_cols')), 'Data/EV_250_Aggr.1KM_Emissive': xr.DataArray( da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 2, 'Intercept': [0.] 
* 2, 'FillValue': 65535, 'units': 'mW/ (m2 cm-1 sr)', 'valid_range': [0, 4095], 'long_name': b'250m Emissive Bands Earth View ' b'Science Data Aggregated to 1 km' }, dims=('_ir250_bands', '_rows', '_cols')), } return data def _get_250m_data(self, num_scans, rows_per_scan, num_cols): data = { 'Data/EV_250_RefSB_b1': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 1, 'Intercept': [0.] * 1, 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_RefSB_b2': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 1, 'Intercept': [0.] * 1, 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_RefSB_b3': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 1, 'Intercept': [0.] * 1, 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_RefSB_b4': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 1, 'Intercept': [0.] * 1, 'FillValue': 65535, 'units': 'NO', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_Emissive_b24': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 1, 'Intercept': [0.] * 1, 'FillValue': 65535, 'units': 'mW/ (m2 cm-1 sr)', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), 'Data/EV_250_Emissive_b25': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ 'Slope': [1.] * 1, 'Intercept': [0.] * 1, 'FillValue': 65535, 'units': 'mW/ (m2 cm-1 sr)', 'valid_range': [0, 4095], }, dims=('_rows', '_cols')), } return data def _get_geo_data(self, num_scans, rows_per_scan, num_cols, prefix='Geolocation/'): geo = { prefix + 'Longitude': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ 'Slope': [1.] * 1, 'Intercept': [0.] * 1, 'units': 'degree', 'valid_range': [-90, 90], }, dims=('_rows', '_cols')), prefix + 'Latitude': xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ 'Slope': [1.] * 1, 'Intercept': [0.] 
* 1, 'units': 'degree', 'valid_range': [-180, 180], }, dims=('_rows', '_cols')), } return geo def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" rows_per_scan = self.filetype_info.get('rows_per_scan', 10) num_scans = 2 num_cols = 2048 global_attrs = { '/attr/Observing Beginning Date': '2019-01-01', '/attr/Observing Ending Date': '2019-01-01', '/attr/Observing Beginning Time': '18:27:39.720', '/attr/Observing Ending Time': '18:38:36.728', '/attr/Satellite Name': 'FY-3D', '/attr/Sensor Identification Code': 'MERSI', } data = {} if self.filetype_info['file_type'] == 'mersi2_l1b_1000': data = self._get_1km_data(num_scans, rows_per_scan, num_cols) global_attrs['/attr/TBB_Trans_Coefficient_A'] = [1.0] * 6 global_attrs['/attr/TBB_Trans_Coefficient_B'] = [0.0] * 6 elif self.filetype_info['file_type'] == 'mersi2_l1b_250': data = self._get_250m_data(num_scans, rows_per_scan, num_cols * 2) global_attrs['/attr/TBB_Trans_Coefficient_A'] = [0.0] * 6 global_attrs['/attr/TBB_Trans_Coefficient_B'] = [0.0] * 6 elif self.filetype_info['file_type'] == 'mersi2_l1b_1000_geo': data = self._get_geo_data(num_scans, rows_per_scan, num_cols) elif self.filetype_info['file_type'] == 'mersi2_l1b_250_geo': data = self._get_geo_data(num_scans, rows_per_scan, num_cols * 2, prefix='') test_content = {} test_content.update(global_attrs) test_content.update(data) test_content.update(self._get_calibration(num_scans, rows_per_scan)) return test_content class TestMERSI2L1BReader(unittest.TestCase): """Test MERSI2 L1B Reader.""" yaml_file = "mersi2_l1b.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.mersi2_l1b import MERSI2L1B from satpy.config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MERSI2L1B, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_fy3d_all_resolutions(self): """Test loading data when all resolutions are available.""" from satpy import DatasetID from satpy.readers import load_reader, get_key filenames = [ 'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that we have multiple resolutions for: # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids for band_name in ('1', '2', '3', '4', '24', '25'): if band_name in ('24', '25'): # don't know how to get radiance for IR bands num_results = 2 else: num_results = 3 ds_id = DatasetID(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(num_results, len(res)) ds_id = DatasetID(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(num_results, len(res)) res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) self.assertEqual(8, len(res)) self.assertEqual((2 * 40, 2048 * 2), res['1'].shape) 
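        # Shape bookkeeping for the asserts around this point: 250 m band data
        # spans 2 scans x 40 rows per scan and 2 x 2048 columns, while 1 km
        # bands (e.g. '5' and '20') span 2 scans x 10 rows and 2048 columns.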
self.assertEqual('reflectance', res['1'].attrs['calibration']) self.assertEqual('%', res['1'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['2'].shape) self.assertEqual('reflectance', res['2'].attrs['calibration']) self.assertEqual('%', res['2'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['3'].shape) self.assertEqual('reflectance', res['3'].attrs['calibration']) self.assertEqual('%', res['3'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['4'].shape) self.assertEqual('reflectance', res['4'].attrs['calibration']) self.assertEqual('%', res['4'].attrs['units']) self.assertEqual((2 * 10, 2048), res['5'].shape) self.assertEqual('reflectance', res['5'].attrs['calibration']) self.assertEqual('%', res['5'].attrs['units']) self.assertEqual((2 * 10, 2048), res['20'].shape) self.assertEqual('brightness_temperature', res['20'].attrs['calibration']) self.assertEqual('K', res['20'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['24'].shape) self.assertEqual('brightness_temperature', res['24'].attrs['calibration']) self.assertEqual('K', res['24'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['25'].shape) self.assertEqual('brightness_temperature', res['25'].attrs['calibration']) self.assertEqual('K', res['25'].attrs['units']) def test_fy3d_counts_calib(self): """Test loading data at counts calibration.""" from satpy import DatasetID from satpy.readers import load_reader filenames = [ 'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) ds_ids = [] for band_name in ['1', '2', '3', '4', '5', '20', '24', '25']: ds_ids.append(DatasetID(name=band_name, calibration='counts')) res = reader.load(ds_ids) self.assertEqual(8, len(res)) self.assertEqual((2 * 40, 2048 * 2), res['1'].shape) self.assertEqual('counts', res['1'].attrs['calibration']) self.assertEqual(res['1'].dtype, np.uint16) self.assertEqual('1', res['1'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['2'].shape) self.assertEqual('counts', res['2'].attrs['calibration']) self.assertEqual(res['2'].dtype, np.uint16) self.assertEqual('1', res['2'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['3'].shape) self.assertEqual('counts', res['3'].attrs['calibration']) self.assertEqual(res['3'].dtype, np.uint16) self.assertEqual('1', res['3'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['4'].shape) self.assertEqual('counts', res['4'].attrs['calibration']) self.assertEqual(res['4'].dtype, np.uint16) self.assertEqual('1', res['4'].attrs['units']) self.assertEqual((2 * 10, 2048), res['5'].shape) self.assertEqual('counts', res['5'].attrs['calibration']) self.assertEqual(res['5'].dtype, np.uint16) self.assertEqual('1', res['5'].attrs['units']) self.assertEqual((2 * 10, 2048), res['20'].shape) self.assertEqual('counts', res['20'].attrs['calibration']) self.assertEqual(res['20'].dtype, np.uint16) self.assertEqual('1', res['20'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['24'].shape) self.assertEqual('counts', res['24'].attrs['calibration']) self.assertEqual(res['24'].dtype, np.uint16) self.assertEqual('1', res['24'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['25'].shape) self.assertEqual('counts', 
res['25'].attrs['calibration']) self.assertEqual(res['25'].dtype, np.uint16) self.assertEqual('1', res['25'].attrs['units']) def test_fy3d_rad_calib(self): """Test loading data at radiance calibration.""" from satpy import DatasetID from satpy.readers import load_reader filenames = [ 'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) ds_ids = [] for band_name in ['1', '2', '3', '4', '5']: ds_ids.append(DatasetID(name=band_name, calibration='radiance')) res = reader.load(ds_ids) self.assertEqual(5, len(res)) self.assertEqual((2 * 40, 2048 * 2), res['1'].shape) self.assertEqual('radiance', res['1'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['1'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['2'].shape) self.assertEqual('radiance', res['2'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['2'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['3'].shape) self.assertEqual('radiance', res['3'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['3'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['4'].shape) self.assertEqual('radiance', res['4'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['4'].attrs['units']) self.assertEqual((2 * 10, 2048), res['5'].shape) self.assertEqual('radiance', res['5'].attrs['calibration']) self.assertEqual('mW/ (m2 cm-1 sr)', res['5'].attrs['units']) def test_fy3d_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" from satpy import DatasetID from satpy.readers import load_reader, get_key filenames = [ 'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that we have multiple resolutions for: # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids for band_name in ('1', '2', '3', '4', '24', '25'): if band_name in ('24', '25'): # don't know how to get radiance for IR bands num_results = 2 else: num_results = 3 ds_id = DatasetID(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(0, len(res)) ds_id = DatasetID(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(num_results, len(res)) res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) self.assertEqual(8, len(res)) self.assertEqual((2 * 10, 2048), res['1'].shape) self.assertEqual('reflectance', res['1'].attrs['calibration']) self.assertEqual('%', res['1'].attrs['units']) self.assertEqual((2 * 10, 2048), res['2'].shape) self.assertEqual('reflectance', res['2'].attrs['calibration']) self.assertEqual('%', res['2'].attrs['units']) self.assertEqual((2 * 10, 2048), res['3'].shape) self.assertEqual('reflectance', res['3'].attrs['calibration']) self.assertEqual('%', res['3'].attrs['units']) self.assertEqual((2 * 10, 2048), res['4'].shape) self.assertEqual('reflectance', 
res['4'].attrs['calibration']) self.assertEqual('%', res['4'].attrs['units']) self.assertEqual((2 * 10, 2048), res['5'].shape) self.assertEqual('reflectance', res['5'].attrs['calibration']) self.assertEqual('%', res['5'].attrs['units']) self.assertEqual((2 * 10, 2048), res['20'].shape) self.assertEqual('brightness_temperature', res['20'].attrs['calibration']) self.assertEqual('K', res['20'].attrs['units']) self.assertEqual((2 * 10, 2048), res['24'].shape) self.assertEqual('brightness_temperature', res['24'].attrs['calibration']) self.assertEqual('K', res['24'].attrs['units']) self.assertEqual((2 * 10, 2048), res['25'].shape) self.assertEqual('brightness_temperature', res['25'].attrs['calibration']) self.assertEqual('K', res['25'].attrs['units']) def test_fy3d_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" from satpy import DatasetID from satpy.readers import load_reader, get_key filenames = [ 'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF', ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) self.assertTrue(4, len(files)) reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) # Verify that we have multiple resolutions for: # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids for band_name in ('1', '2', '3', '4', '24', '25'): if band_name in ('24', '25'): # don't know how to get radiance for IR bands num_results = 2 else: num_results = 3 ds_id = DatasetID(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(num_results, len(res)) ds_id = DatasetID(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) self.assertEqual(0, len(res)) res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) self.assertEqual(6, len(res)) self.assertRaises(KeyError, res.__getitem__, '5') self.assertRaises(KeyError, res.__getitem__, '20') self.assertEqual((2 * 40, 2048 * 2), res['1'].shape) self.assertEqual('reflectance', res['1'].attrs['calibration']) self.assertEqual('%', res['1'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['2'].shape) self.assertEqual('reflectance', res['2'].attrs['calibration']) self.assertEqual('%', res['2'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['3'].shape) self.assertEqual('reflectance', res['3'].attrs['calibration']) self.assertEqual('%', res['3'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['4'].shape) self.assertEqual('reflectance', res['4'].attrs['calibration']) self.assertEqual('%', res['4'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['24'].shape) self.assertEqual('brightness_temperature', res['24'].attrs['calibration']) self.assertEqual('K', res['24'].attrs['units']) self.assertEqual((2 * 40, 2048 * 2), res['25'].shape) self.assertEqual('brightness_temperature', res['25'].attrs['calibration']) self.assertEqual('K', res['25'].attrs['units']) def suite(): """The test suite for test_virr_l1b.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestMERSI2L1BReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_mimic_TPW2_nc.py000066400000000000000000000136131362525524100237330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of 
Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . """Module for testing the satpy.readers.tropomi_l2 module. """ import os import sys from datetime import datetime import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler import xarray as xr if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (9001, 18000) DEFAULT_LAT = np.linspace(-90, 90, DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(-180, 180, DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" from xarray import DataArray dt_s = filename_info.get('start_time', datetime(2019, 6, 19, 13, 0)) dt_e = filename_info.get('end_time', datetime(2019, 6, 19, 13, 0)) if filetype_info['file_type'] == 'mimicTPW2_comp': file_content = { '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), '/attr/platform_shortname': 'aggregated microwave', '/attr/sensor': 'mimic', } file_content['latArr'] = DEFAULT_LAT file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) file_content['latArr/attr/units'] = 'degress_north' file_content['lonArr'] = DEFAULT_LON file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) file_content['lonArr/attr/units'] = 'degrees_east' file_content['tpwGrid'] = DEFAULT_FILE_DATA file_content['tpwGrid/shape'] = DEFAULT_FILE_SHAPE file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] # convert to xarrays for key, val in file_content.items(): if key == 'lonArr' or key == 'latArr': file_content[key] = DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: file_content[key] = DataArray(val, dims=('y', 'x')) else: file_content[key] = DataArray(val) file_content['tpwGrid'].attrs['_FillValue'] = -999.0 file_content['tpwGrid'].attrs['name'] = 'tpwGrid' file_content['tpwGrid'].attrs['file_key'] = 'tpwGrid' file_content['tpwGrid'].attrs['file_type'] = self.filetype_info['file_type'] else: msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) raise AssertionError(msg) return file_content class TestMimicTPW2Reader(unittest.TestCase): """Test Mimic Reader""" yaml_file = "mimicTPW2_comp.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = 
mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimic,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'comp20190619.130000.nc', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_mimic(self): """Load Mimic data""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'comp20190619.130000.nc', ]) r.create_filehandlers(loadables) ds = r.load(['tpwGrid']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') self.assertEqual(d.attrs['sensor'], 'mimic') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def suite(): """The test suite for test_mimic_TPW2.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestMimicTPW2Reader)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_modis_l2.py000066400000000000000000000242161362525524100230520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
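# Note: unlike most reader tests in this directory, the MODIS L2 tests below
# write a real HDF4 file to disk with pyhdf and read it back through the
# Scene API, rather than mocking the file handler.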
"""Unit tests for MODIS L2 HDF reader.""" import os import unittest import numpy as np from pyhdf.SD import SD, SDC from satpy import available_readers, Scene # Mock MODIS HDF4 file SCAN_WIDTH = 406 SCAN_LEN = 270 SCALE_FACTOR = 1 TEST_LAT = np.repeat(np.linspace(35., 45., SCAN_WIDTH)[:, None], SCAN_LEN, 1) TEST_LAT *= np.linspace(0.9, 1.1, SCAN_LEN) TEST_LON = np.repeat(np.linspace(-45., -35., SCAN_LEN)[None, :], SCAN_WIDTH, 0) TEST_LON *= np.linspace(0.9, 1.1, SCAN_WIDTH)[:, None] TEST_SATZ = (np.repeat(abs(np.linspace(-65.2, 65.4, SCAN_LEN))[None, :], SCAN_WIDTH, 0) * 100).astype(np.int16) TEST_DATA = { 'Latitude': {'data': TEST_LAT.astype(np.float32), 'type': SDC.FLOAT32, 'fill_value': -999, 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, 'Longitude': {'data': TEST_LON.astype(np.float32), 'type': SDC.FLOAT32, 'fill_value': -999, 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, 'Sensor_Zenith': {'data': TEST_SATZ, 'type': SDC.INT32, 'fill_value': -32767, 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35'], 'scale_factor': 0.01}}, 'Cloud_Mask': {'data': np.zeros((6, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.int8), 'type': SDC.INT8, 'fill_value': 0, 'attrs': {'dim_labels': ['Byte_Segment:mod35', 'Cell_Along_Swath_1km:mod35', 'Cell_Across_Swath_1km:mod35']}}, 'Quality_Assurance': {'data': np.ones((5*SCAN_WIDTH, 5*SCAN_LEN+4, 10), dtype=np.int8), 'type': SDC.INT8, 'fill_value': 0, 'attrs': {'dim_labels': ['Cell_Along_Swath_1km:mod35', 'Cell_Across_Swath_1km:mod35', 'QA_Dimension:mod35']}} } def generate_file_name(): """Generate a file name that follows MODIS 35 L2 convention in a temporary directory.""" import tempfile from datetime import datetime creation_time = datetime.now() processing_time = datetime.now() file_name = 'MOD35_L2.A{}.{}.061.{}.hdf'.format( creation_time.strftime("%Y%j"), creation_time.strftime("%H%M"), processing_time.strftime("%Y%j%H%M%S") ) base_dir = tempfile.mkdtemp() file_name = os.path.join(base_dir, file_name) return base_dir, file_name def create_test_data(): """Create a fake MODIS 35 L2 HDF4 file with headers.""" from datetime import datetime, timedelta base_dir, file_name = generate_file_name() h = SD(file_name, SDC.WRITE | SDC.CREATE) # Set hdf file attributes beginning_date = datetime.now() ending_date = beginning_date + timedelta(minutes=5) core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \ "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n"\ "NUM_VAL = 1\nVALUE = \"{}\"\n"\ "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n"\ "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME".format( beginning_date.strftime("%Y-%m-%d"), beginning_date.strftime("%H:%M:%S.%f"), ending_date.strftime("%Y-%m-%d"), ending_date.strftime("%H:%M:%S.%f") ) struct_metadata_header = "GROUP=SwathStructure\n"\ "GROUP=SWATH_1\n"\ "GROUP=DimensionMap\n"\ "OBJECT=DimensionMap_2\n"\ "GeoDimension=\"Cell_Along_Swath_5km\"\n"\ "END_OBJECT=DimensionMap_2\n"\ "END_GROUP=DimensionMap\n"\ "END_GROUP=SWATH_1\n"\ "END_GROUP=SwathStructure\nEND" archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND" setattr(h, 'CoreMetadata.0', core_metadata_header) # noqa setattr(h, 'StructMetadata.0', 
struct_metadata_header) # noqa setattr(h, 'ArchiveMetadata.0', archive_metadata_header) # noqa # Fill datasets for dataset in TEST_DATA: v = h.create(dataset, TEST_DATA[dataset]['type'], TEST_DATA[dataset]['data'].shape) v[:] = TEST_DATA[dataset]['data'] dim_count = 0 for dimension_name in TEST_DATA[dataset]['attrs']['dim_labels']: v.dim(dim_count).setname(dimension_name) dim_count += 1 v.setfillvalue(TEST_DATA[dataset]['fill_value']) v.scale_factor = TEST_DATA[dataset]['attrs'].get('scale_factor', SCALE_FACTOR) h.end() return base_dir, file_name class TestModisL2(unittest.TestCase): """Test MODIS L2 reader.""" def setUp(self): """Create fake HDF4 MODIS file.""" self.base_dir, self.file_name = create_test_data() def tearDown(self): """Remove the temporary directory created for the test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_available_reader(self): """Test that MODIS L2 reader is available.""" self.assertIn('modis_l2', available_readers()) def test_scene_available_datasets(self): """Test that datasets are available.""" scene = Scene(reader='modis_l2', filenames=[self.file_name]) available_datasets = scene.all_dataset_names() self.assertTrue(len(available_datasets) > 0) self.assertIn('cloud_mask', available_datasets) self.assertIn('latitude', available_datasets) self.assertIn('longitude', available_datasets) def test_load_longitude_latitude(self): """Test that longitude and latitude datasets are loaded correctly.""" from satpy import DatasetID def test_func(dname, x, y): if dname == 'longitude': # assert less np.testing.assert_array_less(x, y) else: # assert greater # np.testing.assert_equal(x > y, True) np.testing.assert_array_less(y, x) scene = Scene(reader='modis_l2', filenames=[self.file_name]) for dataset_name in ['longitude', 'latitude']: # Default resolution should be the interpolated 1km scene.load([dataset_name]) longitude_1km_id = DatasetID(name=dataset_name, resolution=1000) longitude_1km = scene[longitude_1km_id] self.assertEqual(longitude_1km.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4)) test_func(dataset_name, longitude_1km.values, 0) # Specify original 5km scale scene.load([dataset_name], resolution=5000) longitude_5km_id = DatasetID(name=dataset_name, resolution=5000) longitude_5km = scene[longitude_5km_id] self.assertEqual(longitude_5km.shape, TEST_DATA[dataset_name.capitalize()]['data'].shape) test_func(dataset_name, longitude_5km.values, 0) def test_load_quality_assurance(self): """Test loading quality assurance.""" from satpy import DatasetID scene = Scene(reader='modis_l2', filenames=[self.file_name]) dataset_name = 'quality_assurance' scene.load([dataset_name]) quality_assurance_id = DatasetID(name=dataset_name, resolution=1000) self.assertIn(quality_assurance_id, scene.datasets) quality_assurance = scene[quality_assurance_id] self.assertEqual(quality_assurance.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4)) def test_load_1000m_cloud_mask_dataset(self): """Test loading 1000m cloud mask.""" from satpy import DatasetID scene = Scene(reader='modis_l2', filenames=[self.file_name]) dataset_name = 'cloud_mask' scene.load([dataset_name], resolution=1000) cloud_mask_id = DatasetID(name=dataset_name, resolution=1000) self.assertIn(cloud_mask_id, scene.datasets) cloud_mask = scene[cloud_mask_id] self.assertEqual(cloud_mask.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4)) def test_load_250m_cloud_mask_dataset(self): """Test loading 250m cloud mask.""" from satpy import DatasetID scene = Scene(reader='modis_l2', filenames=[self.file_name]) 
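        # The 250 m cloud mask is expected to be 4x the 1 km grid size in
        # both dimensions, as asserted below.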
        dataset_name = 'cloud_mask'
        scene.load([dataset_name], resolution=250)
        cloud_mask_id = DatasetID(name=dataset_name, resolution=250)
        self.assertIn(cloud_mask_id, scene.datasets)
        cloud_mask = scene[cloud_mask_id]
        self.assertEqual(cloud_mask.shape, (4*5*SCAN_WIDTH, 4*(5*SCAN_LEN+4)))


def suite():
    """Set the test suite for test_modis_l2."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestModisL2))
    return mysuite


if __name__ == '__main__':
    unittest.main()

satpy-0.20.0/satpy/tests/reader_tests/test_nc_slstr.py

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Satpy developers
#
# This file is part of satpy.
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.netcdf_utils module. """ import os import sys import numpy as np try: from satpy.readers.netcdf_utils import NetCDF4FileHandler except ImportError: # fake the import so we can at least run the tests in this file NetCDF4FileHandler = object if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class FakeNetCDF4FileHandler(NetCDF4FileHandler): """Swap-in NetCDF4 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" if NetCDF4FileHandler is object: raise ImportError("Base 'NetCDF4FileHandler' could not be " "imported.") super(NetCDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) self.file_content.update(kwargs) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' - '/dimension/my_dim' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestNetCDF4FileHandler(unittest.TestCase): """Test NetCDF4 File Handler Utility class.""" def setUp(self): """Create a test NetCDF4 file.""" from netCDF4 import Dataset with Dataset('test.nc', 'w') as nc: # Create dimensions nc.createDimension('rows', 10) nc.createDimension('cols', 100) # Create Group g1 = nc.createGroup('test_group') # Add datasets ds1_f = g1.createVariable('ds1_f', np.float32, dimensions=('rows', 'cols')) ds1_f[:] = np.arange(10. * 100).reshape((10, 100)) ds1_i = g1.createVariable('ds1_i', np.int32, dimensions=('rows', 'cols')) ds1_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_f = nc.createVariable('ds2_f', np.float32, dimensions=('rows', 'cols')) ds2_f[:] = np.arange(10. 
* 100).reshape((10, 100)) ds2_i = nc.createVariable('ds2_i', np.int32, dimensions=('rows', 'cols')) ds2_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_s = nc.createVariable("ds2_s", np.int8, dimensions=("rows",)) ds2_s[:] = np.arange(10) ds2_sc = nc.createVariable("ds2_sc", np.int8, dimensions=()) ds2_sc[:] = 42 # Add attributes nc.test_attr_str = 'test_string' nc.test_attr_int = 0 nc.test_attr_float = 1.2 nc.test_attr_str_arr = np.array(b"test_string2") g1.test_attr_str = 'test_string' g1.test_attr_int = 0 g1.test_attr_float = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: d.test_attr_str = 'test_string' d.test_attr_int = 0 d.test_attr_float = 1.2 def tearDown(self): """Remove the previously created test file.""" os.remove('test.nc') def test_all_basic(self): """Test everything about the NetCDF4 class.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler import xarray as xr file_handler = NetCDF4FileHandler('test.nc', {}, {}) self.assertEqual(file_handler['/dimension/rows'], 10) self.assertEqual(file_handler['/dimension/cols'], 100) for ds in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith('f') else np.int32) self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) self.assertEqual(file_handler[ds + '/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler[ds + '/attr/test_attr_int'], 0) self.assertEqual(file_handler[ds + '/attr/test_attr_float'], 1.2) self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') self.assertEqual(file_handler['/attr/test_attr_int'], 0) self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) self.assertIsInstance(file_handler.get('ds2_f')[:], xr.DataArray) self.assertIsNone(file_handler.get('fake_ds')) self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') self.assertTrue('ds2_f' in file_handler) self.assertFalse('fake_ds' in file_handler) self.assertIsNone(file_handler.file_handle) self.assertEqual(file_handler["ds2_sc"], 42) def test_caching(self): """Test that caching works as intended. """ from satpy.readers.netcdf_utils import NetCDF4FileHandler h = NetCDF4FileHandler("test.nc", {}, {}, cache_var_size=1000, cache_handle=True) self.assertIsNotNone(h.file_handle) self.assertTrue(h.file_handle.isopen()) self.assertEqual(sorted(h.cached_file_content.keys()), ["ds2_s", "ds2_sc"]) # with caching, these tests access different lines than without np.testing.assert_array_equal(h["ds2_s"], np.arange(10)) np.testing.assert_array_equal(h["test_group/ds1_i"], np.arange(10 * 100).reshape((10, 100))) h.__del__() self.assertFalse(h.file_handle.isopen()) def test_filenotfound(self): """Test that error is raised when file not found """ from satpy.readers.netcdf_utils import NetCDF4FileHandler with self.assertRaises(IOError): NetCDF4FileHandler("/thisfiledoesnotexist.nc", {}, {}) def suite(): """The test suite for test_netcdf_utils.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestNetCDF4FileHandler)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_nucaps.py000066400000000000000000000555121362525524100226360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.nucaps module. """ import os import sys import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array import unittest from unittest import mock DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (120,) DEFAULT_PRES_FILE_SHAPE = (120, 100,) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_PRES_FILE_DATA = np.arange(DEFAULT_PRES_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_PRES_FILE_DATA = np.repeat([DEFAULT_PRES_FILE_DATA], DEFAULT_PRES_FILE_SHAPE[0], axis=0) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[0]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[0]).astype(DEFAULT_FILE_DTYPE) ALL_PRESSURE_LEVELS = [ 0.0161, 0.0384, 0.0769, 0.137, 0.2244, 0.3454, 0.5064, 0.714, 0.9753, 1.2972, 1.6872, 2.1526, 2.7009, 3.3398, 4.077, 4.9204, 5.8776, 6.9567, 8.1655, 9.5119, 11.0038, 12.6492, 14.4559, 16.4318, 18.5847, 20.9224, 23.4526, 26.1829, 29.121, 32.2744, 35.6505, 39.2566, 43.1001, 47.1882, 51.5278, 56.126, 60.9895, 66.1253, 71.5398, 77.2396, 83.231, 89.5204, 96.1138, 103.017, 110.237, 117.777, 125.646, 133.846, 142.385, 151.266, 160.496, 170.078, 180.018, 190.32, 200.989, 212.028, 223.441, 235.234, 247.408, 259.969, 272.919, 286.262, 300, 314.137, 328.675, 343.618, 358.966, 374.724, 390.893, 407.474, 424.47, 441.882, 459.712, 477.961, 496.63, 515.72, 535.232, 555.167, 575.525, 596.306, 617.511, 639.14, 661.192, 683.667, 706.565, 729.886, 753.628, 777.79, 802.371, 827.371, 852.788, 878.62, 904.866, 931.524, 958.591, 986.067, 1013.95, 1042.23, 1070.92, 1100 ] ALL_PRESSURE_LEVELS = np.repeat([ALL_PRESSURE_LEVELS], DEFAULT_PRES_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" file_content = { '/attr/time_coverage_start': filename_info['start_time'].strftime('%Y-%m-%dT%H:%M:%S.%fZ'), '/attr/time_coverage_end': filename_info['end_time'].strftime('%Y-%m-%dT%H:%M:%S.%fZ'), '/attr/start_orbit_number': 1, '/attr/end_orbit_number': 2, '/attr/platform_name': 'NPP', '/attr/instrument_name': 'CrIS, ATMS, VIIRS', } for k, units, standard_name in [ ('Solar_Zenith', 'degrees', 'solar_zenith_angle'), ('Topography', 'meters', ''), ('Land_Fraction', '1', ''), ('Surface_Pressure', 'mb', ''), ('Skin_Temperature', 'Kelvin', 'surface_temperature'), ]: file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/units'] = units file_content[k + '/attr/valid_range'] = (0., 120.) file_content[k + '/attr/_FillValue'] = -9999. 
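            # Only attach a CF standard_name attribute when one is defined
            # for this variable.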
if standard_name: file_content[k + '/attr/standard_name'] = standard_name for k, units, standard_name in [ ('Temperature', 'Kelvin', 'air_temperature'), ('H2O', '1', ''), ('H2O_MR', 'g/g', ''), ('O3', '1', ''), ('O3_MR', '1', ''), ('Liquid_H2O', '1', ''), ('Liquid_H2O_MR', 'g/g', 'cloud_liquid_water_mixing_ratio'), ('CO', '1', ''), ('CO_MR', '1', ''), ('CH4', '1', ''), ('CH4_MR', '1', ''), ('CO2', '1', ''), ('HNO3', '1', ''), ('HNO3_MR', '1', ''), ('N2O', '1', ''), ('N2O_MR', '1', ''), ('SO2', '1', ''), ('SO2_MR', '1', ''), ]: file_content[k] = DEFAULT_PRES_FILE_DATA file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE file_content[k + '/attr/units'] = units file_content[k + '/attr/valid_range'] = (0., 120.) file_content[k + '/attr/_FillValue'] = -9999. if standard_name: file_content[k + '/attr/standard_name'] = standard_name k = 'Pressure' file_content[k] = ALL_PRESSURE_LEVELS file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE file_content[k + '/attr/units'] = 'mb' file_content[k + '/attr/valid_range'] = (0., 2000.) file_content[k + '/attr/_FillValue'] = -9999. k = 'Quality_Flag' file_content[k] = DEFAULT_FILE_DATA.astype(np.int32) file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/valid_range'] = (0, 31) file_content[k + '/attr/_FillValue'] = -9999. k = 'Longitude' file_content[k] = DEFAULT_LON_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/units'] = 'degrees_east' file_content[k + '/attr/valid_range'] = (-180., 180.) file_content[k + '/attr/standard_name'] = 'longitude' file_content[k + '/attr/_FillValue'] = -9999. k = 'Latitude' file_content[k] = DEFAULT_LAT_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/units'] = 'degrees_north' file_content[k + '/attr/valid_range'] = (-90., 90.) file_content[k + '/attr/standard_name'] = 'latitude' file_content[k + '/attr/_FillValue'] = -9999. 
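        # Files with '_v1' in the name use different (lowercase) dimension
        # names, so the fake content picks the dimension names based on the
        # filename below.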
attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') cris_fors_dim_name = 'Number_of_CrIS_FORs' pressure_levels_dim_name = 'Number_of_P_Levels' if ('_v1' in filename): cris_fors_dim_name = 'number_of_FORs' pressure_levels_dim_name = 'number_of_p_levels' convert_file_content_to_data_array( file_content, attrs=attrs, dims=('z', cris_fors_dim_name, pressure_levels_dim_name)) return file_content class TestNUCAPSReader(unittest.TestCase): """Test NUCAPS Reader""" yaml_file = "nucaps.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Solar_Zenith', 'Topography', 'Land_Fraction', 'Surface_Pressure', 'Skin_Temperature', 'Quality_Flag', ]) self.assertEqual(len(datasets), 6) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) # self.assertEqual(v.info['units'], 'degrees') self.assertEqual(v.ndim, 1) self.assertEqual(v.attrs['sensor'], ['CrIS', 'ATMS', 'VIIRS']) def test_load_pressure_based(self): """Test loading all channels based on pressure""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature', 'H2O', 'H2O_MR', 'O3', 'O3_MR', 'Liquid_H2O', 'Liquid_H2O_MR', 'CO', 'CO_MR', 'CH4', 'CH4_MR', 'CO2', 'HNO3', 'HNO3_MR', 'N2O', 'N2O_MR', 'SO2', 'SO2_MR', ]) self.assertEqual(len(datasets), 18) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) self.assertEqual(v.ndim, 2) def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified""" from satpy.readers import load_reader r = load_reader(self.reader_configs) 
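        # Exactly six of the 100 pressure levels (103.017 through 142.385 mb)
        # fall inside the requested 100-150 mb range, hence the expected
        # dataset count below.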
loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, (DEFAULT_PRES_FILE_SHAPE[0], 6)) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 2) t_ds = datasets['Temperature'] self.assertEqual(t_ds.ndim, 2) self.assertTupleEqual(t_ds.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) pl_ds = datasets['Pressure_Levels'] self.assertTupleEqual(pl_ds.shape, (1,)) class TestNUCAPSScienceEDRReader(unittest.TestCase): """Test NUCAPS Science EDR Reader""" yaml_file = "nucaps.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) 
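        # As in TestNUCAPSReader above, patching __bases__ swaps the real
        # NetCDF4 base class for the fake handler so no actual files are read.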
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Topography', 'Land_Fraction', 'Surface_Pressure', 'Skin_Temperature', 'Quality_Flag', ]) self.assertEqual(len(datasets), 5) for v in datasets.values(): self.assertEqual(v.ndim, 1) self.assertEqual(v.attrs['sensor'], ['CrIS', 'ATMS', 'VIIRS']) def test_load_pressure_based(self): """Test loading all channels based on pressure""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature', 'H2O', 'H2O_MR', 'O3', 'O3_MR', 'CO', 'CO_MR', 'CH4', 'CH4_MR', 'CO2', 'HNO3', 'HNO3_MR', 'N2O', 'N2O_MR', 'SO2', 'SO2_MR', ]) self.assertEqual(len(datasets), 16) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) self.assertEqual(v.ndim, 2) def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 1) def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure 
levels""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=True) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, (DEFAULT_PRES_FILE_SHAPE[0], 6)) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) self.assertTupleEqual(v.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 2) t_ds = datasets['Temperature'] self.assertEqual(t_ds.ndim, 2) self.assertTupleEqual(t_ds.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) pl_ds = datasets['Pressure_Levels'] self.assertTupleEqual(pl_ds.shape, (1,)) def suite(): """The test suite for test_nucaps. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestNUCAPSReader)) mysuite.addTest(loader.loadTestsFromTestCase(TestNUCAPSScienceEDRReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_nwcsaf_msg.py000066400000000000000000000447561362525524100235040ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
# """Unittests for NWC SAF MSG (2013) reader.""" import unittest import numpy as np import tempfile import os import h5py from collections import OrderedDict try: from unittest import mock except ImportError: import mock # noqa CTYPE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) CTYPE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 20).astype(np.uint8) CTYPE_TEST_ARRAY[1000:1010, 1000:1010] = CTYPE_TEST_FRAME CTTH_HEIGHT_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) _CTTH_HEIGHT_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 80).astype(np.uint8) CTTH_HEIGHT_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_HEIGHT_TEST_FRAME CTTH_HEIGHT_TEST_FRAME_RES = _CTTH_HEIGHT_TEST_FRAME.astype(np.float32) * 200 - 2000 CTTH_HEIGHT_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_HEIGHT_TEST_FRAME_RES[1, 0:3] = np.nan CTTH_PRESSURE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) _CTTH_PRESSURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 54).astype(np.uint8) CTTH_PRESSURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_PRESSURE_TEST_FRAME CTTH_PRESSURE_TEST_FRAME_RES = _CTTH_PRESSURE_TEST_FRAME.astype(np.float32) * 25 - 250 CTTH_PRESSURE_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_PRESSURE_TEST_FRAME_RES[1, 0:9] = np.nan CTTH_TEMPERATURE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 140).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME[8, 5] = 255 CTTH_TEMPERATURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_TEMPERATURE_TEST_FRAME CTTH_TEMPERATURE_TEST_FRAME_RES = _CTTH_TEMPERATURE_TEST_FRAME.astype(np.float32) * 1.0 + 150 CTTH_TEMPERATURE_TEST_FRAME_RES[8, 5] = np.nan fake_ct = { "01-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [100, 100, 100], [0, 120, 0], [0, 0, 0], [250, 190, 250], [220, 160, 220], [255, 150, 0], [255, 100, 0], [255, 220, 0], [255, 180, 0], [255, 255, 140], [240, 240, 0], [250, 240, 200], [215, 215, 150], [255, 255, 255], [230, 230, 230], [0, 80, 215], [0, 180, 230], [0, 240, 240], [90, 200, 160], [200, 0, 200], [95, 60, 30], ], dtype=np.uint8, ), }, "02-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [[100, 100, 100], [255, 100, 0], [0, 80, 215], [95, 60, 30]], dtype=np.uint8 ), }, "CT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PALETTE": " 01-PALETTE", "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (CTYPE_TEST_ARRAY), }, "CT_PHASE": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT_PHASE", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PALETTE": " 02-PALETTE", "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (np.random.rand(1856, 3712) * 255).astype(np.uint8), }, "CT_QUALITY": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT_QUALITY", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (np.random.rand(1856, 3712) * 65535).astype(np.uint16), }, "attrs": { "CFAC": 13642337, "COFF": 1856, "GP_SC_ID": 323, "IMAGE_ACQUISITION_TIME": b"201611090800", "LFAC": 13642337, "LOFF": 1856, "NB_PARAMETERS": 3, "NC": 3712, "NL": 1856, "NOMINAL_PRODUCT_TIME": 
b"201611090814", "PACKAGE": b"SAFNWC/MSG", "PRODUCT_ALGORITHM_VERSION": b" 2.2", "PRODUCT_NAME": b"CT__", "PROJECTION_NAME": b"GEOS<+000.0>", "REGION_NAME": b"MSG-N", "SAF": b"NWC", "SGS_PRODUCT_COMPLETENESS": 99, "SGS_PRODUCT_QUALITY": 79, "SPECTRAL_CHANNEL_ID": 0, }, } fake_ct = OrderedDict(sorted(fake_ct.items(), key=lambda t: t[0])) fake_ctth = { "01-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [0, 0, 0], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [239, 239, 223], [239, 239, 223], [238, 214, 210], [238, 214, 210], [0, 255, 255], [0, 255, 255], [0, 216, 255], [0, 216, 255], [0, 178, 255], [0, 178, 255], [0, 140, 48], [0, 140, 48], [0, 255, 0], [0, 255, 0], [153, 255, 0], [153, 255, 0], [178, 255, 0], [178, 255, 0], [216, 255, 0], [216, 255, 0], [255, 255, 0], [255, 255, 0], [255, 216, 0], [255, 216, 0], [255, 164, 0], [255, 164, 0], [255, 102, 0], [255, 102, 0], [255, 76, 0], [255, 76, 0], [178, 51, 0], [178, 51, 0], [153, 20, 47], [153, 20, 47], [126, 0, 43], [126, 0, 43], [255, 0, 216], [255, 0, 216], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], ], dtype=np.uint8, ), }, "02-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": (np.random.rand(128, 3) * 255).astype(np.uint8), }, "03-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": (np.random.rand(256, 3) * 255).astype(np.uint8), }, "04-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [78, 119, 145], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [12, 12, 12], [24, 24, 24], [36, 36, 36], [48, 48, 48], [60, 60, 60], [72, 72, 72], [84, 84, 84], [96, 96, 96], [108, 108, 108], [120, 120, 120], [132, 132, 132], [144, 144, 144], [156, 156, 156], [168, 168, 168], [180, 180, 180], [192, 192, 192], [204, 204, 204], [216, 216, 216], [228, 228, 228], [240, 240, 240], [240, 240, 240], ], dtype=np.uint8, ), }, "CTTH_EFFECT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_EFFECT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -50.0, "PALETTE": " 04-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 5.0, }, "value": (np.random.rand(1856, 3712) * 255).astype(np.uint8), }, "CTTH_HEIGHT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_HEIGHT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -2000.0, "PALETTE": " 02-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 200.0, }, "value": (CTTH_HEIGHT_TEST_ARRAY), }, "CTTH_PRESS": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_PRESS", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -250.0, "PALETTE": " 01-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 25.0, }, "value": (CTTH_PRESSURE_TEST_ARRAY), }, "CTTH_QUALITY": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_QUALITY", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", 
"IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PRODUCT": b"CTTH", "SCALING_FACTOR": 1.0, }, "value": (np.random.rand(1856, 3712) * 65535).astype(np.uint16), }, "CTTH_TEMPER": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_TEMPER", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 150.0, "PALETTE": " 03-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 1.0, }, "value": (CTTH_TEMPERATURE_TEST_ARRAY), }, "attrs": { "CFAC": 13642337, "COFF": 1856, "GP_SC_ID": 323, "IMAGE_ACQUISITION_TIME": b"201611090800", "LFAC": 13642337, "LOFF": 1856, "NB_PARAMETERS": 5, "NC": 3712, "NL": 1856, "NOMINAL_PRODUCT_TIME": b"201611090816", "PACKAGE": b"SAFNWC/MSG", "PRODUCT_ALGORITHM_VERSION": b" 2.2", "PRODUCT_NAME": b"CTTH", "PROJECTION_NAME": b"GEOS<+000.0>", "REGION_NAME": b"MSG-N", "SAF": b"NWC", "SGS_PRODUCT_COMPLETENESS": 87, "SGS_PRODUCT_QUALITY": 69, "SPECTRAL_CHANNEL_ID": 0, }, } fake_ctth = OrderedDict(sorted(fake_ctth.items(), key=lambda t: t[0])) PROJ_KM = { "gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", "gdal_xgeo_up_left": -5569500.0, "gdal_ygeo_up_left": 5437500.0, "gdal_xgeo_low_right": 5566500.0, "gdal_ygeo_low_right": 2653500.0, } PROJ = { "gdal_projection": "+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h=35785863.000", "gdal_xgeo_up_left": -5569500.0, "gdal_ygeo_up_left": 5437500.0, "gdal_xgeo_low_right": 5566500.0, "gdal_ygeo_low_right": 2653500.0, } AREA_DEF_DICT = { "proj_dict": {'proj': 'geos', 'lon_0': 0, 'h': 35785831, 'x_0': 0, 'y_0': 0, 'a': 6378169, 'b': 6356583.8, 'units': 'm', 'no_defs': None, 'type': 'crs'}, "area_id": 'MSG-N', "x_size": 3712, "y_size": 1856, "area_extent": (-5570248.2825, 1501.0099, 5567247.8793, 5570247.8784) } class TestH5NWCSAF(unittest.TestCase): """Test the nwcsaf msg reader.""" def setUp(self): """Set up the tests.""" self.filename_ct = os.path.join( tempfile.gettempdir(), "SAFNWC_MSG3_CT___201611090800_MSG-N_______.PLAX.CTTH.0.h5", ) self.filename_ctth = os.path.join( tempfile.gettempdir(), "SAFNWC_MSG3_CTTH_201611090800_MSG-N_______.PLAX.CTTH.0.h5", ) def fill_h5(root, stuff): for key, val in stuff.items(): if key in ["value", "attrs"]: continue if "value" in val: root[key] = val["value"] else: grp = root.create_group(key) fill_h5(grp, stuff[key]) if "attrs" in val: for attrs, val in val["attrs"].items(): if isinstance(val, str) and val.startswith( "" ): root[key].attrs[attrs] = root[val[24:]].ref else: root[key].attrs[attrs] = val h5f = h5py.File(self.filename_ct, mode="w") fill_h5(h5f, fake_ct) for attr, val in fake_ct["attrs"].items(): h5f.attrs[attr] = val h5f.close() h5f = h5py.File(self.filename_ctth, mode="w") fill_h5(h5f, fake_ctth) for attr, val in fake_ctth["attrs"].items(): h5f.attrs[attr] = val h5f.close() def test_get_area_def(self): """Get the area definition.""" from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy import DatasetID filename_info = {} filetype_info = {} dsid = DatasetID(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) area_def = test.get_area_def(dsid) aext_res = AREA_DEF_DICT['area_extent'] for i in range(4): self.assertAlmostEqual(area_def.area_extent[i], aext_res[i], 4) proj_dict = AREA_DEF_DICT['proj_dict'] self.assertEqual(proj_dict['proj'], area_def.proj_dict['proj']) # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # 
self.assertEqual(proj_dict[key], area_def.proj_dict[key]) self.assertEqual(AREA_DEF_DICT['x_size'], area_def.width) self.assertEqual(AREA_DEF_DICT['y_size'], area_def.height) self.assertEqual(AREA_DEF_DICT['area_id'], area_def.area_id) def test_get_dataset(self): """Retrieve datasets from a NWCSAF msgv2013 hdf5 file.""" from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy import DatasetID filename_info = {} filetype_info = {} dsid = DatasetID(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CT"}) self.assertEqual(ds.shape, (1856, 3712)) self.assertEqual(ds.dtype, np.uint8) np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTYPE_TEST_FRAME) filename_info = {} filetype_info = {} dsid = DatasetID(name="ctth_alti") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_HEIGHT"}) self.assertEqual(ds.shape, (1856, 3712)) self.assertEqual(ds.dtype, np.float32) np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_HEIGHT_TEST_FRAME_RES) filename_info = {} filetype_info = {} dsid = DatasetID(name="ctth_pres") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_PRESS"}) self.assertEqual(ds.shape, (1856, 3712)) self.assertEqual(ds.dtype, np.float32) np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_PRESSURE_TEST_FRAME_RES) filename_info = {} filetype_info = {} dsid = DatasetID(name="ctth_tempe") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_TEMPER"}) self.assertEqual(ds.shape, (1856, 3712)) self.assertEqual(ds.dtype, np.float32) np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_TEMPERATURE_TEST_FRAME_RES) def tearDown(self): """Destroy.""" try: os.remove(self.filename_ct) os.remove(self.filename_ctth) except OSError: pass def suite(): """Test suite for test_writers.""" loader = unittest.TestLoader() my_suite = unittest.TestSuite() my_suite.addTest(loader.loadTestsFromTestCase(TestH5NWCSAF)) return my_suite satpy-0.20.0/satpy/tests/reader_tests/test_nwcsaf_nc.py000066400000000000000000000127541362525524100233070ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
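# Illustrative sketch of the CF-style decoding that the NcNWCSAF scaling tests
# below assert: raw values are masked against _FillValue / valid_min /
# valid_max before scale_factor and add_offset are applied. The mask-before-
# scale ordering is an assumption read off the expected [nan, 5.5, 7]-style
# results; satpy.readers.nwcsaf_nc holds the real implementation.
import numpy as np

def scale_cf(raw, scale=1.0, offset=0.0, fill=None, valid_min=None, valid_max=None):
    raw = np.asarray(raw, dtype=np.float64)
    mask = np.zeros(raw.shape, dtype=bool)
    if fill is not None:
        mask |= raw == fill
    if valid_min is not None:
        mask |= raw < valid_min
    if valid_max is not None:
        mask |= raw > valid_max
    out = raw * scale + offset
    out[mask] = np.nan
    return out

# scale_cf([1, 2, 3], scale=1.5, offset=2.5, fill=1) -> [nan, 5.5, 7.0]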
"""Unittests for NWC SAF reader.""" import unittest try: from unittest import mock except ImportError: import mock PROJ_KM = {'gdal_projection': '+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000', 'gdal_xgeo_up_left': -5569500.0, 'gdal_ygeo_up_left': 5437500.0, 'gdal_xgeo_low_right': 5566500.0, 'gdal_ygeo_low_right': 2653500.0} PROJ = {'gdal_projection': '+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h=35785863.000', 'gdal_xgeo_up_left': -5569500.0, 'gdal_ygeo_up_left': 5437500.0, 'gdal_xgeo_low_right': 5566500.0, 'gdal_ygeo_low_right': 2653500.0} class TestNcNWCSAF(unittest.TestCase): """Test the NcNWCSAF reader.""" @mock.patch('satpy.readers.nwcsaf_nc.unzip_file') @mock.patch('satpy.readers.nwcsaf_nc.xr') def setUp(self, xr_, unzip): """Set up the test case.""" from satpy.readers.nwcsaf_nc import NcNWCSAF xr_.return_value = mock.Mock(attrs={}) unzip.return_value = '' self.scn = NcNWCSAF('filename', {}, {}) def test_get_projection(self): """Test generation of the navigation info.""" # a, b and h in kilometers self.scn.nc.attrs = PROJ_KM proj_str, area_extent = self.scn._get_projection() self.assertTrue('+units=km' in proj_str) self.assertAlmostEqual(area_extent[0], PROJ_KM['gdal_xgeo_up_left'] / 1000.) self.assertAlmostEqual(area_extent[1], PROJ_KM['gdal_ygeo_low_right'] / 1000.) self.assertAlmostEqual(area_extent[2], PROJ_KM['gdal_xgeo_low_right'] / 1000.) self.assertAlmostEqual(area_extent[3], PROJ_KM['gdal_ygeo_up_left'] / 1000.) # a, b and h in meters self.scn.nc.attrs = PROJ proj_str, area_extent = self.scn._get_projection() self.assertTrue('+units=m' in proj_str) self.assertAlmostEqual(area_extent[0], PROJ['gdal_xgeo_up_left']) self.assertAlmostEqual(area_extent[1], PROJ['gdal_ygeo_low_right']) self.assertAlmostEqual(area_extent[2], PROJ['gdal_xgeo_low_right']) self.assertAlmostEqual(area_extent[3], PROJ['gdal_ygeo_up_left']) def test_scale_dataset_attr_removal(self): """Test the scaling of the dataset and removal of obsolete attributes.""" import numpy as np import xarray as xr attrs = {'scale_factor': np.array(10), 'add_offset': np.array(20)} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.scn.scale_dataset('dummy', var, 'dummy') np.testing.assert_allclose(var, [30, 40, 50]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) def test_scale_dataset_floating(self): """Test the scaling of the dataset with floating point values.""" import numpy as np import xarray as xr attrs = {'scale_factor': np.array(1.5), 'add_offset': np.array(2.5), '_FillValue': 1} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.scn.scale_dataset('dummy', var, 'dummy') np.testing.assert_allclose(var, [np.nan, 5.5, 7]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) attrs = {'scale_factor': np.array(1.5), 'add_offset': np.array(2.5), 'valid_min': 1.1} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.scn.scale_dataset('dummy', var, 'dummy') np.testing.assert_allclose(var, [np.nan, 5.5, 7]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) attrs = {'scale_factor': np.array(1.5), 'add_offset': np.array(2.5), 'valid_max': 2.1} var = xr.DataArray([1, 2, 3], attrs=attrs) var = self.scn.scale_dataset('dummy', var, 'dummy') np.testing.assert_allclose(var, [4, 5.5, np.nan]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) attrs = {'scale_factor': np.array(1.5), 'add_offset': np.array(2.5), 'valid_range': (1.1, 2.1)} var = 
xr.DataArray([1, 2, 3], attrs=attrs) var = self.scn.scale_dataset('dummy', var, 'dummy') np.testing.assert_allclose(var, [np.nan, 5.5, np.nan]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) def suite(): """Test suite for test_writers.""" loader = unittest.TestLoader() my_suite = unittest.TestSuite() my_suite.addTest(loader.loadTestsFromTestCase(TestNcNWCSAF)) return my_suite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_olci_nc.py000066400000000000000000000224561362525524100227540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.olci_nc module.""" import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: import unittest.mock as mock except ImportError: import mock class TestOLCIReader(unittest.TestCase): """Test various olci_nc filehandlers.""" @mock.patch('xarray.open_dataset') def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import (NCOLCIBase, NCOLCICal, NCOLCIGeo, NCOLCIChannelBase, NCOLCI1B, NCOLCI2) from satpy import DatasetID import xarray as xr cal_data = xr.Dataset( { 'solar_flux': (('bands'), [0, 1, 2]), 'detector_index': (('bands'), [0, 1, 2]), }, {'bands': [0, 1, 2], }, ) ds_id = DatasetID(name='Oa01', calibration='reflectance') ds_id2 = DatasetID(name='wsqf', calibration='reflectance') filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} test = NCOLCIBase('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCICal('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCIGeo('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCIChannelBase('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() cal = mock.Mock() cal.nc = cal_data test = NCOLCI1B('somedir/somefile.nc', filename_info, 'c', cal) test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, {'nc_key': 'the_key'}) test.get_dataset(ds_id2, {'nc_key': 'the_key'}) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): """Test reading datasets.""" from satpy.readers.olci_nc import NCOLCI2 from satpy import DatasetID import numpy as np import xarray as xr mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 
'columns'], np.array([1 << x for x in range(30)]).reshape(5, 6))}, coords={'rows': np.arange(5), 'columns': np.arange(6)}) ds_id = DatasetID(name='mask') filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') res = test.get_dataset(ds_id, {'nc_key': 'mask'}) self.assertEqual(res.dtype, np.dtype('bool')) @mock.patch('xarray.open_dataset') def test_olci_angles(self, mocked_dataset): """Test reading datasets.""" from satpy.readers.olci_nc import NCOLCIAngles from satpy import DatasetID import numpy as np import xarray as xr attr_dict = { 'ac_subsampling_factor': 1, 'al_subsampling_factor': 2, } mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'SZA': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'OAA': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'OZA': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6))}, coords={'rows': np.arange(5), 'columns': np.arange(6)}, attrs=attr_dict) filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} ds_id = DatasetID(name='solar_azimuth_angle') ds_id2 = DatasetID(name='satellite_zenith_angle') test = NCOLCIAngles('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch('xarray.open_dataset') def test_olci_meteo(self, mocked_dataset): """Test reading datasets.""" from satpy.readers.olci_nc import NCOLCIMeteo from satpy import DatasetID import numpy as np import xarray as xr attr_dict = { 'ac_subsampling_factor': 1, 'al_subsampling_factor': 2, } data = {'humidity': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'total_ozone': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'sea_level_pressure': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6)), 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, coords={'rows': np.arange(5), 'columns': np.arange(6)}, attrs=attr_dict) filename_info = {'mission_id': 'S3A', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0} ds_id = DatasetID(name='humidity') ds_id2 = DatasetID(name='total_ozone') test = NCOLCIMeteo('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() class TestBitFlags(unittest.TestCase): """Test the bitflag reading.""" def test_bitflags(self): """Test the BitFlags class.""" import numpy as np from six.moves import reduce from satpy.readers.olci_nc import BitFlags flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN', 'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS', 'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL', 'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2', 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', 'WHITE_SCATT', 'LOWRW', 'HIGHRW'] bits = np.array([1 << x for x in range(len(flag_list))]) bflags = BitFlags(bits) items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", "CLOUD_MARGIN", 
"CLOUD_AMBIGUOUS", "LOWRW", "LAND"] mask = reduce(np.logical_or, [bflags[item] for item in items]) expected = np.array([True, False, True, True, True, True, False, False, True, True, False, False, False, False, False, False, False, True, False, True, False, False, False, True, True, False, False, True, False]) self.assertTrue(all(mask == expected)) def suite(): """Test suite for test_nc_slstr.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestBitFlags)) mysuite.addTest(loader.loadTestsFromTestCase(TestOLCIReader)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_omps_edr.py000066400000000000000000000211411362525524100231440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.omps_edr module. """ import os import sys import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import convert_file_content_to_data_array if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" file_content = {} if 'SO2NRT' in filename: k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM' file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/ScaleFactor'] = 1.1 file_content[k + '/attr/Offset'] = 0.1 file_content[k + '/attr/MissingValue'] = -1 file_content[k + '/attr/Title'] = 'Vertical Column Amount SO2 (TRM)' file_content[k + '/attr/Units'] = 'D.U.' 
file_content[k + '/attr/ValidRange'] = (-10, 2000) k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude' file_content[k] = DEFAULT_LON_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/ScaleFactor'] = 1.1 file_content[k + '/attr/Offset'] = 0.1 file_content[k + '/attr/Units'] = 'deg' file_content[k + '/attr/MissingValue'] = -1 file_content[k + '/attr/Title'] = 'Geodetic Longitude' file_content[k + '/attr/ValidRange'] = (-180, 180) k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude' file_content[k] = DEFAULT_LAT_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/ScaleFactor'] = 1.1 file_content[k + '/attr/Offset'] = 0.1 file_content[k + '/attr/Units'] = 'deg' file_content[k + '/attr/MissingValue'] = -1 file_content[k + '/attr/Title'] = 'Geodetic Latitude' file_content[k + '/attr/ValidRange'] = (-90, 90) else: for k in ['Reflectivity331', 'UVAerosolIndex']: k = 'SCIENCE_DATA/' + k file_content[k] = DEFAULT_FILE_DATA file_content[k + '/shape'] = DEFAULT_FILE_SHAPE file_content[k + '/attr/Units'] = 'Unitless' if k == 'UVAerosolIndex': file_content[k + '/attr/ValidRange'] = (-30, 30) file_content[k + '/attr/Title'] = 'UV Aerosol Index' else: file_content[k + '/attr/ValidRange'] = (-0.15, 1.15) file_content[k + '/attr/Title'] = 'Effective Surface Reflectivity at 331 nm' file_content[k + '/attr/_FillValue'] = -1. file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE file_content['GEOLOCATION_DATA/Longitude/attr/ValidRange'] = (-180, 180) file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -999. file_content['GEOLOCATION_DATA/Longitude/attr/Title'] = 'Geodetic Longitude' file_content['GEOLOCATION_DATA/Longitude/attr/Units'] = 'deg' file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE file_content['GEOLOCATION_DATA/Latitude/attr/ValidRange'] = (-90, 90) file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -999. 
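# Illustrative sketch of the bit-flag decoding exercised by TestBitFlags in
# test_olci_nc.py above (an assumption; satpy.readers.olci_nc.BitFlags is the
# real implementation): each named flag occupies one bit position in the mask.
import numpy as np
from functools import reduce

class SimpleBitFlags:
    def __init__(self, data, names):
        self._data = np.asarray(data)
        self._bit = {name: pos for pos, name in enumerate(names)}

    def __getitem__(self, name):
        return ((self._data >> self._bit[name]) & 1).astype(bool)

_flags = SimpleBitFlags(np.array([0b101]), ["INVALID", "WATER", "LAND"])
_mask = reduce(np.logical_or, (_flags[n] for n in ("INVALID", "LAND")))  # [True]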
file_content['GEOLOCATION_DATA/Latitude/attr/Title'] = 'Geodetic Latitude' file_content['GEOLOCATION_DATA/Latitude/attr/Units'] = 'deg' convert_file_content_to_data_array(file_content) return file_content class TestOMPSEDRReader(unittest.TestCase): """Test OMPS EDR Reader""" yaml_file = "omps_edr.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.omps_edr import EDRFileHandler, EDREOSFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(EDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True self.p2 = mock.patch.object(EDREOSFileHandler, '__bases__', (EDRFileHandler,)) self.fake_handler2 = self.p2.start() self.p2.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler""" self.p2.stop() self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', ]) self.assertTrue(len(loadables), 2) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_basic_load_so2(self): """Test basic load of so2 datasets""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', ]) self.assertTrue(len(loadables), 2) r.create_filehandlers(loadables) ds = r.load(['so2_trm']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['resolution'], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def test_basic_load_to3(self): """Test basic load of to3 datasets""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', ]) self.assertTrue(len(loadables), 2) r.create_filehandlers(loadables) ds = r.load(['reflectivity_331', 'uvaerosol_index']) self.assertEqual(len(ds), 2) for d in ds.values(): self.assertEqual(d.attrs['resolution'], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def suite(): """The test suite for test_omps_edr. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestOMPSEDRReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py000066400000000000000000000117231362525524100243600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.safe_sar_l2_ocn module. """ import sys import numpy as np import xarray as xr from satpy import DatasetID if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: import unittest.mock as mock except ImportError: import mock class TestSAFENC(unittest.TestCase): """Test various SAFE SAR L2 OCN file handlers.""" @mock.patch('satpy.readers.safe_sar_l2_ocn.xr') @mock.patch.multiple('satpy.readers.safe_sar_l2_ocn.SAFENC', __abstractmethods__=set()) def setUp(self, xr_): from satpy.readers.safe_sar_l2_ocn import SAFENC self.channels = ['owiWindSpeed', 'owiLon', 'owiLat', 'owiHs', 'owiNrcs', 'foo', 'owiPolarisationName', 'owiCalConstObsi'] # Mock file access to return a fake dataset. self.dummy3d = np.zeros((2, 2, 1)) self.dummy2d = np.zeros((2, 2)) self.dummy1d = np.zeros((2)) self.band = 1 self.nc = xr.Dataset( {'owiWindSpeed': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize'), attrs={'_FillValue': np.nan}), 'owiLon': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), 'owiLat': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), 'owiHs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPartition')), 'owiNrcs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPolarization')), 'foo': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize')), 'owiPolarisationName': xr.DataArray(self.dummy1d, dims=('owiPolarisation')), 'owiCalConstObsi': xr.DataArray(self.dummy1d, dims=('owiIncSize')) }, attrs={'_FillValue': np.nan, 'missionName': 'S1A'}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. self.reader = SAFENC(filename='dummy', filename_info={'start_time': 0, 'end_time': 0, 'fstart_time': 0, 'fend_time': 0, 'polarization': 'vv'}, filetype_info={}) def test_init(self): """Tests reader initialization""" self.assertEqual(self.reader.start_time, 0) self.assertEqual(self.reader.end_time, 0) self.assertEqual(self.reader.fstart_time, 0) self.assertEqual(self.reader.fend_time, 0) def test_get_dataset(self): for ch in self.channels: dt = self.reader.get_dataset( key=DatasetID(name=ch), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.nc[ch] == dt.to_masked_array()), msg='get_dataset() returns invalid data for ' 'dataset {}'.format(ch)) # @mock.patch('xarray.open_dataset') # def test_init(self, mocked_dataset): # """Test basic init with no extra parameters.""" # from satpy.readers.safe_sar_l2_ocn import SAFENC # from satpy import DatasetID # # print(mocked_dataset) # ds_id = DatasetID(name='foo') # filename_info = {'mission_id': 'S3A', 'product_type': 'foo', # 'start_time': 0, 'end_time': 0, # 'fstart_time': 0, 'fend_time': 0, # 'polarization': 'vv'} # # test = SAFENC('S1A_IW_OCN__2SDV_20190228T075834_20190228T075849_026127_02EA43_8846.SAFE/measurement/' # 's1a-iw-ocn-vv-20190228t075741-20190228t075800-026127-02EA43-001.nc', filename_info, 'c') # print(test) # mocked_dataset.assert_called() # test.get_dataset(ds_id, filename_info) def suite(): """The test suite for test_safe_sar_l2_ocn.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSAFENC)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_sar_c_safe.py000066400000000000000000000042141362525524100234230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.sar-c_safe module. """ import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: import unittest.mock as mock except ImportError: import mock class TestSAFEGRD(unittest.TestCase): """Test various nc_slstr file handlers.""" @mock.patch('rasterio.open') def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" from satpy.readers.sar_c_safe import SAFEGRD filename_info = {'mission_id': 'S1A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, 'polarization': 'vv'} filetype_info = 'bla' noisefh = mock.MagicMock() calfh = mock.MagicMock() test = SAFEGRD('S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/s1a-iw-grd' '-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff', filename_info, filetype_info, calfh, noisefh) assert(test._polarization == 'vv') assert(test.calibration == calfh) assert(test.noise == noisefh) mocked_dataset.assert_called() def suite(): """The test suite for test_sar_c_safe.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSAFEGRD)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_scmi.py000066400000000000000000000242331362525524100222740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The scmi_abi_l1b reader tests package. """ import sys import numpy as np import xarray as xr if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class FakeDataset(object): def __init__(self, info, attrs, dims=None): for var_name, var_data in list(info.items()): if isinstance(var_data, np.ndarray): info[var_name] = xr.DataArray(var_data) self.info = info self.attrs = attrs self.dims = dims or {} def __getitem__(self, key): return self.info.get(key, self.dims.get(key)) def __contains__(self, key): return key in self.info or key in self.dims def rename(self, *args, **kwargs): return self def close(self): return class TestSCMIFileHandler(unittest.TestCase): """Test the SCMIFileHandler reader.""" @mock.patch('satpy.readers.scmi.xr') def setUp(self, xr_): """Setup for test.""" from satpy.readers.scmi import SCMIFileHandler rad_data = (np.arange(10.).reshape((2, 5)) + 1.) rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) self.expected_rad = rad_data.astype(np.float64) * 0.5 + -1. self.expected_rad[-1, -2] = np.nan time = xr.DataArray(0.) rad = xr.DataArray( rad_data, dims=('y', 'x'), attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 20, 'standard_name': 'toa_bidirectional_reflectance', }, coords={ 'time': time, } ) xr_.open_dataset.return_value = FakeDataset( { 'Sectorized_CMI': rad, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), }, { 'start_date_time': "2017210120000", 'satellite_id': 'GOES-16', 'satellite_longitude': -90., 'satellite_latitude': 0., 'satellite_altitude': 35785831., }, {'y': 2, 'x': 5}, ) self.reader = SCMIFileHandler('filename', {'platform_shortname': 'G16'}, {'filetype': 'info'}) def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime from satpy import DatasetID self.assertEqual(self.reader.start_time, datetime(2017, 7, 29, 12, 0, 0, 0)) self.assertEqual(self.reader.end_time, datetime(2017, 7, 29, 12, 0, 0, 0)) self.assertEqual(self.reader.get_shape(DatasetID(name='C05'), {}), (2, 5)) def test_data_load(self): """Test data loading.""" from satpy import DatasetID res = self.reader.get_dataset( DatasetID(name='C05', calibration='reflectance'), {}) np.testing.assert_allclose(res.data, self.expected_rad, equal_nan=True) self.assertNotIn('scale_factor', res.attrs) self.assertNotIn('_FillValue', res.attrs) self.assertEqual(res.attrs['standard_name'], 'toa_bidirectional_reflectance') class TestSCMIFileHandlerArea(unittest.TestCase): """Test the SCMIFileHandler's area creation.""" @mock.patch('satpy.readers.scmi.xr') def create_reader(self, proj_name, proj_attrs, xr_): """Create a fake reader.""" from satpy.readers.scmi import SCMIFileHandler proj = xr.DataArray([], attrs=proj_attrs) x__ = xr.DataArray( [0, 1], attrs={'scale_factor': 2., 
'add_offset': -1., 'units': 'meters'}, ) y__ = xr.DataArray( [0, 1], attrs={'scale_factor': -2., 'add_offset': 1., 'units': 'meters'}, ) xr_.open_dataset.return_value = FakeDataset({ 'goes_imager_projection': proj, 'x': x__, 'y': y__, 'Sectorized_CMI': np.ones((2, 2))}, { 'satellite_id': 'GOES-16', 'grid_mapping': proj_name, }, { 'y': y__.size, 'x': x__.size, } ) return SCMIFileHandler('filename', {'platform_shortname': 'G16'}, {'filetype': 'info'}) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_geos(self, adef): """Test the area generation for geos projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'perspective_point_height': 1., 'longitude_of_projection_origin': -90., 'sweep_angle_axis': u'x', 'grid_mapping_name': 'geostationary', } ) reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { 'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_lcc(self, adef): """Test the area generation for lcc projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'longitude_of_central_meridian': -90., 'standard_parallel': 25., 'latitude_of_projection_origin': 25., 'grid_mapping_name': 'lambert_conformal_conic', } ) reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 25.0, 'lat_1': 25.0, 'proj': 'lcc', 'units': 'm'}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_stere(self, adef): """Test the area generation for stere projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'straight_vertical_longitude_from_pole': -90., 'standard_parallel': 60., 'latitude_of_projection_origin': 90., 'grid_mapping_name': 'polar_stereographic', } ) reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 90.0, 'lat_ts': 60.0, 'proj': 'stere', 'units': 'm'}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_merc(self, adef): """Test the area generation for merc projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'longitude_of_projection_origin': -90., 'standard_parallel': 0., 'grid_mapping_name': 'mercator', } ) reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, 'lat_ts': 0.0, 'proj': 'merc', 'units': 'm'}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) 
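# Illustrative sketch of why these area tests expect extents of
# (-2., -2., 2., 2.): the x/y variables hold pixel *centres* as scaled
# integers (index * scale_factor + add_offset), while an area extent is the
# outer pixel *edge*, i.e. the centres padded by half a pixel on each side.
# This is a sketch of that relation, not satpy's code (see
# satpy.readers.abi_base for the real implementation).
import numpy as np

def edge_extent(index, scale, offset):
    centres = np.asarray(index) * scale + offset
    half_pixel = abs(scale) / 2.0
    return centres.min() - half_pixel, centres.max() + half_pixel

# x: [0, 1] * 2.0 - 1.0 -> centres [-1, 1] -> edges (-2.0, 2.0)
# y: [0, 1] * -2.0 + 1.0 -> centres [1, -1] -> edges (-2.0, 2.0)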
np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_bad(self, adef): """Test the area generation for bad projection.""" reader = self.create_reader( 'goes_imager_projection', { 'semi_major_axis': 1., 'semi_minor_axis': 1., 'longitude_of_projection_origin': -90., 'standard_parallel': 0., 'grid_mapping_name': 'fake', } ) self.assertRaises(ValueError, reader.get_area_def, None) def suite(): """The test suite for test_scene. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSCMIFileHandler)) mysuite.addTest(loader.loadTestsFromTestCase(TestSCMIFileHandlerArea)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_seviri_base.py000066400000000000000000000053671362525524100236430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test the MSG common (native and hrit format) functionionalities """ import sys import numpy as np from satpy.readers.seviri_base import dec10216, chebyshev, get_cds_time if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest def chebyshev4(c, x, domain): """Evaluate 4th order Chebyshev polynomial""" start_x, end_x = domain t = (x - 0.5 * (end_x + start_x)) / (0.5 * (end_x - start_x)) return c[0] + c[1]*t + c[2]*(2*t**2 - 1) + c[3]*(4*t**3 - 3*t) - 0.5*c[0] class SeviriBaseTest(unittest.TestCase): def test_dec10216(self): """Test the dec10216 function.""" res = dec10216(np.array([255, 255, 255, 255, 255], dtype=np.uint8)) exp = (np.ones((4, )) * 1023).astype(np.uint16) self.assertTrue(np.all(res == exp)) res = dec10216(np.array([1, 1, 1, 1, 1], dtype=np.uint8)) exp = np.array([4, 16, 64, 257], dtype=np.uint16) self.assertTrue(np.all(res == exp)) def test_chebyshev(self): coefs = [1, 2, 3, 4] time = 123 domain = [120, 130] res = chebyshev(coefs=[1, 2, 3, 4], time=time, domain=domain) exp = chebyshev4(coefs, time, domain) self.assertTrue(np.allclose(res, exp)) def test_get_cds_time(self): # Scalar self.assertEqual(get_cds_time(days=21246, msecs=12*3600*1000), np.datetime64('2016-03-03 12:00')) # Array days = np.array([21246, 21247, 21248]) msecs = np.array([12*3600*1000, 13*3600*1000 + 1, 14*3600*1000 + 2]) expected = np.array([np.datetime64('2016-03-03 12:00:00.000'), np.datetime64('2016-03-04 13:00:00.001'), np.datetime64('2016-03-05 14:00:00.002')]) self.assertTrue(np.all(get_cds_time(days=days, msecs=msecs) == expected)) def suite(): """The test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(SeviriBaseTest)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_seviri_l1b_calibration.py000066400000000000000000000217641362525524100257550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 
2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittesting the native msg reader """ import sys import numpy as np from satpy.readers.seviri_base import SEVIRICalibrationHandler if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest COUNTS_INPUT = np.array([[377., 377., 377., 376., 375.], [376., 375., 376., 374., 374.], [374., 373., 373., 374., 374.], [347., 345., 345., 348., 347.], [306., 306., 307., 307., 308.]], dtype=np.float32) RADIANCES_OUTPUT = np.array([[66.84162903, 66.84162903, 66.84162903, 66.63659668, 66.4315567], [66.63659668, 66.4315567, 66.63659668, 66.22652435, 66.22652435], [66.22652435, 66.02148438, 66.02148438, 66.22652435, 66.22652435], [60.69055939, 60.28048706, 60.28048706, 60.89559937, 60.69055939], [52.28409576, 52.28409576, 52.48912811, 52.48912811, 52.69416809]], dtype=np.float32) GAIN = 0.20503567620766011 OFFSET = -10.456819486590666 CAL_TYPE1 = 1 CAL_TYPE2 = 2 CHANNEL_NAME = 'IR_108' PLATFORM_ID = 323 # Met-10 TBS_OUTPUT1 = np.array([[269.29684448, 269.29684448, 269.29684448, 269.13296509, 268.96871948], [269.13296509, 268.96871948, 269.13296509, 268.80422974, 268.80422974], [268.80422974, 268.63937378, 268.63937378, 268.80422974, 268.80422974], [264.23751831, 263.88912964, 263.88912964, 264.41116333, 264.23751831], [256.77682495, 256.77682495, 256.96743774, 256.96743774, 257.15756226]], dtype=np.float32) TBS_OUTPUT2 = np.array([[268.94519043, 268.94519043, 268.94519043, 268.77984619, 268.61422729], [268.77984619, 268.61422729, 268.77984619, 268.44830322, 268.44830322], [268.44830322, 268.28204346, 268.28204346, 268.44830322, 268.44830322], [263.84396362, 263.49285889, 263.49285889, 264.01898193, 263.84396362], [256.32858276, 256.32858276, 256.52044678, 256.52044678, 256.71188354]], dtype=np.float32) VIS008_SOLAR_IRRADIANCE = 23.29414028785013 VIS008_RADIANCE = np.array([[0.62234485, 0.59405649, 0.59405649, 0.59405649, 0.59405649], [0.59405649, 0.62234485, 0.62234485, 0.59405649, 0.62234485], [0.76378691, 0.79207528, 0.79207528, 0.76378691, 0.79207528], [3.30974245, 3.33803129, 3.33803129, 3.25316572, 3.47947311], [7.52471399, 7.83588648, 8.2602129, 8.57138538, 8.99571133]], dtype=np.float32) VIS008_REFLECTANCE = np.array([[2.67167997, 2.55024004, 2.55024004, 2.55024004, 2.55024004], [2.55024004, 2.67167997, 2.67167997, 2.55024004, 2.67167997], [3.27888012, 3.40032005, 3.40032005, 3.27888012, 3.40032005], [14.20847702, 14.32991886, 14.32991886, 13.96559715, 14.93711853], [32.30303574, 33.63887405, 35.46047592, 36.79631805, 38.61791611]], dtype=np.float32) # -- CAL_DTYPE = np.array([[(0.0208876, -1.06526761), (0.0278805, -1.42190546), (0.0235881, -1.20299312), (0.00365867, -0.18659201), (0.00831811, -0.42422367), (0.03862197, -1.96972038), (0.12674432, -6.46396025), (0.10396091, -5.30200645), (0.20503568, -10.45681949), (0.22231115, -11.33786848), (0.1576069, -8.03795174), (0.0373969, -1.90724192)]], dtype=[('CalSlope', '>f8'), ('CalOffset', '>f8')]) 
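# Illustrative sketch: the count -> radiance step exercised by
# test_convert_to_radiance below is a plain linear calibration,
# L = counts * gain + offset, with the gain/offset pair coming from the
# CalSlope / CalOffset fields of the Level 1.5 header (a sketch, not
# satpy's implementation):
def counts_to_radiance(counts, gain, offset):
    return counts * gain + offset

# counts_to_radiance(377.0, 0.20503567620766011, -10.456819486590666)
#   -> 66.8416..., matching RADIANCES_OUTPUT[0, 0] above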
IR_108_RADIANCES = np.ma.array([[133.06815651, 133.68326355, 134.29837059, 134.91347763, 135.52858467], [136.14369171, 136.75879875, 137.37390579, 137.98901283, 138.60411987], [139.21922691, 139.83433395, 140.44944099, 141.06454803, 141.67965507]], mask=False, dtype=np.float64) VIS006_RADIANCES = np.ma.array([[13.55605239, 13.61871519, 13.68137799, 13.74404079, 13.80670359], [13.86936639, 13.93202919, 13.99469199, 14.05735479, 14.12001759], [14.18268039, 14.24534319, 14.30800599, 14.37066879, 14.43333159]], mask=False, dtype=np.float64) VIS006_REFLECTANCES = np.array([[65.00454035, 65.30502359, 65.60550682, 65.90599006, 66.2064733], [66.50695654, 66.80743977, 67.10792301, 67.40840625, 67.70888949], [68.00937272, 68.30985596, 68.6103392, 68.91082244, 69.21130567]], dtype=np.float64) IR_108_TBS = np.array([[311.77913132, 312.11070275, 312.44143083, 312.77132215, 313.10038322], [313.42862046, 313.75604023, 314.0826488, 314.40845236, 314.73345704], [315.05766888, 315.38109386, 315.70373788, 316.02560677, 316.34670629]], dtype=np.float64) # Calibration type = Effective radiances CALIBRATION_TYPE = np.array( [[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]], dtype=np.uint8) # This should preferably be put in a helper-module # Fixme! def assertNumpyArraysEqual(self, other): if self.shape != other.shape: raise AssertionError("Shapes don't match") if not np.allclose(self, other): raise AssertionError("Elements don't match!") class TestSEVIRICalibrationHandler(unittest.TestCase): """Test the SEVIRICalibrationHandler class in the msg_base module""" def setUp(self): """Setup the SEVIRI Calibration handler for testing.""" hdr = {} hdr['15_DATA_HEADER'] = {} hdr['15_DATA_HEADER']['RadiometricProcessing'] = { 'Level15ImageCalibration': CAL_DTYPE} hdr['15_DATA_HEADER']['ImageDescription'] = {} hdr['15_DATA_HEADER']['ImageDescription']['Level15ImageProduction'] = { 'PlannedChanProcessing': CALIBRATION_TYPE} self.handler = SEVIRICalibrationHandler() self.handler.platform_id = PLATFORM_ID def test_convert_to_radiance(self): """Test the conversion from counts to radiance method""" data = COUNTS_INPUT gain = GAIN offset = OFFSET result = self.handler._convert_to_radiance(data, gain, offset) assertNumpyArraysEqual(result, RADIANCES_OUTPUT) def test_ir_calibrate(self): result = self.handler._ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE1) assertNumpyArraysEqual(result, TBS_OUTPUT1) result = self.handler._ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE2) assertNumpyArraysEqual(result, TBS_OUTPUT2) def test_vis_calibrate(self): result = self.handler._vis_calibrate(VIS008_RADIANCE, VIS008_SOLAR_IRRADIANCE) assertNumpyArraysEqual(result, VIS008_REFLECTANCE) def tearDown(self): pass def suite(): """The test suite for test_scene. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSEVIRICalibrationHandler)) return mysuite if __name__ == "__main__": # So you can run tests from this module individually. unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_seviri_l1b_hrit.py000066400000000000000000001135071362525524100244310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The HRIT msg reader tests package.""" import sys from datetime import datetime import numpy as np import xarray as xr from satpy.readers.seviri_l1b_hrit import (HRITMSGFileHandler, HRITMSGPrologueFileHandler, HRITMSGEpilogueFileHandler, NoValidOrbitParams, pad_data) from satpy.readers.seviri_base import CHANNEL_NAMES, VIS_CHANNELS from satpy.dataset import DatasetID if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock def new_get_hd(instance, hdr_info): """Generate some metadata.""" instance.mda = {'spectral_channel_id': 1} instance.mda.setdefault('number_of_bits_per_pixel', 10) instance.mda['projection_parameters'] = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'SSP_longitude': 0.0} instance.mda['orbital_parameters'] = {} instance.mda['total_header_length'] = 12 class TestHRITMSGFileHandlerHRV(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch('satpy.readers.seviri_l1b_hrit.np.fromfile') def setUp(self, fromfile): """Set up the hrit file handler for testing HRV.""" m = mock.mock_open() fromfile.return_value = np.array([(1, 2)], dtype=[('total_header_length', int), ('hdr_id', int)]) with mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen: with mock.patch('satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES'): with mock.patch.object(HRITMSGFileHandler, '_get_hd', new=new_get_hd): newopen.return_value.__enter__.return_value.tell.return_value = 1 prologue = mock.MagicMock() prologue.prologue = {"SatelliteStatus": {"SatelliteDefinition": {"SatelliteId": 324, "NominalLongitude": 47}}, 'GeometricProcessing': {'EarthModel': {'TypeOfEarthModel': 2, 'NorthPolarRadius': 10, 'SouthPolarRadius': 10, 'EquatorialRadius': 10}}, 'ImageDescription': {'ProjectionDescription': {'LongitudeOfSSP': 0.0}, 'Level15ImageProduction': {'ImageProcDirection': 1}}} prologue.get_satpos.return_value = None, None, None prologue.get_earth_radii.return_value = None, None epilogue = mock.MagicMock() epilogue.epilogue = {'ImageProductionStats': {'ActualL15CoverageHRV': {'LowerSouthLineActual': 1, 'LowerNorthLineActual': 8256, 'LowerEastColumnActual': 2877, 'LowerWestColumnActual': 8444, 'UpperSouthLineActual': 8257, 'UpperNorthLineActual': 11136, 'UpperEastColumnActual': 1805, 'UpperWestColumnActual': 7372}}} self.reader = HRITMSGFileHandler( 'filename', {'platform_shortname': 'MSG3', 'start_time': datetime(2016, 3, 3, 0, 0), 'service': 'MSG'}, {'filetype': 'info'}, prologue, epilogue) ncols = 5568 nlines = 464 nbits = 10 self.reader.fill_hrv = True self.reader.mda['number_of_bits_per_pixel'] = nbits self.reader.mda['number_of_lines'] = nlines self.reader.mda['number_of_columns'] = ncols self.reader.mda['data_field_length'] = nlines * ncols * nbits self.reader.mda['cfac'] = 5 self.reader.mda['lfac'] = 5 self.reader.mda['coff'] = 10 self.reader.mda['loff'] = 10 self.reader.mda['projection_parameters'] = {} self.reader.mda['projection_parameters']['a'] = 6378169.0 self.reader.mda['projection_parameters']['b'] = 6356583.8 self.reader.mda['projection_parameters']['h'] = 35785831.0 self.reader.mda['projection_parameters']['SSP_longitude'] = 44 
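        # The projection longitude (44 deg) differs from both the nominal
        # (47 deg) and the actual (47.5 deg) satellite longitude set below,
        # which lets the get_dataset tests tell the three attributes apart.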
self.reader.mda['projection_parameters']['SSP_latitude'] = 0.0 self.reader.mda['orbital_parameters'] = {} self.reader.mda['orbital_parameters']['satellite_nominal_longitude'] = 47 self.reader.mda['orbital_parameters']['satellite_nominal_latitude'] = 0.0 self.reader.mda['orbital_parameters']['satellite_actual_longitude'] = 47.5 self.reader.mda['orbital_parameters']['satellite_actual_latitude'] = -0.5 self.reader.mda['orbital_parameters']['satellite_actual_altitude'] = 35783328 self.reader.mda['segment_sequence_number'] = 18 self.reader.mda['planned_start_segment_number'] = 1 tline = np.zeros(nlines, dtype=[('days', '>u2'), ('milliseconds', '>u4')]) tline['days'][1:-1] = 21246 * np.ones(nlines-2) # 2016-03-03 tline['milliseconds'][1:-1] = np.arange(nlines-2) self.reader.mda['image_segment_line_quality'] = {'line_mean_acquisition': tline} @mock.patch('satpy.readers.hrit_base.np.memmap') def test_read_hrv_band(self, memmap): """Test reading the hrv band.""" nbits = self.reader.mda['number_of_bits_per_pixel'] memmap.return_value = np.random.randint(0, 256, size=int((464 * 5568 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band('HRV', None) self.assertEqual(res.shape, (464, 5568)) @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_timestamps') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') def test_get_dataset(self, calibrate, parent_get_dataset, _get_timestamps): """Test getting the hrv dataset.""" key = mock.MagicMock(calibration='calibration') key.name = 'HRV' info = {'units': 'units', 'wavelength': 'wavelength', 'standard_name': 'standard_name'} timestamps = np.arange(0, 464, dtype='datetime64[ns]') parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) _get_timestamps.return_value = timestamps res = self.reader.get_dataset(key, info) self.assertEqual(res.shape, (464, 11136)) # Test method calls parent_get_dataset.assert_called_with(key, info) calibrate.assert_called_with(parent_get_dataset(), key.calibration) # Test attributes (just check if raw metadata is there and then remove it before checking the remaining # attributes) attrs_exp = info.copy() attrs_exp.update({ 'platform_name': self.reader.platform_name, 'sensor': 'seviri', 'satellite_longitude': self.reader.mda['projection_parameters']['SSP_longitude'], 'satellite_latitude': self.reader.mda['projection_parameters']['SSP_latitude'], 'satellite_altitude': self.reader.mda['projection_parameters']['h'], 'orbital_parameters': {'projection_longitude': 44, 'projection_latitude': 0., 'projection_altitude': 35785831.0, 'satellite_nominal_longitude': 47, 'satellite_nominal_latitude': 0.0, 'satellite_actual_longitude': 47.5, 'satellite_actual_latitude': -0.5, 'satellite_actual_altitude': 35783328}, 'georef_offset_corrected': self.reader.mda['offset_corrected'] }) self.assertIn('raw_metadata', res.attrs) res.attrs.pop('raw_metadata') self.assertDictEqual(attrs_exp, res.attrs) # Test timestamps self.assertTrue(np.all(res['acq_time'] == timestamps)) self.assertEqual(res['acq_time'].attrs['long_name'], 'Mean scanline acquisition time') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_timestamps') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') def test_get_dataset_non_fill(self, calibrate, parent_get_dataset, _get_timestamps): """Test 
getting a non-filled hrv dataset.""" key = mock.MagicMock(calibration='calibration') key.name = 'HRV' info = {'units': 'units', 'wavelength': 'wavelength', 'standard_name': 'standard_name'} timestamps = np.arange(0, 464, dtype='datetime64[ns]') self.reader.fill_hrv = False parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) _get_timestamps.return_value = timestamps res = self.reader.get_dataset(key, info) self.assertEqual(res.shape, (464, 5568)) # Test method calls parent_get_dataset.assert_called_with(key, info) calibrate.assert_called_with(parent_get_dataset(), key.calibration) # Test attributes (just check if raw metadata is there and then remove it before checking the remaining # attributes) attrs_exp = info.copy() attrs_exp.update({ 'platform_name': self.reader.platform_name, 'sensor': 'seviri', 'satellite_longitude': self.reader.mda['projection_parameters']['SSP_longitude'], 'satellite_latitude': self.reader.mda['projection_parameters']['SSP_latitude'], 'satellite_altitude': self.reader.mda['projection_parameters']['h'], 'orbital_parameters': {'projection_longitude': 44, 'projection_latitude': 0., 'projection_altitude': 35785831.0, 'satellite_nominal_longitude': 47, 'satellite_nominal_latitude': 0.0, 'satellite_actual_longitude': 47.5, 'satellite_actual_latitude': -0.5, 'satellite_actual_altitude': 35783328}, 'georef_offset_corrected': self.reader.mda['offset_corrected'] }) self.assertIn('raw_metadata', res.attrs) res.attrs.pop('raw_metadata') self.assertDictEqual(attrs_exp, res.attrs) # Test timestamps self.assertTrue(np.all(res['acq_time'] == timestamps)) self.assertEqual(res['acq_time'].attrs['long_name'], 'Mean scanline acquisition time') def test_pad_data(self): """Test the hrv padding.""" data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) east_bound = 4 west_bound = 13 final_size = (1, 20) res = pad_data(data, final_size, east_bound, west_bound) expected = np.array([[np.nan, np.nan, np.nan, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]]) np.testing.assert_allclose(res, expected) east_bound = 3 self.assertRaises(IndexError, pad_data, data, final_size, east_bound, west_bound) def test_get_area_def(self): """Test getting the area def.""" area = self.reader.get_area_def(DatasetID('HRV')) self.assertEqual(area.area_extent, (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356)) proj_dict = area.proj_dict self.assertEqual(proj_dict['a'], 6378169.0) self.assertEqual(proj_dict['b'], 6356583.8) self.assertEqual(proj_dict['h'], 35785831.0) self.assertEqual(proj_dict['lon_0'], 44.0) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') self.reader.fill_hrv = False area = self.reader.get_area_def(DatasetID('HRV')) self.assertEqual(area.defs[0].area_extent, (-22017598561055.01, -2926674655354.9604, 23564847539690.22, 77771774058.38356)) self.assertEqual(area.defs[1].area_extent, (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604)) class TestHRITMSGFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch('satpy.readers.seviri_l1b_hrit.np.fromfile') def setUp(self, fromfile): """Set up the hrit file handler for testing.""" m = mock.mock_open() fromfile.return_value = np.array([(1, 2)], dtype=[('total_header_length', int), ('hdr_id', int)]) with mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen: with 
mock.patch('satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES'): with mock.patch.object(HRITMSGFileHandler, '_get_hd', new=new_get_hd): newopen.return_value.__enter__.return_value.tell.return_value = 1 prologue = mock.MagicMock() prologue.prologue = {"SatelliteStatus": {"SatelliteDefinition": {"SatelliteId": 324, "NominalLongitude": 47}}, 'GeometricProcessing': {'EarthModel': {'TypeOfEarthModel': 2, 'NorthPolarRadius': 10, 'SouthPolarRadius': 10, 'EquatorialRadius': 10}}, 'ImageDescription': {'ProjectionDescription': {'LongitudeOfSSP': 0.0}, 'Level15ImageProduction': {'ImageProcDirection': 1}}} prologue.get_satpos.return_value = None, None, None prologue.get_earth_radii.return_value = None, None self.reader = HRITMSGFileHandler( 'filename', {'platform_shortname': 'MSG3', 'start_time': datetime(2016, 3, 3, 0, 0), 'service': 'MSG'}, {'filetype': 'info'}, prologue, mock.MagicMock()) ncols = 3712 nlines = 464 nbits = 10 self.reader.mda['number_of_bits_per_pixel'] = nbits self.reader.mda['number_of_lines'] = nlines self.reader.mda['number_of_columns'] = ncols self.reader.mda['data_field_length'] = nlines * ncols * nbits self.reader.mda['cfac'] = 5 self.reader.mda['lfac'] = 5 self.reader.mda['coff'] = 10 self.reader.mda['loff'] = 10 self.reader.mda['projection_parameters'] = {} self.reader.mda['projection_parameters']['a'] = 6378169.0 self.reader.mda['projection_parameters']['b'] = 6356583.8 self.reader.mda['projection_parameters']['h'] = 35785831.0 self.reader.mda['projection_parameters']['SSP_longitude'] = 44 self.reader.mda['projection_parameters']['SSP_latitude'] = 0.0 self.reader.mda['orbital_parameters'] = {} self.reader.mda['orbital_parameters']['satellite_nominal_longitude'] = 47 self.reader.mda['orbital_parameters']['satellite_nominal_latitude'] = 0.0 self.reader.mda['orbital_parameters']['satellite_actual_longitude'] = 47.5 self.reader.mda['orbital_parameters']['satellite_actual_latitude'] = -0.5 self.reader.mda['orbital_parameters']['satellite_actual_altitude'] = 35783328 tline = np.zeros(nlines, dtype=[('days', '>u2'), ('milliseconds', '>u4')]) tline['days'][1:-1] = 21246 * np.ones(nlines-2) # 2016-03-03 tline['milliseconds'][1:-1] = np.arange(nlines-2) self.reader.mda['image_segment_line_quality'] = {'line_mean_acquisition': tline} def test_get_area_def(self): """Test getting the area def.""" area = self.reader.get_area_def(DatasetID('VIS006')) proj_dict = area.proj_dict self.assertEqual(proj_dict['a'], 6378169.0) self.assertEqual(proj_dict['b'], 6356583.8) self.assertEqual(proj_dict['h'], 35785831.0) self.assertEqual(proj_dict['lon_0'], 44.0) self.assertEqual(proj_dict['proj'], 'geos') self.assertEqual(proj_dict['units'], 'm') self.assertEqual(area.area_extent, (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356)) # Data shifted by 1.5km to N-W self.reader.mda['offset_corrected'] = False area = self.reader.get_area_def(DatasetID('VIS006')) self.assertEqual(area.area_extent, (-77771772558.38356, -3720765402503.719, 30310525627938.438, 77771772558.38356)) @mock.patch('satpy.readers.hrit_base.np.memmap') def test_read_band(self, memmap): """Test reading a band.""" nbits = self.reader.mda['number_of_bits_per_pixel'] memmap.return_value = np.random.randint(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band('VIS006', None) self.assertEqual(res.shape, (464, 3712)) @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', return_value=None) @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header', 
autospec=True) @mock.patch('satpy.readers.seviri_base.SEVIRICalibrationHandler._convert_to_radiance') def test_calibrate(self, _convert_to_radiance, get_header, *mocks): """Test selection of calibration coefficients.""" shp = (10, 10) counts = xr.DataArray(np.zeros(shp)) nominal_gain = np.arange(1, 13) nominal_offset = np.arange(-1, -13, -1) gsics_gain = np.arange(0.1, 1.3, 0.1) gsics_offset = np.arange(-0.1, -1.3, -0.1) # Mock prologue & epilogue pro = mock.MagicMock(prologue={'RadiometricProcessing': { 'Level15ImageCalibration': {'CalSlope': nominal_gain, 'CalOffset': nominal_offset}, 'MPEFCalFeedback': {'GSICSCalCoeff': gsics_gain, 'GSICSOffsetCount': gsics_offset} }}) epi = mock.MagicMock(epilogue=None) # Mock header readout mda = {'image_segment_line_quality': {'line_validity': np.zeros(shp[0]), 'line_radiometric_quality': np.zeros(shp[0]), 'line_geometric_quality': np.zeros(shp[0])}} def get_header_patched(self): self.mda = mda get_header.side_effect = get_header_patched # Test selection of calibration coefficients # # a) Default: Nominal calibration reader = HRITMSGFileHandler(filename=None, filename_info=None, filetype_info=None, prologue=pro, epilogue=epi) for ch_id, ch_name in CHANNEL_NAMES.items(): reader.channel_name = ch_name reader.mda['spectral_channel_id'] = ch_id reader.calibrate(data=counts, calibration='radiance') _convert_to_radiance.assert_called_with(mock.ANY, nominal_gain[ch_id - 1], nominal_offset[ch_id - 1]) # b) GSICS calibration for IR channels, nominal calibration for VIS channels reader = HRITMSGFileHandler(filename=None, filename_info=None, filetype_info=None, prologue=pro, epilogue=epi, calib_mode='GSICS') for ch_id, ch_name in CHANNEL_NAMES.items(): if ch_name in VIS_CHANNELS: gain, offset = nominal_gain[ch_id - 1], nominal_offset[ch_id - 1] else: gain, offset = gsics_gain[ch_id - 1], gsics_offset[ch_id - 1] reader.channel_name = ch_name reader.mda['spectral_channel_id'] = ch_id reader.calibrate(data=counts, calibration='radiance') _convert_to_radiance.assert_called_with(mock.ANY, gain, offset) # c) External calibration coefficients for selected channels, GSICS coefs for remaining # IR channels, nominal coefs for remaining VIS channels coefs = {'VIS006': {'gain': 1.234, 'offset': -0.1}, 'IR_108': {'gain': 2.345, 'offset': -0.2}} reader = HRITMSGFileHandler(filename=None, filename_info=None, filetype_info=None, prologue=pro, epilogue=epi, ext_calib_coefs=coefs, calib_mode='GSICS') for ch_id, ch_name in CHANNEL_NAMES.items(): if ch_name in coefs.keys(): gain, offset = coefs[ch_name]['gain'], coefs[ch_name]['offset'] elif ch_name not in VIS_CHANNELS: gain, offset = gsics_gain[ch_id - 1], gsics_offset[ch_id - 1] else: gain, offset = nominal_gain[ch_id - 1], nominal_offset[ch_id - 1] reader.channel_name = ch_name reader.mda['spectral_channel_id'] = ch_id reader.calibrate(data=counts, calibration='radiance') _convert_to_radiance.assert_called_with(mock.ANY, gain, offset) # d) Invalid mode self.assertRaises(ValueError, HRITMSGFileHandler, filename=None, filename_info=None, filetype_info=None, prologue=pro, epilogue=epi, calib_mode='invalid') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_timestamps') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') def test_get_dataset(self, calibrate, parent_get_dataset, _get_timestamps): """Test getting the dataset.""" key = mock.MagicMock(calibration='calibration') info = {'units': 'units', 'wavelength': 
'wavelength', 'standard_name': 'standard_name'} timestamps = np.array([1, 2, 3], dtype='datetime64[ns]') parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((3, 3)), dims=('y', 'x')) _get_timestamps.return_value = timestamps res = self.reader.get_dataset(key, info) # Test method calls parent_get_dataset.assert_called_with(key, info) calibrate.assert_called_with(parent_get_dataset(), key.calibration) # Test attributes (just check if raw metadata is there and then remove it before checking the remaining # attributes) attrs_exp = info.copy() attrs_exp.update({ 'platform_name': self.reader.platform_name, 'sensor': 'seviri', 'satellite_longitude': self.reader.mda['projection_parameters']['SSP_longitude'], 'satellite_latitude': self.reader.mda['projection_parameters']['SSP_latitude'], 'satellite_altitude': self.reader.mda['projection_parameters']['h'], 'orbital_parameters': {'projection_longitude': 44, 'projection_latitude': 0., 'projection_altitude': 35785831.0, 'satellite_nominal_longitude': 47, 'satellite_nominal_latitude': 0.0, 'satellite_actual_longitude': 47.5, 'satellite_actual_latitude': -0.5, 'satellite_actual_altitude': 35783328}, 'georef_offset_corrected': self.reader.mda['offset_corrected'] }) self.assertIn('raw_metadata', res.attrs) res.attrs.pop('raw_metadata') self.assertDictEqual(attrs_exp, res.attrs) # Test timestamps self.assertTrue(np.all(res['acq_time'] == timestamps)) self.assertEqual(res['acq_time'].attrs['long_name'], 'Mean scanline acquisition time') def test_get_raw_mda(self): """Test provision of raw metadata.""" self.reader.mda = {'segment': 1, 'loff': 123} self.reader.prologue_.reduce = lambda max_size: {'prologue': 1} self.reader.epilogue_.reduce = lambda max_size: {'epilogue': 1} expected = {'prologue': 1, 'epilogue': 1, 'segment': 1} self.assertDictEqual(self.reader._get_raw_mda(), expected) # Make sure _get_raw_mda() doesn't modify the original dictionary self.assertIn('loff', self.reader.mda) def test_get_timestamps(self): """Test getting the timestamps.""" tline = self.reader._get_timestamps() # First and last scanline have invalid timestamps (space) self.assertTrue(np.isnat(tline[0])) self.assertTrue(np.isnat(tline[-1])) # Test remaining lines year = tline.astype('datetime64[Y]').astype(int) + 1970 month = tline.astype('datetime64[M]').astype(int) % 12 + 1 day = (tline.astype('datetime64[D]') - tline.astype('datetime64[M]') + 1).astype(int) msec = (tline - tline.astype('datetime64[D]')).astype(int) self.assertTrue(np.all(year[1:-1] == 2016)) self.assertTrue(np.all(month[1:-1] == 3)) self.assertTrue(np.all(day[1:-1] == 3)) self.assertTrue(np.all(msec[1:-1] == np.arange(len(tline) - 2))) def test_get_header(self): # Make sure that the actual satellite position is only included if available self.reader.mda['orbital_parameters'] = {} self.reader.prologue_.get_satpos.return_value = 1, 2, 3 self.reader._get_header() self.assertIn('satellite_actual_longitude', self.reader.mda['orbital_parameters']) self.reader.mda['orbital_parameters'] = {} self.reader.prologue_.get_satpos.return_value = None, None, None self.reader._get_header() self.assertNotIn('satellite_actual_longitude', self.reader.mda['orbital_parameters']) class TestHRITMSGPrologueFileHandler(unittest.TestCase): """Test the HRIT prologue file handler.""" @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.__init__', return_value=None) def setUp(self, *mocks): """Set up the test case.""" self.reader = HRITMSGPrologueFileHandler() 
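        # The fixture below defines three consecutive orbit polynomial
        # validity intervals (06-12, 12-18 and 18-24 UTC) plus an all-zero
        # padding entry; the planned acquisition time (12:15) falls into the
        # second interval, hence the expected index 1 in the tests below.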
self.reader.satpos = None self.reader.prologue = { 'GeometricProcessing': { 'EarthModel': { 'EquatorialRadius': 6378.169, 'NorthPolarRadius': 6356.5838, 'SouthPolarRadius': 6356.5838 } }, 'ImageAcquisition': { 'PlannedAcquisitionTime': { 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888) } }, 'SatelliteStatus': { 'Orbit': { 'OrbitPolynomial': { 'StartTime': np.array([ [datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(1958, 1, 1, 0)]]), 'EndTime': np.array([ [datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(2006, 1, 2, 0), datetime(1958, 1, 1, 0)]]), 'X': [np.zeros(8), [8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, 7.70548174e-04, -1.44262718e-04], np.zeros(8)], 'Y': [np.zeros(8), [-5.21170255e+03, 5.12998948e+00, -1.33370453e+00, -3.09634144e-01, 6.18232793e-02, 7.50505681e-03, -1.35131011e-03, -1.12054405e-04], np.zeros(8)], 'Z': [np.zeros(8), [-6.51293855e+02, 1.45830459e+02, 5.61379400e+01, -3.90970565e+00, -7.38137565e-01, 3.06131644e-02, 3.82892428e-03, -1.12739309e-04], np.zeros(8)], } } } } self.reader._reduced = None @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the prologue file handler accepts extra keyword arguments.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched HRITMSGPrologueFileHandler(filename=None, filename_info={'service': ''}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, calib_mode='nominal') def test_find_orbit_coefs(self): """Test identification of orbit coefficients.""" # Contiguous validity intervals (that's the norm) self.assertEqual(self.reader._find_orbit_coefs(), 1) # No interval enclosing the given timestamp ... 
        # a) closest interval should be selected (if not too far away)
        self.reader.prologue['SatelliteStatus'] = {
            'Orbit': {
                'OrbitPolynomial': {
                    'StartTime': np.array([
                        [datetime(2006, 1, 1, 10), datetime(2006, 1, 1, 13)]]),
                    'EndTime': np.array([
                        [datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18)]])
                }
            }
        }
        self.assertEqual(self.reader._find_orbit_coefs(), 0)

        # b) closest interval too far away
        self.reader.prologue['SatelliteStatus'] = {
            'Orbit': {
                'OrbitPolynomial': {
                    'StartTime': np.array([
                        [datetime(2006, 1, 1, 0), datetime(2006, 1, 1, 18)]]),
                    'EndTime': np.array([
                        [datetime(2006, 1, 1, 4), datetime(2006, 1, 1, 22)]])
                }
            }
        }
        self.assertRaises(NoValidOrbitParams, self.reader._find_orbit_coefs)

        # Overlapping intervals -> most recent interval should be selected
        self.reader.prologue['SatelliteStatus'] = {
            'Orbit': {
                'OrbitPolynomial': {
                    'StartTime': np.array([
                        [datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 10)]]),
                    'EndTime': np.array([
                        [datetime(2006, 1, 1, 13), datetime(2006, 1, 1, 18)]])
                }
            }
        }
        self.assertEqual(self.reader._find_orbit_coefs(), 1)

        # No valid coefficients at all
        self.reader.prologue['SatelliteStatus'] = {
            'Orbit': {
                'OrbitPolynomial': {
                    'StartTime': np.array([
                        [datetime(1958, 1, 1, 0), datetime(1958, 1, 1)]]),
                    'EndTime': np.array([
                        [datetime(1958, 1, 1, 0), datetime(1958, 1, 1)]])
                }
            }
        }
        self.assertRaises(NoValidOrbitParams, self.reader._find_orbit_coefs)

    @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler._find_orbit_coefs')
    def test_get_satpos_cart(self, find_orbit_coefs):
        """Test satellite position in cartesian coordinates."""
        find_orbit_coefs.return_value = 1
        x, y, z = self.reader._get_satpos_cart()
        self.assertTrue(np.allclose([x, y, z], [42078421.37095518, -2611352.744615312,
                                                -419828.9699940758]))

    @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler._get_satpos_cart')
    def test_get_satpos(self, get_satpos_cart):
        """Test satellite position in spherical coordinates."""
        get_satpos_cart.return_value = [42078421.37095518, -2611352.744615312,
                                        -419828.9699940758]
        lon, lat, dist = self.reader.get_satpos()
        self.assertTrue(np.allclose((lon, lat, dist),
                                    (-3.5511754052132387, -0.5711189258409902,
                                     35783328.146167226)))

        # Test cache
        self.reader.get_satpos()
        self.assertEqual(get_satpos_cart.call_count, 1)

        # No valid coefficients
        self.reader.satpos = None  # reset cache
        get_satpos_cart.side_effect = NoValidOrbitParams
        self.reader.prologue['ImageAcquisition']['PlannedAcquisitionTime'][
            'TrueRepeatCycleStart'] = datetime(2000, 1, 1)
        self.assertTupleEqual(self.reader.get_satpos(), (None, None, None))

    def test_get_earth_radii(self):
        """Test readout of earth radii."""
        earth_model = self.reader.prologue['GeometricProcessing']['EarthModel']
        earth_model['EquatorialRadius'] = 2
        earth_model['NorthPolarRadius'] = 1
        earth_model['SouthPolarRadius'] = 2
        a, b = self.reader.get_earth_radii()
        self.assertEqual(a, 2000)
        self.assertEqual(b, 1500)

    @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda')
    def test_reduce(self, reduce_mda):
        """Test metadata reduction."""
        reduce_mda.return_value = 'reduced'

        # Set buffer
        self.assertEqual(self.reader.reduce(123), 'reduced')
        reduce_mda.assert_called()

        # Read buffer
        reduce_mda.reset_mock()
        self.reader._reduced = 'red'
        self.assertEqual(self.reader.reduce(123), 'red')
        reduce_mda.assert_not_called()


class TestHRITMSGEpilogueFileHandler(unittest.TestCase):
    """Test the HRIT epilogue file handler."""

    @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue')
@mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) def setUp(self, init, *mocks): """Set up the test case.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched self.reader = HRITMSGEpilogueFileHandler(filename=None, filename_info={'service': ''}, filetype_info=None, calib_mode='nominal') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the epilogue file handler accepts extra keyword arguments.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched HRITMSGEpilogueFileHandler(filename=None, filename_info={'service': ''}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, calib_mode='nominal') @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') def test_reduce(self, reduce_mda): """Test metadata reduction.""" reduce_mda.return_value = 'reduced' # Set buffer self.assertEqual(self.reader.reduce(123), 'reduced') reduce_mda.assert_called() # Read buffer reduce_mda.reset_mock() self.reader._reduced = 'red' self.assertEqual(self.reader.reduce(123), 'red') reduce_mda.assert_not_called() def suite(): """Test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() tests = [TestHRITMSGFileHandler, TestHRITMSGPrologueFileHandler, TestHRITMSGEpilogueFileHandler, TestHRITMSGFileHandlerHRV] for test in tests: mysuite.addTest(loader.loadTestsFromTestCase(test)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_seviri_l1b_icare.py000066400000000000000000000167271362525524100245540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the SEVIRI L1b HDF4 from ICARE reader.""" import sys import os import numpy as np from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler from satpy.readers import load_reader if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeHDF4FileHandler2(FakeHDF4FileHandler): """Swap in HDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content['/attr/Nadir_Pixel_Size'] = 3000. 
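        # Global attributes mimicking an ICARE SEVIRI L1B HDF4 file; note
        # that several numeric values are provided as strings, which also
        # exercises the reader's type conversion.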
file_content['/attr/Beginning_Acquisition_Date'] = "2004-12-29T12:15:00Z" file_content['/attr/End_Acquisition_Date'] = "2004-12-29T12:27:44Z" file_content['/attr/Geolocation'] = ('1.3642337E7', '1856.0', '1.3642337E7', '1856.0') file_content['/attr/Altitude'] = '42164.0' file_content['/attr/Geographic_Projection'] = 'geos' file_content['/attr/Projection_Longitude'] = '0.0' file_content['/attr/Sub_Satellite_Longitude'] = '3.4' file_content['/attr/Sensors'] = 'MSG1/SEVIRI' file_content['/attr/Zone'] = 'G' file_content['/attr/_FillValue'] = 1 file_content['/attr/scale_factor'] = 1. file_content['/attr/add_offset'] = 0. # test one IR and one VIS channel file_content['Normalized_Radiance'] = DEFAULT_FILE_DATA file_content['Normalized_Radiance/attr/_FillValue'] = 1 file_content['Normalized_Radiance/attr/scale_factor'] = 1. file_content['Normalized_Radiance/attr/add_offset'] = 0. file_content['Normalized_Radiance/shape'] = DEFAULT_FILE_SHAPE file_content['Brightness_Temperature'] = DEFAULT_FILE_DATA file_content['Brightness_Temperature/attr/_FillValue'] = 1 file_content['Brightness_Temperature/attr/scale_factor'] = 1. file_content['Brightness_Temperature/attr/add_offset'] = 0. file_content['Brightness_Temperature/shape'] = DEFAULT_FILE_SHAPE # convert tp xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} for a in ['_FillValue', 'scale_factor', 'add_offset']: if key + '/attr/' + a in file_content: attrs[a] = file_content[key + '/attr/' + a] if val.ndim > 1: file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs) else: file_content[key] = DataArray(val, attrs=attrs) if 'y' not in file_content['Normalized_Radiance'].dims: file_content['Normalized_Radiance'] = file_content['Normalized_Radiance'].rename({'fakeDim0': 'x', 'fakeDim1': 'y'}) return file_content class TestSEVIRIICAREReader(unittest.TestCase): """Test SEVIRI L1b HDF4 from ICARE Reader.""" yaml_file = 'seviri_l1b_icare.yaml' def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy.config import config_search_paths from satpy.readers.seviri_l1b_icare import SEVIRI_ICARE self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(SEVIRI_ICARE, '__bases__', (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf', 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' ]) self.assertTrue(len(loadables), 2) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset_vis(self): """Test loading all datasets from a full swath file.""" from datetime import datetime r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' ]) r.create_filehandlers(loadables) datasets = r.load(['VIS008']) self.assertEqual(len(datasets), 1) for v in datasets.values(): dt = datetime(2004, 12, 29, 12, 27, 44) self.assertEqual(v.attrs['end_time'], dt) self.assertEqual(v.attrs['calibration'], 'reflectance') def test_load_dataset_ir(self): """Test loading all datasets from a full swath file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 
            'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf'
        ])
        r.create_filehandlers(loadables)
        datasets = r.load(['IR_108'])
        self.assertEqual(len(datasets), 1)
        for v in datasets.values():
            self.assertEqual(v.attrs['calibration'], 'brightness_temperature')

    def test_area_def(self):
        """Test the area definition of the loaded dataset."""
        r = load_reader(self.reader_configs)
        loadables = r.select_files_from_pathnames([
            'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf',
        ])
        r.create_filehandlers(loadables)
        datasets = r.load(['VIS008'])
        test_area = {'area_id': 'geosmsg',
                     'width': 10,
                     'height': 300,
                     'area_extent': (-5567248.2834071, -5570248.6866857,
                                     -5537244.2506213, -4670127.7031114)}
        for v in datasets.values():
            self.assertEqual(v.attrs['area'].area_id, test_area['area_id'])
            self.assertEqual(v.attrs['area'].width, test_area['width'])
            self.assertEqual(v.attrs['area'].height, test_area['height'])
            np.testing.assert_almost_equal(v.attrs['area'].area_extent,
                                           test_area['area_extent'])


def suite():
    """Create the test suite for test_seviri_l1b_icare."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestSEVIRIICAREReader))
    return mysuite


if __name__ == '__main__':
    unittest.main()
satpy-0.20.0/satpy/tests/reader_tests/test_seviri_l1b_native.py000066400000000000000000000645031362525524100247510ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unittesting the Native SEVIRI reader.""" import sys import numpy as np import xarray as xr from satpy.readers.seviri_l1b_native import ( NativeMSGFileHandler, get_available_channels, ) from satpy.dataset import DatasetID if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock CHANNEL_INDEX_LIST = ['VIS006', 'VIS008', 'IR_016', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'IR_097', 'IR_108', 'IR_120', 'IR_134', 'HRV'] AVAILABLE_CHANNELS = {} for item in CHANNEL_INDEX_LIST: AVAILABLE_CHANNELS[item] = True SEC15HDR = '15_SECONDARY_PRODUCT_HEADER' IDS = 'SelectedBandIDs' TEST1_HEADER_CHNLIST = {SEC15HDR: {IDS: {}}} TEST1_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX--XX--XX--' TEST2_HEADER_CHNLIST = {SEC15HDR: {IDS: {}}} TEST2_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX-XXXX----X' TEST3_HEADER_CHNLIST = {SEC15HDR: {IDS: {}}} TEST3_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XXXXXXXXXXXX' TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK = { 'earth_model': 1, 'dataset_id': DatasetID(name='VIS006'), 'is_full_disk': True, 'expected_area_def': { 'Area ID': 'geos_seviri_visir', 'Description': 'SEVIRI low resolution channel area', 'Projection ID': 'seviri_visir', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 3712, 'Area extent': (-5568748.2758, -5568748.2758, 5568748.2758, 5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI = { 'earth_model': 1, 'dataset_id': DatasetID(name='VIS006'), 'is_full_disk': False, 'expected_area_def': { 'Area ID': 'geos_seviri_visir', 'Description': 'SEVIRI low resolution channel area', 'Projection ID': 'seviri_visir', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 2516, 'Number of rows': 1829, 'Area extent': (-2205296.3269, -333044.7514, 5337717.232, 5154692.6389) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK = { 'earth_model': 1, 'dataset_id': DatasetID(name='HRV'), 'is_full_disk': True, 'expected_area_def': { 'Area ID': 'geos_seviri_hrv', 'Description': 'SEVIRI low resolution channel area', 'Projection ID': 'seviri_hrv', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 5568, 'Number of rows': 11136, 'Area extent 0': (-1964263.8611793518, 2623352.397084236, 3604484.1933250427, 5567747.920155525), 'Area extent 1': (1000.1343488693237, -5569748.188853264, 5569748.188853264, 2623352.397084236) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI = { 'earth_model': 1, 'dataset_id': DatasetID(name='HRV'), 'is_full_disk': False, 'expected_area_def': { 'Area ID': 'geos_seviri_hrv', 'Description': 'SEVIRI high resolution channel area', 'Projection ID': 'seviri_hrv', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 11136, 'Number of rows': 11136, 'Area extent': (-2204296.1049, -332044.6038, 5336716.8856, 5153692.2997) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK = { 'earth_model': 2, 'dataset_id': DatasetID(name='VIS006'), 'is_full_disk': True, 'expected_area_def': { 'Area ID': 'geos_seviri_visir', 
'Description': 'SEVIRI low resolution channel area', 'Projection ID': 'seviri_visir', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 3712, 'Number of rows': 3712, 'Area extent': (-5570248.4773, -5567248.0742, 5567248.0742, 5570248.4773) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK = { 'earth_model': 2, 'dataset_id': DatasetID(name='HRV'), 'is_full_disk': True, 'expected_area_def': { 'Area ID': 'geos_seviri_hrv', 'Description': 'SEVIRI low resolution channel area', 'Projection ID': 'seviri_hrv', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 5568, 'Number of rows': 11136, 'Area extent 0': (-1965764.0627026558, 2624852.59860754, 3602983.9918017387, 5569248.121678829), 'Area extent 1': (-500.06717443466187, -5568247.98732996, 5568247.98732996, 2624852.59860754) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI = { 'earth_model': 2, 'dataset_id': DatasetID(name='VIS006'), 'is_full_disk': False, 'expected_area_def': { 'Area ID': 'geos_seviri_visir', 'Description': 'SEVIRI low resolution channel area', 'Projection ID': 'seviri_visir', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 2516, 'Number of rows': 1829, 'Area extent': (-2206796.5285, -331544.5498, 5336217.0304, 5156192.8405) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI = { 'earth_model': 2, 'dataset_id': DatasetID(name='HRV'), 'is_full_disk': False, 'expected_area_def': { 'Area ID': 'geos_seviri_hrv', 'Description': 'SEVIRI high resolution channel area', 'Projection ID': 'seviri_hrv', 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, 'Number of columns': 11136, 'Number of rows': 11136, 'Area extent': (-2205796.3064, -330544.4023, 5335216.684, 5155192.5012) } } TEST_CALIBRATION_MODE = { 'earth_model': 1, 'dataset_id': DatasetID(name='IR_108', calibration='radiance'), 'is_full_disk': True, 'calibration': 'radiance', 'CalSlope': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], 'CalOffset': [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], 'GSICSCalCoeff': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], 'GSICSOffsetCount': [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] } # This should preferably be put in a helper-module # Fixme! 
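# (numpy.testing.assert_allclose performs an equivalent shape and
# closeness check.)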
def assertNumpyArraysEqual(self, other): """Assert that Numpy arrays are equal.""" if self.shape != other.shape: raise AssertionError("Shapes don't match") if not np.allclose(self, other): raise AssertionError("Elements don't match!") class TestNativeMSGFileHandler(unittest.TestCase): """Test the NativeMSGFileHandler.""" def test_get_available_channels(self): """Test the derivation of the available channel list.""" available_chs = get_available_channels(TEST1_HEADER_CHNLIST) trues = ['WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120'] for bandname in AVAILABLE_CHANNELS.keys(): if bandname in trues: self.assertTrue(available_chs[bandname]) else: self.assertFalse(available_chs[bandname]) available_chs = get_available_channels(TEST2_HEADER_CHNLIST) trues = ['VIS006', 'VIS008', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'HRV'] for bandname in AVAILABLE_CHANNELS.keys(): if bandname in trues: self.assertTrue(available_chs[bandname]) else: self.assertFalse(available_chs[bandname]) available_chs = get_available_channels(TEST3_HEADER_CHNLIST) for bandname in AVAILABLE_CHANNELS.keys(): self.assertTrue(available_chs[bandname]) class TestNativeMSGArea(unittest.TestCase): """Test NativeMSGFileHandler.get_area_extent. The expected results have been verified by manually inspecting the output of geoferenced imagery. """ @staticmethod def create_test_header(earth_model, dataset_id, is_full_disk): """Create mocked NativeMSGFileHandler. Contains sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. """ if dataset_id.name == 'HRV': reference_grid = 'ReferenceGridHRV' column_dir_grid_step = 1.0001343488693237 line_dir_grid_step = 1.0001343488693237 else: reference_grid = 'ReferenceGridVIS_IR' column_dir_grid_step = 3.0004031658172607 line_dir_grid_step = 3.0004031658172607 if is_full_disk: north = 3712 east = 1 west = 3712 south = 1 n_visir_cols = 3712 n_visir_lines = 3712 else: north = 3574 east = 78 west = 2591 south = 1746 n_visir_cols = 2516 n_visir_lines = north - south + 1 header = { '15_DATA_HEADER': { 'ImageDescription': { reference_grid: { 'ColumnDirGridStep': column_dir_grid_step, 'LineDirGridStep': line_dir_grid_step, 'GridOrigin': 2, # south-east corner }, 'ProjectionDescription': { 'LongitudeOfSSP': 0.0 } }, 'GeometricProcessing': { 'EarthModel': { 'TypeOfEarthModel': earth_model, 'EquatorialRadius': 6378169.0, 'NorthPolarRadius': 6356583.800000001, 'SouthPolarRadius': 6356583.800000001, } }, 'SatelliteStatus': { 'SatelliteDefinition': { 'SatelliteId': 324 } } }, '15_SECONDARY_PRODUCT_HEADER': { 'NorthLineSelectedRectangle': {'Value': north}, 'EastColumnSelectedRectangle': {'Value': east}, 'WestColumnSelectedRectangle': {'Value': west}, 'SouthLineSelectedRectangle': {'Value': south}, 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, 'NumberColumnsVISIR': {'Value': n_visir_cols}, 'NumberLinesVISIR': {'Value': n_visir_lines}, 'NumberColumnsHRV': {'Value': 11136}, 'NumberLinesHRV': {'Value': 11136}, } } return header @staticmethod def create_test_trailer(): """Create Test Trailer. Mocked Trailer with sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. 
""" trailer = { '15TRAILER': { 'ImageProductionStats': { 'ActualL15CoverageHRV': { 'UpperNorthLineActual': 11136, 'UpperWestColumnActual': 7533, 'UpperSouthLineActual': 8193, 'UpperEastColumnActual': 1966, 'LowerNorthLineActual': 8192, 'LowerWestColumnActual': 5568, 'LowerSouthLineActual': 1, 'LowerEastColumnActual': 1 } } } } return trailer def prepare_area_defs(self, test_dict): """Prepare calculated and expected area definitions for equal checking.""" earth_model = test_dict['earth_model'] dataset_id = test_dict['dataset_id'] is_full_disk = test_dict['is_full_disk'] header = self.create_test_header(earth_model, dataset_id, is_full_disk) trailer = self.create_test_trailer() expected_area_def = test_dict['expected_area_def'] with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile: fromfile.return_value = header with mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict: recarray2dict.side_effect = (lambda x: x) with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap: _get_memmap.return_value = np.arange(3) with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'): fh = NativeMSGFileHandler(None, {}, None) fh.header = header fh.trailer = trailer calc_area_def = fh.get_area_def(dataset_id) return (calc_area_def, expected_area_def) # Earth model 1 tests def test_earthmodel1_visir_fulldisk(self): """Test the VISIR Fulldisk with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK ) assertNumpyArraysEqual(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.proj_id, expected['Projection ID']) def test_earthmodel1_hrv_fulldisk(self): """Test the HRV Fulldisk with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK ) assertNumpyArraysEqual(np.array(calculated.defs[0].area_extent), np.array(expected['Area extent 0'])) assertNumpyArraysEqual(np.array(calculated.defs[1].area_extent), np.array(expected['Area extent 1'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.defs[0].proj_id, expected['Projection ID']) self.assertEqual(calculated.defs[1].proj_id, expected['Projection ID']) def test_earthmodel1_visir_roi(self): """Test the VISIR ROI with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI ) assertNumpyArraysEqual(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.proj_id, expected['Projection ID']) def test_earthmodel1_hrv_roi(self): """Test the HRV ROI with the EarthModel 1.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI ) assertNumpyArraysEqual(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.proj_id, expected['Projection ID']) # Earth model 2 tests def test_earthmodel2_visir_fulldisk(self): """Test the VISIR Fulldisk with the EarthModel 2.""" 
calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK ) assertNumpyArraysEqual(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.proj_id, expected['Projection ID']) def test_earthmodel2_hrv_fulldisk(self): """Test the HRV Fulldisk with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK ) assertNumpyArraysEqual(np.array(calculated.defs[0].area_extent), np.array(expected['Area extent 0'])) assertNumpyArraysEqual(np.array(calculated.defs[1].area_extent), np.array(expected['Area extent 1'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.defs[0].proj_id, expected['Projection ID']) self.assertEqual(calculated.defs[1].proj_id, expected['Projection ID']) def test_earthmodel2_visir_roi(self): """Test the VISIR ROI with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI ) assertNumpyArraysEqual(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.proj_id, expected['Projection ID']) def test_earthmodel2_hrv_roi(self): """Test the HRV ROI with the EarthModel 2.""" calculated, expected = self.prepare_area_defs( TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI ) assertNumpyArraysEqual(np.array(calculated.area_extent), np.array(expected['Area extent'])) self.assertEqual(calculated.width, expected['Number of columns']) self.assertEqual(calculated.height, expected['Number of rows']) self.assertEqual(calculated.proj_id, expected['Projection ID']) class TestNativeMSGCalibrationMode(unittest.TestCase): """Test NativeMSGFileHandler.get_area_extent. The expected results have been verified by manually inspecting the output of geoferenced imagery. """ @staticmethod def create_test_header(earth_model, dataset_id, is_full_disk): """Create Test Header. Mocked NativeMSGFileHandler with sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. 
""" if dataset_id.name == 'HRV': # reference_grid = 'ReferenceGridHRV' column_dir_grid_step = 1.0001343488693237 line_dir_grid_step = 1.0001343488693237 else: # reference_grid = 'ReferenceGridVIS_IR' column_dir_grid_step = 3.0004031658172607 line_dir_grid_step = 3.0004031658172607 if is_full_disk: north = 3712 east = 1 west = 3712 south = 1 n_visir_cols = 3712 n_visir_lines = 3712 else: north = 3574 east = 78 west = 2591 south = 1746 n_visir_cols = 2516 n_visir_lines = north - south + 1 header = { '15_DATA_HEADER': { 'ImageDescription': { 'reference_grid': { 'ColumnDirGridStep': column_dir_grid_step, 'LineDirGridStep': line_dir_grid_step, 'GridOrigin': 2, # south-east corner }, 'ProjectionDescription': { 'LongitudeOfSSP': 0.0 } }, 'GeometricProcessing': { 'EarthModel': { 'TypeOfEarthModel': earth_model, 'EquatorialRadius': 6378169.0, 'NorthPolarRadius': 6356583.800000001, 'SouthPolarRadius': 6356583.800000001, } }, 'RadiometricProcessing': { 'Level15ImageCalibration': { 'CalSlope': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], 'CalOffset': [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], }, 'MPEFCalFeedback': { 'GSICSCalCoeff': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], 'GSICSOffsetCount': [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] }, }, 'SatelliteStatus': { 'SatelliteDefinition': { 'SatelliteId': 324 } } }, '15_SECONDARY_PRODUCT_HEADER': { 'NorthLineSelectedRectangle': {'Value': north}, 'EastColumnSelectedRectangle': {'Value': east}, 'WestColumnSelectedRectangle': {'Value': west}, 'SouthLineSelectedRectangle': {'Value': south}, 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, 'NumberColumnsVISIR': {'Value': n_visir_cols}, 'NumberLinesVISIR': {'Value': n_visir_lines}, 'NumberColumnsHRV': {'Value': 11136}, 'NumberLinesHRV': {'Value': 11136}, } } return header def calibration_mode_test(self, test_dict, cal_mode): """Test the Calibration Mode.""" # dummy data array data = xr.DataArray([255., 200., 300.]) earth_model = test_dict['earth_model'] dataset_id = test_dict['dataset_id'] index = CHANNEL_INDEX_LIST.index(dataset_id.name) # determine the cal coeffs needed for the expected data calculation if cal_mode == 'nominal': cal_slope = test_dict['CalSlope'][index] cal_offset = test_dict['CalOffset'][index] else: cal_slope_arr = test_dict['GSICSCalCoeff'] cal_offset_arr = test_dict['GSICSOffsetCount'] cal_offset = cal_offset_arr[index] * cal_slope_arr[index] cal_slope = cal_slope_arr[index] is_full_disk = test_dict['is_full_disk'] header = self.create_test_header(earth_model, dataset_id, is_full_disk) with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile: fromfile.return_value = header with mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict: recarray2dict.side_effect = (lambda x: x) with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap: _get_memmap.return_value = np.arange(3) with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'): # Create an instance of the native msg reader # with the calibration mode to test fh = NativeMSGFileHandler(None, {}, None, calib_mode=cal_mode) # Caluculate the expected calibration values using the coeefs # from the test data set expected = fh._convert_to_radiance(data, cal_slope, cal_offset) # Calculate the calibrated vaues using the cal coeffs from the # test header and using the correct calibration mode values fh.header = header calculated = 
fh.calibrate(data, dataset_id) return (expected.data, calculated.data) def test_calibration_mode_nominal(self): """Test the nominal calibration mode.""" # Test using the Nominal calibration mode expected, calculated = self.calibration_mode_test( TEST_CALIBRATION_MODE, 'nominal', ) assertNumpyArraysEqual(calculated, expected) def test_calibration_mode_gsics(self): """Test the GSICS calibration mode.""" # Test using the GSICS calibration mode expected, calculated = self.calibration_mode_test( TEST_CALIBRATION_MODE, 'gsics', ) assertNumpyArraysEqual(calculated, expected) def test_calibration_mode_dummy(self): """Test a dummy calibration mode.""" # pass in a calibration mode that is not recognised by the reader # and an exception will be raised self.assertRaises(NotImplementedError, self.calibration_mode_test, TEST_CALIBRATION_MODE, 'dummy', ) def suite(): """Test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestNativeMSGFileHandler)) mysuite.addTest(loader.loadTestsFromTestCase(TestNativeMSGArea)) mysuite.addTest(loader.loadTestsFromTestCase(TestNativeMSGCalibrationMode)) return mysuite if __name__ == "__main__": # So you can run tests from this module individually. unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_seviri_l2_bufr.py000066400000000000000000000131461362525524100242560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unittesting the SEVIRI L2 BUFR reader.""" import sys import numpy as np from datetime import datetime if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_csr'} FILENAME_INFO = {'start_time': '20191112000000', 'spacecraft': 'MSG4'} FILENAME_INFO2 = {'start_time': '20191112000000', 'spacecraft': 'MSG4', 'server': 'TESTSERVER'} MPEF_PRODUCT_HEADER = { 'NominalTime': datetime(2019, 11, 6, 18, 0), 'SpacecraftName': '08', 'RectificationLongitude': 'E0415' } DATASET_INFO = { 'key': '#1#brightnessTemperature', 'fill_value': 0 } DATASET_ATTRS = { 'platform_name': 'MET08', 'ssp_lon': 41.5, 'seg_size': 16 } class TestSeviriL2Bufr(unittest.TestCase): """Test NativeMSGBufrHandler.""" @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") def seviri_l2_bufr_test(self, filename): """Test the SEVIRI BUFR handler.""" from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler import eccodes as ec buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') ec.codes_set(buf1, 'unpack', 1) samp1 = np.random.uniform(low=250, high=350, size=(128,)) # write the bufr test data twice as we want to read in and the concatenate the data in the reader # 55 id corresponds to METEOSAT 8 ec.codes_set(buf1, 'satelliteIdentifier', 55) ec.codes_set_array(buf1, '#1#brightnessTemperature', samp1) ec.codes_set_array(buf1, '#1#brightnessTemperature', samp1) m = mock.mock_open() # only our offline product contain MPEF product headers so we get the metadata from there if ('BUFRProd' in filename): with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile: fromfile.return_value = MPEF_PRODUCT_HEADER with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict') as recarray2dict: recarray2dict.side_effect = (lambda x: x) fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, FILETYPE_INFO) fh.mpef_header = MPEF_PRODUCT_HEADER else: # No Mpef Header so we get the metadata from the BUFR messages with mock.patch('satpy.readers.seviri_l2_bufr.open', m, create=True): with mock.patch('eccodes.codes_bufr_new_from_file', side_effect=[buf1, None, buf1, None, buf1, None]) as ec1: ec1.return_value = ec1.side_effect with mock.patch('eccodes.codes_set') as ec2: ec2.return_value = 1 with mock.patch('eccodes.codes_release') as ec5: ec5.return_value = 1 fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO) with mock.patch('satpy.readers.seviri_l2_bufr.open', m, create=True): with mock.patch('eccodes.codes_bufr_new_from_file', side_effect=[buf1, buf1, None]) as ec1: ec1.return_value = ec1.side_effect with mock.patch('eccodes.codes_set') as ec2: ec2.return_value = 1 with mock.patch('eccodes.codes_release') as ec5: ec5.return_value = 1 z = fh.get_dataset(None, DATASET_INFO) # concatenate the original test arrays as # get dataset will have read and concatented the data x1 = np.concatenate((samp1, samp1), axis=0) np.testing.assert_array_equal(z.values, x1) self.assertEqual(z.attrs['platform_name'], DATASET_ATTRS['platform_name']) self.assertEqual(z.attrs['ssp_lon'], DATASET_ATTRS['ssp_lon']) self.assertEqual(z.attrs['seg_size'], DATASET_ATTRS['seg_size']) def test_seviri_l2_bufr(self): """Call the test function.""" self.seviri_l2_bufr_test('GIIBUFRProduct_20191106130000Z_00_OMPEFS04_MET11_FES_E0000') self.seviri_l2_bufr_test('MSG4-SEVI-MSGGIIN-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr') 
self.seviri_l2_bufr_test('MSG4-SEVI-MSGGIIN-0101-0101-20191106101500.000000000Z-20191106103218-1362148') def suite(): """Test suite for test_seviri_l2_bufr.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSeviriL2Bufr)) return mysuite if __name__ == "__main__": # So you can run tests from this module individually. unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_slstr_l2.py000066400000000000000000000062671362525524100231100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Module for testing the satpy.readers.slstr_l2 module.""" import unittest from unittest import mock from unittest.mock import MagicMock from unittest.mock import patch import xarray as xr from satpy.readers.slstr_l2 import SLSTRL2FileHandler class TestSLSTRL2Reader(unittest.TestCase): """Test Sentinel-3 SST L2 reader.""" @mock.patch('xarray.open_dataset') def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" filename_info = {} tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') tmp.rename.return_value = tmp xr.open_dataset.return_value = tmp SLSTRL2FileHandler('somedir/somefile.nc', filename_info, None) mocked_dataset.assert_called() mocked_dataset.reset_mock() with patch('tarfile.open') as tf: tf.return_value.__enter__.return_value = MagicMock(getnames=lambda *a: ["GHRSST-SSTskin.nc"]) SLSTRL2FileHandler('somedir/somefile.tar', filename_info, None) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): """Test retrieval of datasets.""" filename_info = {} tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') tmp.rename.return_value = tmp xr.open_dataset.return_value = tmp test = SLSTRL2FileHandler('somedir/somefile.nc', filename_info, None) test.nc = {'longitude': xr.Dataset(), 'latitude': xr.Dataset(), 'sea_surface_temperature': xr.Dataset(), 'sea_ice_fraction': xr.Dataset(), } test.get_dataset('longitude', {'standard_name': 'longitude'}) test.get_dataset('latitude', {'standard_name': 'latitude'}) test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'}) test.get_dataset('sea_ice_fraction', {'standard_name': 'sea_ice_fraction'}) with self.assertRaises(KeyError): test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'}) mocked_dataset.assert_called() mocked_dataset.reset_mock() def suite(): """Test suite for test_slstr_l2.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSLSTRL2Reader)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_tropomi_l2.py000066400000000000000000000152321362525524100234260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding:
utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see <http://www.gnu.org/licenses/>. """Module for testing the satpy.readers.tropomi_l2 module.""" import os import sys from datetime import datetime import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler import xarray as xr if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (3246, 450) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 file handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray dt_s = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) dt_e = filename_info.get('end_time', datetime(2016, 1, 1, 12, 0, 0)) if filetype_info['file_type'] == 'tropomi_l2': file_content = { '/attr/time_coverage_start': dt_s.strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/time_coverage_end': dt_e.strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/platform_shortname': 'S5P', '/attr/sensor': 'TROPOMI', } file_content['PRODUCT/latitude'] = DEFAULT_FILE_DATA file_content['PRODUCT/longitude'] = DEFAULT_FILE_DATA if 'NO2' in filename: file_content['PRODUCT/nitrogen_dioxide_total_column'] = DEFAULT_FILE_DATA if 'SO2' in filename: file_content['PRODUCT/sulfurdioxide_total_vertical_column'] = DEFAULT_FILE_DATA for k in list(file_content.keys()): if not k.startswith('PRODUCT'): continue file_content[k + '/shape'] = DEFAULT_FILE_SHAPE # convert to xarrays for key, val in file_content.items(): if isinstance(val, np.ndarray): if val.ndim > 1: file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel')) else: file_content[key] = DataArray(val) file_content['PRODUCT/latitude'].attrs['_FillValue'] = -999.0 file_content['PRODUCT/longitude'].attrs['_FillValue'] = -999.0 if 'NO2' in filename: file_content['PRODUCT/nitrogen_dioxide_total_column'].attrs['_FillValue'] = -999.0 if 'SO2' in filename: file_content['PRODUCT/sulfurdioxide_total_vertical_column'].attrs['_FillValue'] = -999.0 else: assert False, 'Unsupported file type: {}'.format(filetype_info['file_type']) return file_content class TestTROPOMIL2Reader(unittest.TestCase): """Test TROPOMI L2 reader.""" yaml_file = "tropomi_l2.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy.config import config_search_paths from satpy.readers.tropomi_l2 import TROPOMIL2FileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(TROPOMIL2FileHandler, '__bases__', (FakeNetCDF4FileHandlerTL2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the
NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_no2(self): """Load the NO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', ]) r.create_filehandlers(loadables) ds = r.load(['nitrogen_dioxide_total_column']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'S5P') self.assertEqual(d.attrs['sensor'], 'TROPOMI') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertIn('y', d.dims) self.assertIn('x', d.dims) def test_load_so2(self): """Load the SO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): loadables = r.select_files_from_pathnames([ 'S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc', ]) r.create_filehandlers(loadables) ds = r.load(['sulfurdioxide_total_vertical_column']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['platform_shortname'], 'S5P') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertIn('y', d.dims) self.assertIn('x', d.dims) def suite(): """Test suite for test_tropomi_l2.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestTROPOMIL2Reader)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_utils.py000066400000000000000000000257701362525524100225060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
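# The geostationary helper tests below rely on the geos projection convention that coordinates on the projection plane (in metres) are the scan angles in radians multiplied by the satellite height h; pixel positions are therefore passed to the helpers as x / h and y / h.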
"""Testing of helper functions.""" import unittest try: from unittest import mock except ImportError: import mock import os import sys import numpy as np import numpy.testing import pyresample.geometry from satpy.readers import utils as hf class TestHelpers(unittest.TestCase): """Test the area helpers.""" def test_lonlat_from_geos(self): """Get lonlats from geos.""" geos_area = mock.MagicMock() lon_0 = 0 h = 35785831.00 geos_area.proj_dict = {'a': 6378169.00, 'b': 6356583.80, 'h': h, 'lon_0': lon_0} expected = np.array((lon_0, 0)) import pyproj proj = pyproj.Proj(proj='geos', **geos_area.proj_dict) expected = proj(0, 0, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(0, 0, geos_area)) expected = proj(0, 1000000, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(0, 1000000 / h, geos_area)) expected = proj(1000000, 0, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(1000000 / h, 0, geos_area)) expected = proj(2000000, -2000000, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(2000000 / h, -2000000 / h, geos_area)) def test_get_geostationary_bbox(self): """Get the geostationary bbox.""" geos_area = mock.MagicMock() lon_0 = 0 geos_area.proj_dict = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00, 'lon_0': lon_0} geos_area.area_extent = [-5500000., -5500000., 5500000., 5500000.] lon, lat = hf.get_geostationary_bounding_box(geos_area, 20) elon = np.array([-74.802824, -73.667708, -69.879687, -60.758081, -32.224989, 32.224989, 60.758081, 69.879687, 73.667708, 74.802824, 74.802824, 73.667708, 69.879687, 60.758081, 32.224989, -32.224989, -60.758081, -69.879687, -73.667708, -74.802824]) elat = -np.array([-6.81982903e-15, -1.93889346e+01, -3.84764764e+01, -5.67707359e+01, -7.18862588e+01, -7.18862588e+01, -5.67707359e+01, -3.84764764e+01, -1.93889346e+01, 0.00000000e+00, 6.81982903e-15, 1.93889346e+01, 3.84764764e+01, 5.67707359e+01, 7.18862588e+01, 7.18862588e+01, 5.67707359e+01, 3.84764764e+01, 1.93889346e+01, -0.00000000e+00]) np.testing.assert_allclose(lon, elon + lon_0) np.testing.assert_allclose(lat, elat) def test_get_geostationary_angle_extent(self): """Get max geostationary angles.""" geos_area = mock.MagicMock() geos_area.proj_dict = {'a': 6378169.00, 'b': 6356583.80, 'h': 35785831.00} expected = (0.15185342867090912, 0.15133555510297725) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) geos_area.proj_dict = {'a': 1000.0, 'b': 1000.0, 'h': np.sqrt(2) * 1000.0 - 1000.0} expected = (np.deg2rad(45), np.deg2rad(45)) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) def test_geostationary_mask(self): """Test geostationary mask.""" # Compute mask of a very elliptical earth area = pyresample.geometry.AreaDefinition( 'FLDK', 'Full Disk', 'geos', {'a': '6378169.0', 'b': '3000000.0', 'h': '35785831.0', 'lon_0': '145.0', 'proj': 'geos', 'units': 'm'}, 101, 101, (-6498000.088960204, -6498000.088960204, 6502000.089024927, 6502000.089024927)) mask = hf.get_geostationary_mask(area).astype(np.int).compute() # Check results along a couple of lines # a) Horizontal self.assertTrue(np.all(mask[50, :8] == 0)) self.assertTrue(np.all(mask[50, 8:93] == 1)) self.assertTrue(np.all(mask[50, 93:] == 0)) # b) Vertical self.assertTrue(np.all(mask[:31, 50] == 0)) self.assertTrue(np.all(mask[31:70, 50] == 1)) self.assertTrue(np.all(mask[70:, 50] == 0)) # c) Top left to bottom right self.assertTrue(np.all(mask[range(33), range(33)] == 0)) 
self.assertTrue(np.all(mask[range(33, 68), range(33, 68)] == 1)) self.assertTrue(np.all(mask[range(68, 101), range(68, 101)] == 0)) # d) Bottom left to top right self.assertTrue(np.all(mask[range(101-1, 68-1, -1), range(33)] == 0)) self.assertTrue(np.all(mask[range(68-1, 33-1, -1), range(33, 68)] == 1)) self.assertTrue(np.all(mask[range(33-1, -1, -1), range(68, 101)] == 0)) @mock.patch('satpy.readers.utils.AreaDefinition') def test_sub_area(self, adef): """Sub area slicing.""" area = mock.MagicMock() area.pixel_size_x = 1.5 area.pixel_size_y = 1.5 area.pixel_upper_left = (0, 0) area.area_id = 'fakeid' area.name = 'fake name' area.proj_id = 'fakeproj' area.proj_dict = {'fake': 'dict'} hf.get_sub_area(area, slice(1, 4), slice(0, 3)) adef.assert_called_once_with('fakeid', 'fake name', 'fakeproj', {'fake': 'dict'}, 3, 3, (0.75, -3.75, 5.25, 0.75)) def test_np2str(self): """Test the np2str function.""" # byte object npstring = np.string_('hej') self.assertEqual(hf.np2str(npstring), 'hej') # single element numpy array np_arr = np.array([npstring]) self.assertEqual(hf.np2str(np_arr), 'hej') # scalar numpy array np_arr = np.array(npstring) self.assertEqual(hf.np2str(np_arr), 'hej') # multi-element array npstring = np.array([npstring, npstring]) self.assertRaises(ValueError, hf.np2str, npstring) # non-array self.assertRaises(ValueError, hf.np2str, 5) def test_get_earth_radius(self): """Test earth radius computation.""" a = 2. b = 1. def re(lat): """Compute ellipsoid radius at the given geodetic latitude. Reference: Capderou, M.: Handbook of Satellite Orbits, Equation (2.20). """ lat = np.deg2rad(lat) e2 = 1 - b ** 2 / a ** 2 n = a / np.sqrt(1 - e2*np.sin(lat)**2) return n * np.sqrt((1 - e2)**2 * np.sin(lat)**2 + np.cos(lat)**2) for lon in (0, 180, 270): self.assertEqual(hf.get_earth_radius(lon=lon, lat=0., a=a, b=b), a) for lat in (90, -90): self.assertEqual(hf.get_earth_radius(lon=0., lat=lat, a=a, b=b), b) self.assertTrue(np.isclose(hf.get_earth_radius(lon=123, lat=45., a=a, b=b), re(45.))) def test_reduce_mda(self): """Test metadata size reduction.""" mda = {'a': 1, 'b': np.array([1, 2, 3]), 'c': np.array([1, 2, 3, 4]), 'd': {'a': 1, 'b': np.array([1, 2, 3]), 'c': np.array([1, 2, 3, 4]), 'd': {'a': 1, 'b': np.array([1, 2, 3]), 'c': np.array([1, 2, 3, 4])}}} exp = {'a': 1, 'b': np.array([1, 2, 3]), 'd': {'a': 1, 'b': np.array([1, 2, 3]), 'd': {'a': 1, 'b': np.array([1, 2, 3])}}} numpy.testing.assert_equal(hf.reduce_mda(mda, max_size=3), exp) # Make sure, reduce_mda() doesn't modify the original dictionary self.assertIn('c', mda) self.assertIn('c', mda['d']) self.assertIn('c', mda['d']['d']) @mock.patch('satpy.readers.utils.bz2.BZ2File') @mock.patch('satpy.readers.utils.Popen') def test_unzip_file_pbzip2(self, mock_popen, mock_bz2): """Test the bz2 file unzipping techniques.""" process_mock = mock.Mock() attrs = {'communicate.return_value': (b'output', b'error'), 'returncode': 0} process_mock.configure_mock(**attrs) mock_popen.return_value = process_mock bz2_mock = mock.MagicMock() bz2_mock.read.return_value = b'TEST' mock_bz2.return_value = bz2_mock filename = 'tester.DAT.bz2' whichstr = 'satpy.readers.utils.which' # no bz2 installed with mock.patch(whichstr) as whichmock: whichmock.return_value = None new_fname = hf.unzip_file(filename) self.assertTrue(bz2_mock.read.called) self.assertTrue(os.path.exists(new_fname)) if os.path.exists(new_fname): os.remove(new_fname) # bz2 installed, but python 3 only if sys.version_info.major >= 3: with mock.patch(whichstr) as whichmock: whichmock.return_value = 
'/usr/bin/pbzip2' new_fname = hf.unzip_file(filename) self.assertTrue(mock_popen.called) self.assertTrue(os.path.exists(new_fname)) if os.path.exists(new_fname): os.remove(new_fname) filename = 'tester.DAT' new_fname = hf.unzip_file(filename) self.assertIsNone(new_fname) def suite(): """Test suite for utils library.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestHelpers)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_vaisala_gld360.py000066400000000000000000000041631362525524100240400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Unittesting the Vaisala GLD360 reader.""" from io import StringIO import numpy as np from satpy.readers.vaisala_gld360 import VaisalaGLD360TextFileHandler from satpy.dataset import DatasetID import unittest class TestVaisalaGLD360TextFileHandler(unittest.TestCase): """Test the VaisalaGLD360TextFileHandler.""" def test_vaisala_gld360(self): """Test reading the Vaisala GLD360 text data.""" expected = np.array([12.3, 13.2, -31.]) filename = StringIO( u'2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' '2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n' '2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA' ) filename_info = {} filetype_info = {} self.handler = VaisalaGLD360TextFileHandler( filename, filename_info, filetype_info ) filename.close() dataset_id = DatasetID('power') dataset_info = {'units': 'kA'} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected, rtol=1e-05) def suite(): """Test suite for test_vaisala_gld360.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestVaisalaGLD360TextFileHandler)) return mysuite if __name__ == "__main__": # So you can run tests from this module individually. unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_viirs_compact.py000066400000000000000000003544001362525524100242050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
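# The test below builds a compact VIIRS DNB granule from scratch: the nested fake_dnb dictionary defined in setUp() is written to a temporary HDF5 file with h5py and then read back through VIIRSCompactFileHandler.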
"""Module for testing the satpy.readers.viirs_compact module.""" import numpy as np import unittest import h5py import tempfile import os class TestCompact(unittest.TestCase): """Test class for reading compact viirs format.""" def setUp(self): """Create a fake file from scratch.""" fake_dnb = { "All_Data": { "ModeGran": {"value": 0}, "ModeScan": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 249, ], dtype=np.uint8, ) }, "NumberOfScans": {"value": np.array([47])}, "VIIRS-DNB-GEO_All": { "AlignmentCoefficient": { "value": np.array( [ 2.11257413e-02, 2.11152732e-02, 2.11079046e-02, 2.10680142e-02, 1.80840008e-02, 1.80402063e-02, 1.79968309e-02, 1.79477539e-02, 2.20463774e-03, 2.17431062e-03, 2.14360282e-03, 2.11503846e-03, 2.08630669e-03, 2.05924874e-03, 2.03177333e-03, 2.00573727e-03, 1.98072987e-03, 1.95503305e-03, 1.93077011e-03, 1.90702057e-03, 1.88353716e-03, 1.86104013e-03, 1.83863181e-03, 1.81696517e-03, 1.79550308e-03, 1.77481642e-03, 1.75439729e-03, 1.73398503e-03, 1.71459839e-03, 1.69516564e-03, 1.67622324e-03, 1.65758410e-03, 1.63990213e-03, 1.62128301e-03, 1.60375470e-03, 1.58667017e-03, 1.61543000e-03, 1.59775047e-03, 1.50719041e-03, 1.48937735e-03, 1.47257745e-03, 1.50070526e-03, 1.48288533e-03, 9.29064234e-04, 9.12246935e-04, 8.95748264e-04, 8.71886965e-04, 8.55044520e-04, 8.38686305e-04, 8.18263041e-04, 8.01501446e-04, 7.85346841e-04, 1.15984806e-03, 1.14326552e-03, 1.12648588e-03, 1.11018715e-03, 1.09399087e-03, 1.19698711e-03, 1.18051842e-03, 1.16404379e-03, 1.14832399e-03, 9.92591376e-04, 9.75896895e-04, 9.59663419e-04, 9.43415158e-04, 9.27662419e-04, 8.92253709e-04, 8.75947590e-04, 8.60177504e-04, 8.44484195e-04, 8.35279003e-04, 8.19236680e-04, 8.03303672e-04, 7.87482015e-04, 7.60449213e-04, 7.44239136e-04, 7.28625571e-04, 7.12990935e-04, 6.89090986e-04, 6.73000410e-04, 6.57248020e-04, 6.41623745e-04, 6.20219158e-04, 6.04308851e-04, 5.88596100e-04, 5.73108089e-04, 3.65344196e-04, 3.49639275e-04, 3.34273063e-04, 4.81286290e-04, 4.65485587e-04, 4.49862011e-04, 4.34543617e-04, 4.19324206e-04, 2.60536268e-04, 2.45052564e-04, 2.29740850e-04, 2.34466774e-04, 2.18822126e-04, 2.03370175e-04, 1.88058810e-04, 1.60192372e-04, 1.44485937e-04, 1.28920830e-04, 3.45615146e-04, 3.30171984e-04, 3.14682693e-04, 2.99300562e-04, 2.83925037e-04, 2.68518896e-04, 2.53254839e-04, 2.37950648e-04, 2.22716670e-04, 2.07562072e-04, 1.92296386e-04, 1.77147449e-04, 1.61994336e-04, 1.46895778e-04, 1.31844325e-04, 1.16730320e-04, 1.01757469e-04, 8.67861963e-05, 7.18669180e-05, 5.70719567e-05, 4.24701866e-05, 2.84846719e-05, 1.70599415e-05, -1.47213286e-05, -2.33691408e-05, -3.68025649e-05, -5.12388433e-05, -6.59972284e-05, -8.08926561e-05, -9.58433884e-05, -1.10882705e-04, -1.25976600e-04, -1.41044657e-04, -1.56166439e-04, -1.71307023e-04, -1.86516074e-04, -2.01731804e-04, -2.16980450e-04, -2.32271064e-04, -2.47527263e-04, -2.62940506e-04, -2.78283434e-04, -2.93711084e-04, -3.09180934e-04, -3.24661058e-04, -3.40237195e-04, -1.27807143e-04, -1.43646437e-04, -1.59638614e-04, -1.87593061e-04, -2.03169184e-04, -2.18941437e-04, -2.34920750e-04, -2.30605408e-04, -2.46262236e-04, -2.62226094e-04, -4.19838558e-04, -4.35510388e-04, -4.51152271e-04, -4.67120990e-04, -4.83241311e-04, -3.37647041e-04, -3.53568990e-04, -3.69836489e-04, -5.76354389e-04, -5.92070050e-04, -6.08178903e-04, -6.24440494e-04, -6.45648804e-04, -6.61431870e-04, -6.77491073e-04, 
-6.93967624e-04, -7.17683870e-04, -7.33471534e-04, -7.49999890e-04, -7.66390527e-04, -7.93468382e-04, -8.09502264e-04, -8.25728697e-04, -8.42282083e-04, -8.51265620e-04, -8.67322611e-04, -8.83649045e-04, -9.00280487e-04, -9.35055199e-04, -9.51097580e-04, -9.67527216e-04, -9.84144746e-04, -1.00128003e-03, -1.15522649e-03, -1.17168750e-03, -1.18826574e-03, -1.20496599e-03, -1.10272120e-03, -1.11865194e-03, -1.13539130e-03, -1.15241797e-03, -1.16964686e-03, -7.97322951e-04, -8.14269355e-04, -8.31696263e-04, -8.51555436e-04, -8.68656265e-04, -8.86220601e-04, -9.09406052e-04, -9.26509325e-04, -9.44124535e-04, -1.49479776e-03, -1.51314179e-03, -1.48387800e-03, -1.50146009e-03, -1.51945755e-03, -1.61006744e-03, -1.62846781e-03, -1.59783731e-03, -1.61545863e-03, -1.63336343e-03, -1.65167439e-03, -1.67034590e-03, -1.68956630e-03, -1.70884258e-03, -1.72863202e-03, -1.74859120e-03, -1.76901231e-03, -1.79015659e-03, -1.81144674e-03, -1.83329231e-03, -1.85552111e-03, -1.87840930e-03, -1.90151483e-03, -1.92550803e-03, -1.94982730e-03, -1.97511422e-03, -2.00066133e-03, -2.02709576e-03, -2.05422146e-03, -2.08215159e-03, -2.11093877e-03, -2.14011059e-03, -2.17073411e-03, -2.20196834e-03, -2.23409734e-03, -2.26700748e-03, -2.30150856e-03, -2.33719964e-03, -2.37406371e-03, -2.41223071e-03, -2.45184498e-03, -2.49327719e-03, -2.53651105e-03, -2.58166087e-03, -2.62895599e-03, -2.67871981e-03, -2.73117283e-03, -5.49861044e-03, -5.55437338e-03, -5.61159104e-03, -5.67073002e-03, -5.73173212e-03, -5.79498662e-03, -5.85969677e-03, -5.92768658e-03, -5.99809457e-03, -6.07080618e-03, -6.14715228e-03, -6.22711331e-03, ], dtype=np.float32, ) }, "ExpansionCoefficient": { "value": np.array( [ 1.17600127e-03, 1.17271533e-03, 1.17000856e-03, 1.16674276e-03, 2.11251900e-03, 2.10516527e-03, 2.09726905e-03, 2.08941335e-03, 1.63907595e-02, 1.58577170e-02, 1.53679820e-02, 1.49007449e-02, 1.44708352e-02, 1.40612368e-02, 1.36818690e-02, 1.33193973e-02, 1.29744308e-02, 1.26568424e-02, 1.23488475e-02, 1.20567940e-02, 1.17803067e-02, 1.15150018e-02, 1.12629030e-02, 1.10203745e-02, 1.07905651e-02, 1.05690639e-02, 1.03563424e-02, 1.01526314e-02, 9.95650515e-03, 9.76785459e-03, 9.58597753e-03, 9.41115711e-03, 9.23914276e-03, 9.07964632e-03, 8.92116502e-03, 8.76654685e-03, 9.04925726e-03, 8.88936501e-03, 9.14804544e-03, 8.98920093e-03, 8.83030891e-03, 9.06952657e-03, 8.90891161e-03, 1.36343827e-02, 1.32706892e-02, 1.29242949e-02, 1.36271119e-02, 1.32572902e-02, 1.29025253e-02, 1.35165229e-02, 1.31412474e-02, 1.27808526e-02, 8.91761761e-03, 8.74674786e-03, 8.58181808e-03, 8.42147414e-03, 8.26664641e-03, 7.81304855e-03, 7.67400907e-03, 7.54208490e-03, 7.40892906e-03, 8.81091598e-03, 8.62924196e-03, 8.45206063e-03, 8.28018785e-03, 8.11239891e-03, 8.62185098e-03, 8.43446422e-03, 8.25031102e-03, 8.07087123e-03, 8.30837712e-03, 8.11944436e-03, 7.93648325e-03, 7.75875151e-03, 8.14332347e-03, 7.94676598e-03, 7.75293307e-03, 7.56529858e-03, 7.88933039e-03, 7.68536143e-03, 7.48489471e-03, 7.28917075e-03, 7.55438488e-03, 7.34063145e-03, 7.13229552e-03, 6.92783622e-03, 1.06161544e-02, 1.01234140e-02, 9.64432582e-03, 6.52031973e-03, 6.29310543e-03, 6.06948463e-03, 5.84984245e-03, 5.63343242e-03, 8.61937553e-03, 8.08268972e-03, 7.55874207e-03, 6.79610623e-03, 6.32849289e-03, 5.86955249e-03, 5.41723240e-03, 5.56734810e-03, 5.01116784e-03, 4.46233014e-03, 1.40874484e-03, 1.34475902e-03, 1.28140685e-03, 1.21824886e-03, 1.15505024e-03, 1.09222531e-03, 1.02962845e-03, 9.67168540e-04, 9.04808170e-04, 8.42478999e-04, 7.80681905e-04, 7.18652213e-04, 
6.56902499e-04, 5.95146266e-04, 5.33432467e-04, 4.72071581e-04, 4.10460081e-04, 3.49062117e-04, 2.87777104e-04, 2.26464268e-04, 1.65259655e-04, 1.03993290e-04, 4.27830964e-05, -1.84028686e-05, -7.95840388e-05, -1.40780976e-04, -2.01987947e-04, -2.63233029e-04, -3.24499299e-04, -3.85862397e-04, -4.47216793e-04, -5.08567959e-04, -5.70152479e-04, -6.31901203e-04, -6.93684444e-04, -7.55490037e-04, -8.17523745e-04, -8.79664498e-04, -9.41973762e-04, -1.00450485e-03, -1.06710335e-03, -1.12990546e-03, -1.19290419e-03, -1.25615683e-03, -1.31971564e-03, -1.38323894e-03, -4.38789371e-03, -4.93527949e-03, -5.48970094e-03, -5.34658274e-03, -5.79780247e-03, -6.25621388e-03, -6.72366377e-03, -7.48283789e-03, -8.00681766e-03, -8.54192488e-03, -5.58420410e-03, -5.79793099e-03, -6.01683883e-03, -6.23886706e-03, -6.46463828e-03, -9.56355780e-03, -1.00387875e-02, -1.05282217e-02, -6.87109074e-03, -7.07587786e-03, -7.28309387e-03, -7.49528036e-03, -7.23363785e-03, -7.42882164e-03, -7.62982434e-03, -7.83343613e-03, -7.51076965e-03, -7.69859226e-03, -7.88733363e-03, -8.08352232e-03, -7.69890239e-03, -7.87641760e-03, -8.05852562e-03, -8.24564695e-03, -8.00882280e-03, -8.18727538e-03, -8.36882368e-03, -8.55544209e-03, -8.04922916e-03, -8.21674801e-03, -8.38823151e-03, -8.56383517e-03, -8.74411128e-03, -7.35407788e-03, -7.48245185e-03, -7.61653157e-03, -7.75389513e-03, -8.20003450e-03, -8.35770369e-03, -8.51695240e-03, -8.67962278e-03, -8.84699915e-03, -1.26767000e-02, -1.30308550e-02, -1.34020159e-02, -1.27902590e-02, -1.31374933e-02, -1.35022206e-02, -1.28020663e-02, -1.31427627e-02, -1.35003338e-02, -8.81921593e-03, -8.97676684e-03, -8.73885304e-03, -8.89289286e-03, -9.05076787e-03, -8.79113190e-03, -8.94579384e-03, -8.66949651e-03, -8.81993212e-03, -8.97467043e-03, -9.13402718e-03, -9.29924846e-03, -9.47104022e-03, -9.64829233e-03, -9.83224157e-03, -1.00242840e-02, -1.02243433e-02, -1.04304748e-02, -1.06464764e-02, -1.08723603e-02, -1.11076497e-02, -1.13517633e-02, -1.16107482e-02, -1.18797245e-02, -1.21643478e-02, -1.24597261e-02, -1.27725713e-02, -1.31026637e-02, -1.34509858e-02, -1.38195883e-02, -1.42097492e-02, -1.46267340e-02, -1.50670996e-02, -1.55417984e-02, -1.60482023e-02, -1.65943075e-02, -1.71795618e-02, -1.78127103e-02, -1.84999816e-02, -1.92504879e-02, -2.00698171e-02, -2.09702197e-02, -2.19654124e-02, -2.30720937e-02, -2.43106075e-02, -2.57069822e-02, -2.72962451e-02, -1.43178934e-02, -1.48085468e-02, -1.53383436e-02, -1.59113277e-02, -1.65353119e-02, -1.72161739e-02, -1.79625414e-02, -1.87847745e-02, -1.96950957e-02, -2.07099430e-02, -2.18482167e-02, -2.31328830e-02, ], dtype=np.float32, ) }, "Latitude": {"value": np.random.rand(96, 332).astype(np.float32)}, "Longitude": {"value": np.random.rand(96, 332).astype(np.float32)}, "LunarAzimuthAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "LunarZenithAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "MidTime": { "value": np.array( [ 1950675122400462, 1950675124187044, 1950675125973621, 1950675127760200, 1950675129546777, 1950675131333401, 1950675133119981, 1950675134906559, 1950675136693138, 1950675138479716, 1950675140266341, 1950675142052918, 1950675143839498, 1950675145626075, 1950675147412654, 1950675149199278, 1950675150985857, 1950675152772434, 1950675154559014, 1950675156345591, 1950675158132216, 1950675159918795, 1950675161705373, 1950675163491595, 1950675165278173, 1950675167064395, 1950675168850973, 1950675170637195, 1950675172423773, 1950675174209995, 1950675175996573, 1950675177782795, 1950675179569373, 
1950675181355595, 1950675183142173, 1950675184928395, 1950675186714973, 1950675188501195, 1950675190287773, 1950675192073995, 1950675193860573, 1950675195646795, 1950675197433373, 1950675199219595, 1950675201006173, 1950675202792395, 1950675204578973, -993, ] ) }, "MoonIllumFraction": {"value": 11.518141746520996}, "MoonPhaseAngle": {"value": 140.32131958007812}, "NumberOfTiePointZoneGroupsScan": {"value": 62}, "NumberOfTiePointZoneGroupsTrack": {"value": 1}, "NumberOfTiePointZonesScan": { "value": np.array( [ 1, 1, 1, 1, 1, 1, 1, 1, 28, 2, 3, 2, 3, 3, 3, 5, 4, 5, 4, 4, 4, 4, 4, 3, 5, 3, 4, 3, 23, 23, 3, 4, 3, 5, 3, 4, 4, 4, 4, 4, 5, 4, 5, 3, 3, 3, 2, 3, 2, 40, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ], dtype=np.int32, ) }, "NumberOfTiePointZonesTrack": {"value": 1}, "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)}, "QF1_SCAN_VIIRSSDRGEO": { "value": np.array( [ 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 2, 130, 2, 130, 2, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 0, ], dtype=np.uint8, ) }, "QF2_SCAN_VIIRSSDRGEO": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, ], dtype=np.uint8, ) }, "SCAttitude": { "value": np.array( [ [-9.22587514e-01, 3.92340779e00, 5.93621433e-01], [-2.82428920e-01, 3.98425841e00, 7.05978215e-01], [5.63421488e-01, 3.83695555e00, 3.93174857e-01], [-3.16407561e-01, 3.85351181e00, 5.33868372e-01], [-1.10977542e00, 3.82791996e00, 6.06707633e-01], [-1.46703672e00, 3.94862103e00, 6.45296216e-01], [-1.14162290e00, 3.79930806e00, 7.45548725e-01], [-1.56181908e00, 3.68108273e00, 6.49301231e-01], [-1.46823406e00, 3.63365412e00, 5.03535330e-01], [-1.02590537e00, 3.64477968e00, 5.22250295e-01], [-5.35379410e-01, 3.69151831e00, 4.32526857e-01], [-5.78065366e-02, 3.37806726e00, 4.95986529e-02], [-2.40110800e-01, 3.22970843e00, -9.55391768e-03], [-6.54527247e-01, 3.16465378e00, 1.89672917e-01], [-1.35780311e00, 3.24750924e00, 1.63008988e-01], [-1.47417045e00, 3.39788198e00, 1.84387550e-01], [-1.74577117e00, 3.53278613e00, 1.89606979e-01], [-1.46304774e00, 3.22666740e00, 1.59070507e-01], [-4.05473042e00, 3.06258607e00, 1.10443914e00], [-5.91582203e00, 2.83895302e00, 1.79846287e00], [-7.04713678e00, 2.55699897e00, 2.23985386e00], [-7.43741798e00, 2.21711683e00, 2.42266488e00], [-7.06249666e00, 1.81872594e00, 2.33713675e00], [-5.96051836e00, 1.36609375e00, 1.99506497e00], [-4.13137341e00, 8.60225558e-01, 1.39551389e00], [-1.57741416e00, 3.02793205e-01, 5.36690295e-01], [7.63817742e-12, 1.11727738e-10, 2.74194088e-11], [-1.24213686e-11, 8.01499769e-11, -1.34056446e-11], [1.78272761e-11, 9.04948685e-11, 1.77389995e-11], [-1.47259357e-11, 9.37734057e-11, -3.89882709e-11], [-1.94052344e-11, 1.49411969e-10, -2.48492286e-11], [3.40418752e-12, 1.25333730e-10, 1.14499972e-11], [5.64890669e-12, 1.35170833e-10, 2.27858565e-11], [8.78361273e-12, 1.02109009e-10, -5.92111386e-12], [1.47398396e-11, 8.59943505e-11, -8.54686872e-13], [-5.35027361e-12, 1.25450331e-10, -1.54262800e-11], [2.12667054e-11, 1.57356642e-10, 2.54392306e-11], [-6.39285022e-12, 1.42791029e-10, -8.58749790e-12], [-2.18451160e-11, 9.94347313e-11, -2.18451160e-11], [1.77587389e-11, 1.16834944e-10, 3.09037483e-11], [5.09583955e-12, 1.06878555e-10, 1.30452402e-11], [-1.25895900e-11, 1.06217646e-10, -1.07971496e-11], [1.45264981e-11, 1.03935242e-10, 1.73963136e-11], [-1.41730258e-12, 7.72037989e-11, 1.15057850e-11], 
[1.99397634e-11, 1.36618120e-10, 4.70010628e-11], [1.24784124e-11, 1.14499965e-10, 4.69658253e-12], [-1.83001236e-11, 5.19546177e-11, -1.31873679e-11], [-9.99299988e02, -9.99299988e02, -9.99299988e02], ], dtype=np.float32, ) }, "SCPosition": { "value": np.array( [ [2.3191672e06, -4.5127075e06, 5.1096645e06], [2.3202438e06, -4.5225140e06, 5.1005205e06], [2.3213098e06, -4.5323050e06, 5.0913595e06], [2.3223650e06, -4.5420810e06, 5.0821800e06], [2.3234100e06, -4.5518415e06, 5.0729835e06], [2.3244445e06, -4.5615875e06, 5.0637700e06], [2.3254692e06, -4.5713185e06, 5.0545390e06], [2.3264830e06, -4.5810340e06, 5.0452915e06], [2.3274862e06, -4.5907340e06, 5.0360255e06], [2.3284792e06, -4.6004185e06, 5.0267430e06], [2.3294620e06, -4.6100885e06, 5.0174430e06], [2.3304345e06, -4.6197430e06, 5.0081270e06], [2.3313962e06, -4.6293820e06, 4.9987935e06], [2.3323475e06, -4.6390050e06, 4.9894420e06], [2.3332888e06, -4.6486130e06, 4.9800740e06], [2.3342195e06, -4.6582060e06, 4.9706890e06], [2.3351398e06, -4.6677835e06, 4.9612880e06], [2.3360495e06, -4.6773440e06, 4.9518685e06], [2.3369522e06, -4.6868750e06, 4.9424430e06], [2.3378502e06, -4.6963695e06, 4.9330150e06], [2.3387432e06, -4.7058270e06, 4.9235845e06], [2.3396312e06, -4.7152475e06, 4.9141520e06], [2.3405140e06, -4.7246290e06, 4.9047175e06], [2.3413915e06, -4.7339725e06, 4.8952825e06], [2.3422642e06, -4.7432805e06, 4.8858430e06], [2.3431318e06, -4.7525505e06, 4.8764035e06], [2.3439710e06, -4.7618790e06, 4.8668965e06], [2.3447770e06, -4.7712820e06, 4.8573130e06], [2.3455728e06, -4.7806710e06, 4.8477115e06], [2.3463582e06, -4.7900425e06, 4.8380950e06], [2.3471335e06, -4.7994005e06, 4.8284610e06], [2.3478980e06, -4.8087395e06, 4.8188110e06], [2.3486522e06, -4.8180645e06, 4.8091435e06], [2.3493960e06, -4.8273715e06, 4.7994615e06], [2.3501298e06, -4.8366645e06, 4.7897610e06], [2.3508530e06, -4.8459395e06, 4.7800465e06], [2.3515658e06, -4.8552000e06, 4.7703130e06], [2.3522680e06, -4.8644420e06, 4.7605655e06], [2.3529602e06, -4.8736700e06, 4.7508000e06], [2.3536420e06, -4.8828800e06, 4.7410205e06], [2.3543132e06, -4.8920755e06, 4.7312230e06], [2.3549740e06, -4.9012520e06, 4.7214105e06], [2.3556248e06, -4.9104145e06, 4.7115800e06], [2.3562650e06, -4.9195590e06, 4.7017360e06], [2.3568952e06, -4.9286890e06, 4.6918745e06], [2.3575145e06, -4.9378000e06, 4.6819980e06], [2.3581235e06, -4.9468960e06, 4.6721035e06], [-9.9929999e02, -9.9929999e02, -9.9929999e02], ], dtype=np.float32, ) }, "SCSolarAzimuthAngle": { "value": np.array( [ -140.6137, -140.54446, -140.47484, -140.40486, -140.33464, -140.26427, -140.19333, -140.12198, -140.05042, -139.97855, -139.90648, -139.83394, -139.76117, -139.68803, -139.61465, -139.54103, -139.46695, -139.3923, -139.31741, -139.2424, -139.16727, -139.09201, -139.01662, -138.94112, -138.86546, -138.78972, -138.71251, -138.63487, -138.5569, -138.4786, -138.39995, -138.32097, -138.24161, -138.16193, -138.0819, -138.00153, -137.92078, -137.8397, -137.75827, -137.67648, -137.59433, -137.51183, -137.42896, -137.34573, -137.26213, -137.17819, -137.09386, -999.3, ], dtype=np.float32, ) }, "SCSolarZenithAngle": { "value": np.array( [ 135.88528, 135.96703, 136.04868, 136.1302, 136.21165, 136.2931, 136.37451, 136.4556, 136.53659, 136.61748, 136.69843, 136.77931, 136.86021, 136.94092, 137.02148, 137.10208, 137.18248, 137.26239, 137.34204, 137.42155, 137.50092, 137.58014, 137.65923, 137.73816, 137.81696, 137.8956, 137.97507, 138.05447, 138.13382, 138.21303, 138.29218, 138.37122, 138.45016, 138.529, 138.60777, 138.68642, 138.76498, 138.84343, 
138.9218, 139.00005, 139.07823, 139.15627, 139.23422, 139.31207, 139.38983, 139.46748, 139.54503, -999.3, ], dtype=np.float32, ) }, "SCVelocity": { "value": np.array( [ [605.31726, -5492.9614, -5113.397], [599.4935, -5484.5615, -5123.1396], [593.66986, -5476.142, -5132.8657], [587.8464, -5467.7017, -5142.573], [582.02313, -5459.241, -5152.263], [576.19995, -5450.7607, -5161.936], [570.37714, -5442.2607, -5171.592], [564.5546, -5433.741, -5181.2295], [558.73236, -5425.2, -5190.849], [552.9104, -5416.6396, -5200.4517], [547.0887, -5408.06, -5210.0366], [541.26746, -5399.4604, -5219.6035], [535.44666, -5390.841, -5229.153], [529.6263, -5382.201, -5238.684], [523.8063, -5373.5415, -5248.1978], [517.9866, -5364.863, -5257.694], [512.16754, -5356.1646, -5267.1724], [506.34906, -5347.446, -5276.632], [500.53455, -5338.72, -5286.0645], [494.72552, -5329.993, -5295.466], [488.9218, -5321.265, -5304.8364], [483.1238, -5312.536, -5314.1743], [477.33157, -5303.806, -5323.4795], [471.546, -5295.0767, -5332.7515], [465.7647, -5286.344, -5341.9937], [459.99005, -5277.613, -5351.2026], [454.19785, -5268.798, -5360.442], [448.38614, -5259.887, -5369.7207], [442.57404, -5250.955, -5378.983], [436.7639, -5242.0063, -5388.225], [430.9534, -5233.0366, -5397.4517], [425.145, -5224.0483, -5406.6567], [419.33627, -5215.0396, -5415.845], [413.52963, -5206.013, -5425.014], [407.72275, -5196.9663, -5434.1665], [401.91797, -5187.9023, -5443.299], [396.11307, -5178.8164, -5452.4136], [390.3103, -5169.7134, -5461.508], [384.50742, -5160.59, -5470.586], [378.70673, -5151.4497, -5479.644], [372.90598, -5142.288, -5488.6846], [367.1075, -5133.109, -5497.7046], [361.309, -5123.9097, -5506.708], [355.5128, -5114.6934, -5515.691], [349.71658, -5105.4565, -5524.657], [343.9228, -5096.202, -5533.602], [338.12906, -5086.927, -5542.53], [-999.3, -999.3, -999.3], ], dtype=np.float32, ) }, "SatelliteAzimuthAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "SatelliteZenithAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "SolarAzimuthAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "SolarZenithAngle": { "value": np.random.rand(96, 332).astype(np.float32) }, "StartTime": { "value": np.array( [ 1950675122120971, 1950675123907557, 1950675125694139, 1950675127480722, 1950675129267304, 1950675131053910, 1950675132840494, 1950675134627077, 1950675136413660, 1950675138200243, 1950675139986850, 1950675141773433, 1950675143560016, 1950675145346598, 1950675147133181, 1950675148919788, 1950675150706371, 1950675152492953, 1950675154279537, 1950675156066119, 1950675157852726, 1950675159639309, 1950675161425892, 1950675163212109, 1950675164998692, 1950675166784909, 1950675168571492, 1950675170357709, 1950675172144292, 1950675173930509, 1950675175717092, 1950675177503309, 1950675179289892, 1950675181076109, 1950675182862692, 1950675184648909, 1950675186435492, 1950675188221709, 1950675190008292, 1950675191794509, 1950675193581092, 1950675195367309, 1950675197153892, 1950675198940109, 1950675200726692, 1950675202512909, 1950675204299492, -993, ] ) }, "TiePointZoneGroupLocationScanCompact": { "value": np.array( [ 0, 2, 4, 6, 8, 10, 12, 14, 16, 45, 48, 52, 55, 59, 63, 67, 73, 78, 84, 89, 94, 99, 104, 109, 113, 119, 123, 128, 132, 156, 180, 184, 189, 193, 199, 203, 208, 213, 218, 223, 228, 234, 239, 245, 249, 253, 257, 260, 264, 267, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, ], dtype=np.int32, ) }, "TiePointZoneGroupLocationTrackCompact": {"value": 0}, "attrs": { "OriginalFilename": 
np.array( [ [ b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5" ] ], dtype="|S78", ) }, }, "VIIRS-DNB-SDR_All": { "NumberOfBadChecksums": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -993, ], dtype=np.int32, ) }, "NumberOfDiscardedPkts": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -993, ], dtype=np.int32, ) }, "NumberOfMissingPkts": { "value": np.array( [ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 18, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, -993, ], dtype=np.int32, ) }, "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)}, "QF1_VIIRSDNBSDR": { "value": (np.random.rand(768, 4064) * 255).astype(np.uint8) }, "QF2_SCAN_SDR": { "value": np.array( [ 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, ], dtype=np.uint8, ) }, "QF3_SCAN_RDR": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, ], dtype=np.uint8, ) }, "Radiance": {"value": np.random.rand(768, 4064).astype(np.float32)}, "attrs": { "OriginalFilename": np.array( [ [ b"SVDNB_j01_d20191025_t0611251_e0612478_b10015_c20191025062427398006_cspp_dev.h5" ] ], dtype="|S78", ), "PixelOffsetScan": np.array([[0.5]], dtype=np.float32), "PixelOffsetTrack": np.array([[0.5]], dtype=np.float32), "TiePointZoneGroupLocationScan": np.array( [ [0], [2], [4], [6], [8], [10], [12], [14], [16], [464], [496], [544], [576], [648], [720], [792], [872], [928], [1008], [1072], [1136], [1200], [1264], [1328], [1400], [1480], [1552], [1640], [1712], [1896], [2080], [2152], [2240], [2312], [2392], [2464], [2528], [2592], [2656], [2720], [2784], [2864], [2920], [3000], [3072], [3144], [3216], [3248], [3296], [3328], [3968], [3976], [3984], [3992], [4000], [4008], [4016], [4024], [4032], [4040], [4048], [4056], ], dtype=np.int32, ), "TiePointZoneGroupLocationTrack": np.array( [[0]], dtype=np.int32 ), "TiePointZoneSizeScan": np.array( [ [2], [2], [2], [2], [2], [2], [2], [2], [16], [16], [16], [16], [24], [24], [24], [16], [14], [16], [16], [16], [16], [16], [16], [24], [16], [24], [22], [24], [8], [8], [24], [22], [24], [16], [24], [16], [16], [16], [16], [16], [16], [14], [16], [24], [24], [24], [16], [16], [16], [16], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], ], dtype=np.int32, ), "TiePointZoneSizeTrack": np.array([[16]], dtype=np.int32), }, }, "attrs": {"MissionStartTime": np.array([[1698019234000000]])}, }, "Data_Products": { "VIIRS-DNB-GEO": { "VIIRS-DNB-GEO_Aggr": { "attrs": { "AggregateBeginningDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateBeginningGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateBeginningOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateBeginningTime": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "AggregateEndingDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateEndingGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateEndingOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateEndingTime": np.array( 
[[b"061247.849492Z"]], dtype="|S15" ), "AggregateNumberGranules": np.array([[1]], dtype=np.uint64), } }, "VIIRS-DNB-GEO_Gran_0": { "attrs": { "Ascending/Descending_Indicator": np.array( [[1]], dtype=np.uint8 ), "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"), "Beginning_Time": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "East_Bounding_Coordinate": np.array( [[-45.09228]], dtype=np.float32 ), "Ending_Date": np.array([[b"20191025"]], dtype="|S9"), "Ending_Time": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "G-Ring_Latitude": np.array( [ [41.84151], [44.31062], [46.78565], [45.41409], [41.07657], [38.81504], [36.53401], [40.55788], ], dtype=np.float32, ), "G-Ring_Longitude": np.array( [ [-82.66234], [-82.55624], [-82.48891], [-62.80042], [-45.09228], [-46.58502], [-47.95933], [-64.54196], ], dtype=np.float32, ), "LeapSecondsGranuleStart": np.array([[37]], dtype=np.int32), "N_Algorithm_Version": np.array( [[b"1.O.000.014"]], dtype="|S12" ), "N_Anc_Filename": np.array( [ [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0691_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0692_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0693_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0719_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0720_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0721_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0722_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0723_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0724_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0725_1.O.0.0" ], [ b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa ], [ b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa ], ], dtype="|S104", ), "N_Aux_Filename": np.array( [ [ b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"CmnGeo-SAA-AC_j01_20151008180000Z_20170807130000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"TLE-AUX_j01_20191024053224Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-DNB-PARAM-LUT_j01_20180507121508Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-IMG-PARAM-LUT_j01_20180430182354Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-MOD-PARAM-LUT_j01_20180430182652Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], ], dtype="|S126", ), "N_Beginning_Orbit_Number": np.array( [[10015]], dtype=np.uint64 ), "N_Beginning_Time_IET": np.array( [[1950675122120971]], dtype=np.uint64 ), "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"), "N_Creation_Time": np.array( [[b"062136.412867Z"]], dtype="|S15" ), "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"), "N_Ending_Time_IET": np.array( [[1950675204849492]], dtype=np.uint64 ), "N_Granule_ID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"), 
"N_Granule_Version": np.array([[b"A1"]], dtype="|S3"), "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"), "N_Input_Prod": np.array( [ [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"], ], dtype="|S40", ), "N_JPSS_Document_Ref": np.array( [ [ b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf" ], [ b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-GEO-PP.xml" ], [ b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf" ], ], dtype="|S68", ), "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"), "N_Nadir_Latitude_Max": np.array( [[45.3722]], dtype=np.float32 ), "N_Nadir_Latitude_Min": np.array( [[40.6172]], dtype=np.float32 ), "N_Nadir_Longitude_Max": np.array( [[-62.80047]], dtype=np.float32 ), "N_Nadir_Longitude_Min": np.array( [[-64.51342]], dtype=np.float32 ), "N_Number_Of_Scans": np.array([[47]], dtype=np.int32), "N_Primary_Label": np.array( [[b"Non-Primary"]], dtype="|S12" ), "N_Quality_Summary_Names": np.array( [ [b"Automatic Quality Flag"], [b"Percent Missing Data"], [b"Percent Out of Bounds"], ], dtype="|S23", ), "N_Quality_Summary_Values": np.array( [[1], [61], [0]], dtype=np.int32 ), "N_Reference_ID": np.array( [[b"VIIRS-DNB-GEO:J01002526558865:A1"]], dtype="|S33" ), "N_Software_Version": np.array( [[b"CSPP_SDR_3_1_3"]], dtype="|S15" ), "N_Spacecraft_Maneuver": np.array( [[b"Normal Operations"]], dtype="|S18" ), "North_Bounding_Coordinate": np.array( [[46.8018]], dtype=np.float32 ), "South_Bounding_Coordinate": np.array( [[36.53401]], dtype=np.float32 ), "West_Bounding_Coordinate": np.array( [[-82.66234]], dtype=np.float32 ), } }, "attrs": { "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"), "N_Anc_Type_Tasked": np.array([[b"Official"]], dtype="|S9"), "N_Collection_Short_Name": np.array( [[b"VIIRS-DNB-GEO"]], dtype="|S14" ), "N_Dataset_Type_Tag": np.array([[b"GEO"]], dtype="|S4"), "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"), "Operational_Mode": np.array( [[b"J01 Normal Operations, VIIRS Operational"]], dtype="|S41", ), }, }, "VIIRS-DNB-SDR": { "VIIRS-DNB-SDR_Aggr": { "attrs": { "AggregateBeginningDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateBeginningGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateBeginningOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateBeginningTime": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "AggregateEndingDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateEndingGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateEndingOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateEndingTime": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "AggregateNumberGranules": np.array([[1]], dtype=np.uint64), } }, "VIIRS-DNB-SDR_Gran_0": { "attrs": { "Ascending/Descending_Indicator": np.array( [[1]], dtype=np.uint8 ), "Band_ID": np.array([[b"N/A"]], dtype="|S4"), "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"), "Beginning_Time": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "East_Bounding_Coordinate": np.array( [[-45.09281]], dtype=np.float32 ), "Ending_Date": np.array([[b"20191025"]], dtype="|S9"), "Ending_Time": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "G-Ring_Latitude": np.array( [ [41.84157], [44.31069], [46.78591], [45.41409], [41.07675], [38.81512], [36.53402], [40.55788], ], dtype=np.float32, ), "G-Ring_Longitude": np.array( [ [-82.65787], [-82.55148], [-82.47269], [-62.80042], [-45.09281], [-46.58528], [-47.95936], [-64.54196], ], 
dtype=np.float32, ), "N_Algorithm_Version": np.array( [[b"1.O.000.015"]], dtype="|S12" ), "N_Anc_Filename": np.array( [ [ b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa ], [ b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa ], ], dtype="|S104", ), "N_Aux_Filename": np.array( [ [ b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-DNB-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M10-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M11-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M12-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M13-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M14-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M15-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M16-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M6-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M7-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M8-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M9-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-RSBAUTOCAL-HISTORY-AUX_j01_20191024021527Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa ], [ b"VIIRS-RSBAUTOCAL-VOLT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-EDD154640-109C-LE-PE_all-_all_all-_ops" # noqa ], [ 
b"VIIRS-SDR-BB-TEMP-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-CAL-AUTOMATE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Pred-SideA-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-COEFF-A-LUT_j01_20180109114311Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-COEFF-B-LUT_j01_20180109101739Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-004-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DELTA-C-LUT_j01_20180109000000Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DG-ANOMALY-DN-LIMITS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-DN0-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-026-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-FRAME-TO-ZONE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-GAIN-RATIOS-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-025-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-LGS-GAINS-LUT_j01_20180413122703Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-005-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-STRAY-LIGHT-CORRECTION-LUT_j01_20190930160523Z_20191001000000Z_ee00000000000000Z_PS-1-O-CCR-4322-024-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-EBBT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-EMISSIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-F-PREDICTED-LUT_j01_20180413123333Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GAIN-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-HAM-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBC-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBC-RR-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBS-TO-PIXELS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RADIOMETRIC-PARAM-V3-LUT_j01_20161117000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-REFLECTIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RELATIVE-SPECTRAL-RESPONSE-LUT_j01_20161031000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-FusedM9-LE-PE_all-_all_all-_ops" # noqa ], [ 
b"VIIRS-SDR-RTA-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-M16-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-SOLAR-IRAD-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Thuillier2002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-TELE-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa ], ], dtype="|S151", ), "N_Beginning_Orbit_Number": np.array( [[10015]], dtype=np.uint64 ), "N_Beginning_Time_IET": np.array( [[1950675122120971]], dtype=np.uint64 ), "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"), "N_Creation_Time": np.array( [[b"062411.116253Z"]], dtype="|S15" ), "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"), "N_Ending_Time_IET": np.array( [[1950675204849492]], dtype=np.uint64 ), "N_Graceful_Degradation": np.array([[b"No"]], dtype="|S3"), "N_Granule_ID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"), "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"), "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"), "N_Input_Prod": np.array( [ [b"GEO-VIIRS-OBC-IP:J01002526558865:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"], [b"VIIRS-DNB-GEO:J01002526558865:A1"], [b"VIIRS-IMG-RGEO-TC:J01002526558865:A1"], [b"VIIRS-MOD-RGEO-TC:J01002526558865:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558012:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"], ], dtype="|S40", ), "N_JPSS_Document_Ref": np.array( [ [ b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf" ], [ b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-SDR-PP.xml" ], [ b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf" ], ], dtype="|S68", ), "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"), "N_Nadir_Latitude_Max": np.array( [[45.3722]], dtype=np.float32 ), "N_Nadir_Latitude_Min": np.array( [[40.6172]], dtype=np.float32 ), "N_Nadir_Longitude_Max": np.array( [[-62.80047]], dtype=np.float32 ), "N_Nadir_Longitude_Min": np.array( [[-64.51342]], dtype=np.float32 ), "N_Number_Of_Scans": np.array([[47]], dtype=np.int32), "N_Percent_Erroneous_Data": np.array( [[0.0]], dtype=np.float32 ), "N_Percent_Missing_Data": np.array( [[51.05127]], dtype=np.float32 ), "N_Percent_Not-Applicable_Data": np.array( [[0.0]], dtype=np.float32 ), "N_Primary_Label": np.array( [[b"Non-Primary"]], dtype="|S12" ), "N_Quality_Summary_Names": np.array( [ [b"Scan Quality Exclusion"], [b"Summary VIIRS SDR Quality"], ], dtype="|S26", ), "N_Quality_Summary_Values": np.array( [[24], [49]], dtype=np.int32 ), "N_RSB_Index": np.array([[17]], dtype=np.int32), "N_Reference_ID": np.array( [[b"VIIRS-DNB-SDR:J01002526558865:A1"]], dtype="|S33" ), "N_Satellite/Local_Azimuth_Angle_Max": np.array( [[179.9995]], dtype=np.float32 ), "N_Satellite/Local_Azimuth_Angle_Min": np.array( [[-179.9976]], dtype=np.float32 ), "N_Satellite/Local_Zenith_Angle_Max": np.array( [[69.83973]], dtype=np.float32 ), "N_Satellite/Local_Zenith_Angle_Min": np.array( [[0.00898314]], dtype=np.float32 ), "N_Software_Version": np.array( [[b"CSPP_SDR_3_1_3"]], dtype="|S15" ), "N_Solar_Azimuth_Angle_Max": np.array( [[73.93496]], dtype=np.float32 ), "N_Solar_Azimuth_Angle_Min": np.array( [[23.83542]], dtype=np.float32 ), "N_Solar_Zenith_Angle_Max": np.array( [[147.5895]], dtype=np.float32 ), 
"N_Solar_Zenith_Angle_Min": np.array( [[126.3929]], dtype=np.float32 ), "N_Spacecraft_Maneuver": np.array( [[b"Normal Operations"]], dtype="|S18" ), "North_Bounding_Coordinate": np.array( [[46.8018]], dtype=np.float32 ), "South_Bounding_Coordinate": np.array( [[36.53402]], dtype=np.float32 ), "West_Bounding_Coordinate": np.array( [[-82.65787]], dtype=np.float32 ), } }, "attrs": { "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"), "N_Collection_Short_Name": np.array( [[b"VIIRS-DNB-SDR"]], dtype="|S14" ), "N_Dataset_Type_Tag": np.array([[b"SDR"]], dtype="|S4"), "N_Instrument_Flight_SW_Version": np.array( [[20], [65534]], dtype=np.int32 ), "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"), "Operational_Mode": np.array( [[b"J01 Normal Operations, VIIRS Operational"]], dtype="|S41", ), }, }, }, "attrs": { "CVIIRS_Version": np.array([[b"2.0.1"]], dtype="|S5"), "Compact_VIIRS_SDR_Version": np.array([[b"3.1"]], dtype="|S3"), "Distributor": np.array([[b"cspp"]], dtype="|S5"), "Mission_Name": np.array([[b"JPSS-1"]], dtype="|S7"), "N_Dataset_Source": np.array([[b"all-"]], dtype="|S5"), "N_GEO_Ref": np.array( [ [ b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5" ] ], dtype="|S78", ), "N_HDF_Creation_Date": np.array([[b"20191025"]], dtype="|S8"), "N_HDF_Creation_Time": np.array([[b"062502.927000Z"]], dtype="|S14"), "Platform_Short_Name": np.array([[b"J01"]], dtype="|S4"), "Satellite_Id_Filename": np.array([[b"j01"]], dtype="|S3"), }, } self.filename = os.path.join( tempfile.gettempdir(), "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5", ) h5f = h5py.File(self.filename, mode="w") def fill_h5(root, stuff): for key, val in stuff.items(): if key in ["value", "attrs"]: continue if "value" in val: root[key] = val["value"] else: grp = root.create_group(key) fill_h5(grp, stuff[key]) if "attrs" in val: for attrs, val in val["attrs"].items(): root[key].attrs[attrs] = val fill_h5(h5f, fake_dnb) for attr, val in fake_dnb["attrs"].items(): h5f.attrs[attr] = val h5f.close() def test_get_dataset(self): """Retrieve datasets from a DNB file.""" from satpy.readers.viirs_compact import VIIRSCompactFileHandler from satpy import DatasetID filename_info = {} filetype_info = {'file_type': 'compact_dnb'} dsid = DatasetID(name='DNB', calibration='radiance') test = VIIRSCompactFileHandler(self.filename, filename_info, filetype_info) ds = test.get_dataset(dsid, {}) self.assertEqual(ds.shape, (752, 4064)) self.assertEqual(ds.dtype, np.float32) dsid = DatasetID(name='longitude') ds = test.get_dataset(dsid, {'standard_name': 'longitude'}) self.assertEqual(ds.shape, (752, 4064)) self.assertEqual(ds.dtype, np.float32) self.assertEqual(ds.compute().shape, (752, 4064)) def tearDown(self): """Destroy.""" try: os.remove(self.filename) except OSError: pass def suite(): """Test suite for test_viirs_compact.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestCompact)) return mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/tests/reader_tests/test_viirs_edr_active_fires.py000066400000000000000000000405621362525524100260550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """VIIRS Active Fires Tests. This module implements tests for VIIRS Active Fires NetCDF and ASCII file readers. """ import sys import os import numpy as np import io import dask.dataframe as dd import pandas as pd from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.readers.file_handlers import BaseFileHandler from satpy.tests.utils import convert_file_content_to_data_array if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_SHAPE = (1, 100) DEFAULT_LATLON_FILE_DTYPE = np.float32 DEFAULT_LATLON_FILE_DATA = np.arange(start=43, stop=45, step=0.02, dtype=DEFAULT_LATLON_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_DETECTION_FILE_DTYPE = np.uint8 DEFAULT_DETECTION_FILE_DATA = np.arange(start=60, stop=100, step=0.4, dtype=DEFAULT_DETECTION_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_M13_FILE_DTYPE = np.float32 DEFAULT_M13_FILE_DATA = np.arange(start=300, stop=340, step=0.4, dtype=DEFAULT_M13_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_POWER_FILE_DTYPE = np.float32 DEFAULT_POWER_FILE_DATA = np.arange(start=1, stop=25, step=0.24, dtype=DEFAULT_POWER_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeModFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): """Swap in CDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content['/attr/data_id'] = "AFMOD" file_content['satellite_name'] = "npp" file_content['sensor'] = 'VIIRS' file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA file_content['Fire Pixels/FP_T13'] = DEFAULT_M13_FILE_DATA file_content['Fire Pixels/FP_T13/attr/units'] = 'kelvins' file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA file_content['Fire Pixels/attr/units'] = 'none' file_content['Fire Pixels/shape'] = DEFAULT_FILE_SHAPE attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') convert_file_content_to_data_array( file_content, attrs=attrs, dims=('z', 'fakeDim0', 'fakeDim1')) return file_content class FakeImgFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): """Swap in CDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content['/attr/data_id'] = "AFIMG" file_content['satellite_name'] = "npp" file_content['sensor'] = 'VIIRS' file_content['FP_latitude'] = DEFAULT_LATLON_FILE_DATA file_content['FP_longitude'] = DEFAULT_LATLON_FILE_DATA file_content['FP_power'] = DEFAULT_POWER_FILE_DATA file_content['FP_T4'] = DEFAULT_M13_FILE_DATA file_content['FP_T4/attr/units'] = 'kelvins' file_content['FP_confidence'] = DEFAULT_DETECTION_FILE_DATA attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') 
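# The fake handlers in this module encode file contents with the flat dict
# convention used across satpy's reader tests: 'group/variable' keys hold
# data arrays, 'variable/attr/name' keys hold per-variable attributes, and
# '/attr/name' keys hold global file attributes.  The helper call below
# (imported from satpy.tests.utils) wraps the listed variables in
# xarray.DataArray objects so the fake matches what the real NetCDF4 file
# handler returns.  A minimal sketch of the equivalent manual wrapping
# (illustrative only, not the helper's actual implementation):
#
#     import xarray as xr
#     file_content['FP_power'] = xr.DataArray(
#         DEFAULT_POWER_FILE_DATA, dims=('z', 'fakeDim0', 'fakeDim1'))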
convert_file_content_to_data_array( file_content, attrs=attrs, dims=('z', 'fakeDim0', 'fakeDim1')) return file_content class FakeModFiresTextFileHandler(BaseFileHandler): """Fake file handler for text files at moderate resolution.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" super(FakeModFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content() platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") def get_test_content(self): """Create fake test file content.""" fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", "T13", "Along-scan", "Along-track", "confidence_pct", "power"]), chunksize=1) class FakeImgFiresTextFileHandler(BaseFileHandler): """Fake file handler for text files at image resolution.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" super(FakeImgFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content() def get_test_content(self): """Create fake test file content.""" fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", "T4", "Along-scan", "Along-track", "confidence_cat", "power"]), chunksize=1) class TestModVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = 'viirs_edr_active_fires.yaml' def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy.config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeModFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the CDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' ]) r.create_filehandlers(loadables) datasets = r.load(['confidence_pct']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], '%') 
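# The setUp methods in these test classes all lean on the same mocking
# trick (see the Stack Overflow link cited elsewhere in this package): the
# real file handler's __bases__ tuple is patched so the class inherits from
# the fake handler instead of the real one.  Setting ``is_local = True`` on
# the patcher appears to be what lets mock restore __bases__ by assignment
# on stop() rather than attempting a delattr, which would fail.  The
# pattern in isolation (names illustrative):
#
#     patcher = mock.patch.object(RealFileHandler, '__bases__',
#                                 (FakeFileHandler,))
#     patcher.start()
#     patcher.is_local = True
#     ...  # RealFileHandler instances now serve fake file content
#     patcher.stop()
#
# The fake text handlers above use a related trick for tabular data: an
# in-memory CSV is parsed with pandas and wrapped via
# dask.dataframe.from_pandas(..., chunksize=1) so the reader receives the
# same lazy dataframe interface it would get from a real ASCII file.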
self.assertEqual(v.attrs['_FillValue'], 255) self.assertTrue(np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE)) datasets = r.load(['T13']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') datasets = r.load(['power']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'MW') self.assertEqual(v.attrs['platform_name'], 'NOAA-21') self.assertEqual(v.attrs['sensor'], 'VIIRS') class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = 'viirs_edr_active_fires.yaml' def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy.config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeImgFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the CDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' ]) r.create_filehandlers(loadables) datasets = r.load(['confidence_cat']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], '1') self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) datasets = r.load(['T4']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') datasets = r.load(['power']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'MW') self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') self.assertEqual(v.attrs['sensor'], 'VIIRS') @mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') class TestModVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = 'viirs_edr_active_fires.yaml' def setUp(self): """Wrap file handler with own fake file handler.""" from satpy.config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeModFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the text file handler.""" self.p.stop() def test_init(self, mock_obj): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self, csv_mock): 
"""Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' ]) r.create_filehandlers(loadables) datasets = r.load(['confidence_pct']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], '%') datasets = r.load(['T13']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') datasets = r.load(['power']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'MW') self.assertEqual(v.attrs['platform_name'], 'NOAA-20') self.assertEqual(v.attrs['sensor'], 'VIIRS') @mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') class TestImgVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = 'viirs_edr_active_fires.yaml' def setUp(self): """Wrap file handler with own fake file handler.""" from satpy.config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeImgFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the text file handler.""" self.p.stop() def test_init(self, mock_obj): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self, mock_obj): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' ]) r.create_filehandlers(loadables) datasets = r.load(['confidence_cat']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], '1') self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) datasets = r.load(['T4']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'K') datasets = r.load(['power']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'MW') self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') self.assertEqual(v.attrs['sensor'], 'VIIRS') def suite(): """Create test suite for testing viirs active fires.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestModVIIRSActiveFiresNetCDF4)) mysuite.addTest(loader.loadTestsFromTestCase(TestModVIIRSActiveFiresText)) mysuite.addTest(loader.loadTestsFromTestCase(TestImgVIIRSActiveFiresNetCDF4)) mysuite.addTest(loader.loadTestsFromTestCase(TestImgVIIRSActiveFiresText)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_viirs_edr_flood.py000066400000000000000000000132561362525524100245150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the VIIRS EDR Flood reader.""" import sys import os import numpy as np from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeHDF4FileHandler2(FakeHDF4FileHandler): """Swap in HDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content['/attr/Satellitename'] = filename_info['platform_shortname'] file_content['/attr/SensorIdentifyCode'] = 'VIIRS' # only one dataset for the flood reader file_content['WaterDetection'] = DEFAULT_FILE_DATA file_content['WaterDetection/attr/_Fillvalue'] = 1 file_content['WaterDetection/attr/scale_factor'] = 1. file_content['WaterDetection/attr/add_offset'] = 0. file_content['WaterDetection/attr/units'] = 'none' file_content['WaterDetection/shape'] = DEFAULT_FILE_SHAPE file_content['WaterDetection/attr/ProjectionMinLatitude'] = 15. file_content['WaterDetection/attr/ProjectionMaxLatitude'] = 68. file_content['WaterDetection/attr/ProjectionMinLongitude'] = -124. file_content['WaterDetection/attr/ProjectionMaxLongitude'] = -61. 
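# The four Projection* bounds above, together with the data shape, are what
# the flood reader needs to construct a regular lat/lon grid for the
# WaterDetection dataset.  A rough sketch of the implied arithmetic (an
# illustrative assumption, not the reader's actual code):
#
#     nrows, ncols = DEFAULT_FILE_SHAPE
#     deg_per_pixel_x = (-61.0 - (-124.0)) / ncols  # longitude span / cols
#     deg_per_pixel_y = (68.0 - 15.0) / nrows       # latitude span / rows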
# convert tp xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} for a in ['_Fillvalue', 'units', 'ProjectionMinLatitude', 'ProjectionMaxLongitude', 'ProjectionMinLongitude', 'ProjectionMaxLatitude']: if key + '/attr/' + a in file_content: attrs[a] = file_content[key + '/attr/' + a] if val.ndim > 1: file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs) else: file_content[key] = DataArray(val, attrs=attrs) if 'y' not in file_content['WaterDetection'].dims: file_content['WaterDetection'] = file_content['WaterDetection'].rename({'fakeDim0': 'x', 'fakeDim1': 'y'}) return file_content class TestVIIRSEDRFloodReader(unittest.TestCase): """Test VIIRS EDR Flood Reader.""" yaml_file = 'viirs_edr_flood.yaml' def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy.config import config_search_paths from satpy.readers.viirs_edr_flood import VIIRSEDRFlood self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) self.p = mock.patch.object(VIIRSEDRFlood, '__bases__', (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) self.assertTrue(r.file_handlers) def test_load_dataset(self): """Test loading all datasets from a full swath file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' ]) r.create_filehandlers(loadables) datasets = r.load(['WaterDetection']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'none') def test_load_dataset_aoi(self): """Test loading all datasets from an area of interest file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf' ]) r.create_filehandlers(loadables) datasets = r.load(['WaterDetection']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['units'], 'none') def suite(): """Create the test suite for test_viirs_edr_flood.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestVIIRSEDRFloodReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_viirs_l1b.py000066400000000000000000000306101362525524100232270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.viirs_l1b module. """ import os import sys from datetime import datetime, timedelta import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() # num_lines = { # 'vl1bi': 3248 * 2, # 'vl1bm': 3248, # 'vl1bd': 3248, # }[file_type] # num_pixels = { # 'vl1bi': 6400, # 'vl1bm': 3200, # 'vl1bd': 4064, # }[file_type] # num_scans = 203 # num_luts = 65536 num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] num_scans = 5 num_luts = DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] file_content = { '/dimension/number_of_scans': num_scans, '/dimension/number_of_lines': num_lines, '/dimension/number_of_pixels': num_pixels, '/dimension/number_of_LUT_values': num_luts, '/attr/time_coverage_start': dt.strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/orbit_number': 26384, '/attr/instrument': 'viirs', '/attr/platform': 'Suomi-NPP', } if file_type.startswith('vgeo'): file_content['/attr/OrbitNumber'] = file_content.pop('/attr/orbit_number') file_content['geolocation_data/latitude'] = DEFAULT_LAT_DATA file_content['geolocation_data/longitude'] = DEFAULT_LON_DATA elif file_type == 'vl1bm': file_content['observation_data/M01'] = DEFAULT_FILE_DATA file_content['observation_data/M02'] = DEFAULT_FILE_DATA file_content['observation_data/M03'] = DEFAULT_FILE_DATA file_content['observation_data/M04'] = DEFAULT_FILE_DATA file_content['observation_data/M05'] = DEFAULT_FILE_DATA file_content['observation_data/M06'] = DEFAULT_FILE_DATA file_content['observation_data/M07'] = DEFAULT_FILE_DATA file_content['observation_data/M08'] = DEFAULT_FILE_DATA file_content['observation_data/M09'] = DEFAULT_FILE_DATA file_content['observation_data/M10'] = DEFAULT_FILE_DATA file_content['observation_data/M11'] = DEFAULT_FILE_DATA file_content['observation_data/M12'] = DEFAULT_FILE_DATA file_content['observation_data/M13'] = DEFAULT_FILE_DATA file_content['observation_data/M14'] = DEFAULT_FILE_DATA file_content['observation_data/M15'] = DEFAULT_FILE_DATA file_content['observation_data/M16'] = DEFAULT_FILE_DATA elif file_type == 'vl1bi': file_content['observation_data/I01'] = DEFAULT_FILE_DATA file_content['observation_data/I02'] = DEFAULT_FILE_DATA file_content['observation_data/I03'] 
= DEFAULT_FILE_DATA file_content['observation_data/I04'] = DEFAULT_FILE_DATA file_content['observation_data/I05'] = DEFAULT_FILE_DATA elif file_type == 'vl1bd': file_content['observation_data/DNB_observations'] = DEFAULT_FILE_DATA file_content['observation_data/DNB_observations/attr/units'] = 'Watts/cm^2/steradian' for k in list(file_content.keys()): if not k.startswith('observation_data') and not k.startswith('geolocation_data'): continue file_content[k + '/shape'] = DEFAULT_FILE_SHAPE if k[-3:] in ['M12', 'M13', 'M14', 'M15', 'M16', 'I04', 'I05']: file_content[k + '_brightness_temperature_lut'] = DEFAULT_FILE_DATA.ravel() file_content[k + '_brightness_temperature_lut/attr/units'] = 'Kelvin' file_content[k + '_brightness_temperature_lut/attr/valid_min'] = 0 file_content[k + '_brightness_temperature_lut/attr/valid_max'] = 65534 file_content[k + '_brightness_temperature_lut/attr/_FillValue'] = 65535 file_content[k + '/attr/units'] = 'Watts/meter^2/steradian/micrometer' elif k[-3:] in ['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', 'I01', 'I02', 'I03']: file_content[k + '/attr/radiance_units'] = 'Watts/meter^2/steradian/micrometer' file_content[k + '/attr/radiance_scale_factor'] = 1.1 file_content[k + '/attr/radiance_add_offset'] = 0.1 elif k.endswith('longitude'): file_content[k + '/attr/units'] = 'degrees_east' elif k.endswith('latitude'): file_content[k + '/attr/units'] = 'degrees_north' file_content[k + '/attr/valid_min'] = 0 file_content[k + '/attr/valid_max'] = 65534 file_content[k + '/attr/_FillValue'] = 65535 file_content[k + '/attr/scale_factor'] = 1.1 file_content[k + '/attr/add_offset'] = 0.1 convert_file_content_to_data_array(file_content) return file_content class TestVIIRSL1BReader(unittest.TestCase): """Test VIIRS L1B Reader""" yaml_file = "viirs_l1b.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.viirs_l1b import VIIRSL1BFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_every_m_band_bt(self): """Test loading all M band brightness temperatures""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['M12', 'M13', 'M14', 'M15', 'M16']) self.assertEqual(len(datasets), 5) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'brightness_temperature') self.assertEqual(v.attrs['units'], 'K') def test_load_every_m_band_refl(self): """Test loading all M band reflectances""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = 
r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11']) self.assertEqual(len(datasets), 11) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'reflectance') self.assertEqual(v.attrs['units'], '%') def test_load_every_m_band_rad(self): """Test loading all M bands as radiances""" from satpy.readers import load_reader from satpy import DatasetID r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load([DatasetID('M01', calibration='radiance'), DatasetID('M02', calibration='radiance'), DatasetID('M03', calibration='radiance'), DatasetID('M04', calibration='radiance'), DatasetID('M05', calibration='radiance'), DatasetID('M06', calibration='radiance'), DatasetID('M07', calibration='radiance'), DatasetID('M08', calibration='radiance'), DatasetID('M09', calibration='radiance'), DatasetID('M10', calibration='radiance'), DatasetID('M11', calibration='radiance'), DatasetID('M12', calibration='radiance'), DatasetID('M13', calibration='radiance'), DatasetID('M14', calibration='radiance'), DatasetID('M15', calibration='radiance'), DatasetID('M16', calibration='radiance')]) self.assertEqual(len(datasets), 16) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'radiance') self.assertEqual(v.attrs['units'], 'W m-2 um-1 sr-1') def test_load_dnb_radiance(self): """Test loading the main DNB dataset""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', ]) r.create_filehandlers(loadables) datasets = r.load(['DNB']) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.attrs['calibration'], 'radiance') self.assertEqual(v.attrs['units'], 'W m-2 sr-1') def suite(): """The test suite for test_viirs_l1b. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestVIIRSL1BReader)) return mysuite satpy-0.20.0/satpy/tests/reader_tests/test_viirs_sdr.py000066400000000000000000001124301362525524100233420ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.viirs_sdr module. 
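The fake handlers below mimic the HDF5 layout of SDR granules (per-dataset
Data_Products metadata groups plus All_Data arrays) so that file selection,
geolocation lookup and calibration can be exercised without real data.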
""" import os import sys import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO', 'SVDNB': 'VIIRS-DNB-SDR', 'GITCO': 'VIIRS-IMG-GEO-TC', 'GIMGO': 'VIIRS-IMG-GEO', 'SVI01': 'VIIRS-I1-SDR', 'SVI02': 'VIIRS-I2-SDR', 'SVI03': 'VIIRS-I3-SDR', 'SVI04': 'VIIRS-I4-SDR', 'SVI05': 'VIIRS-I5-SDR', 'GMTCO': 'VIIRS-MOD-GEO-TC', 'GMODO': 'VIIRS-MOD-GEO', 'SVM01': 'VIIRS-M1-SDR', 'SVM02': 'VIIRS-M2-SDR', 'SVM03': 'VIIRS-M3-SDR', 'SVM04': 'VIIRS-M4-SDR', 'SVM05': 'VIIRS-M5-SDR', 'SVM06': 'VIIRS-M6-SDR', 'SVM07': 'VIIRS-M7-SDR', 'SVM08': 'VIIRS-M8-SDR', 'SVM09': 'VIIRS-M9-SDR', 'SVM10': 'VIIRS-M10-SDR', 'SVM11': 'VIIRS-M11-SDR', 'SVM12': 'VIIRS-M12-SDR', 'SVM13': 'VIIRS-M13-SDR', 'SVM14': 'VIIRS-M14-SDR', 'SVM15': 'VIIRS-M15-SDR', 'SVM16': 'VIIRS-M16-SDR', } class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler""" def __init__(self, filename, filename_info, filetype_info, use_tc=None): super(FakeHDF5FileHandler2, self).__init__(filename, filename_info, filetype_info) self.datasets = filename_info['datasets'].split('-') self.use_tc = use_tc def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" start_time = filename_info['start_time'] end_time = filename_info['end_time'].replace(year=start_time.year, month=start_time.month, day=start_time.day) final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) prefix4 = '{prefix}/{dataset_group}_Gran_0'.format(prefix=prefix1, dataset_group=dataset_group) begin_date = start_time.strftime('%Y%m%d') begin_time = start_time.strftime('%H%M%S.%fZ') ending_date = end_time.strftime('%Y%m%d') ending_time = end_time.strftime('%H%M%S.%fZ') if filename[:3] == 'SVI': geo_prefix = 'GIMGO' elif filename[:3] == 'SVM': geo_prefix = 'GMODO' else: geo_prefix = None file_content = { "{prefix2}/attr/AggregateNumberGranules": 1, "{prefix4}/attr/N_Number_Of_Scans": 48, "{prefix2}/attr/AggregateBeginningDate": begin_date, "{prefix2}/attr/AggregateBeginningTime": begin_time, "{prefix2}/attr/AggregateEndingDate": ending_date, "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), "{prefix1}/attr/Instrument_Short_Name": "VIIRS", 
"/attr/Platform_Short_Name": "NPP", } if geo_prefix: file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3, prefix4=prefix4)] = v if filename[:3] in ['SVM', 'SVI', 'SVD']: if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']: keys = ['Radiance', 'Reflectance'] elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']: keys = ['Radiance', 'BrightnessTemperature'] else: # DNB keys = ['Radiance'] for k in keys: k = prefix3 + "/" + k file_content[k] = DEFAULT_FILE_DATA.copy() file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "Factors"] = DEFAULT_FILE_FACTORS.copy() elif filename[0] == 'G': if filename[:5] in ['GMODO', 'GIMGO']: lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) else: lon_data = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) for k in ["Latitude"]: k = prefix3 + "/" + k file_content[k] = lat_data file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0) file_content[k + "/shape"] = DEFAULT_FILE_SHAPE for k in ["Longitude"]: k = prefix3 + "/" + k file_content[k] = lon_data file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0) file_content[k + "/shape"] = DEFAULT_FILE_SHAPE final_content.update(file_content) # convert to xarrays from xarray import DataArray import dask.array as da for key, val in final_content.items(): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 1: final_content[key] = DataArray(val, dims=('y', 'x')) else: final_content[key] = DataArray(val) return final_content class TestVIIRSSDRReader(unittest.TestCase): """Test VIIRS SDR Reader""" yaml_file = "viirs_sdr.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler""" from satpy.config import config_search_paths from satpy.readers.viirs_sdr import VIIRSSDRFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_init_start_time_beyond(self): """Test basic init with start_time after the provided files.""" from satpy.readers import load_reader from datetime import datetime r = load_reader(self.reader_configs, filter_parameters={ 'start_time': datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) self.assertTrue(len(loadables), 0) def test_init_end_time_beyond(self): """Test basic init with end_time before the provided files.""" from satpy.readers import load_reader 
from datetime import datetime r = load_reader(self.reader_configs, filter_parameters={ 'end_time': datetime(2012, 2, 24) }) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) self.assertTrue(len(loadables), 0) def test_init_start_end_time(self): """Test basic init with end_time before the provided files.""" from satpy.readers import load_reader from datetime import datetime r = load_reader(self.reader_configs, filter_parameters={ 'start_time': datetime(2012, 2, 24), 'end_time': datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) self.assertTrue(len(loadables), 1) r.create_filehandlers(loadables) # make sure we have some files self.assertTrue(r.file_handlers) def test_load_all_m_reflectances_no_geo(self): """Load all M band reflectances with no geo files provided""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'reflectance') self.assertEqual(d.attrs['units'], '%') self.assertNotIn('area', d.attrs) def test_load_all_m_reflectances_find_geo(self): """Load all M band reflectances with geo files not specified but existing""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) # make a fake geo file geo_fn = 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' 
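# The SDR content generated by the fake handler carries an N_GEO_Ref global
# attribute naming its geolocation granule, and the reader searches the
# working directory for a matching geolocation file (preferring the
# terrain-corrected GMTCO/GITCO variants).  Creating an empty file with the
# right name below appears to be enough for that search to succeed; the
# file's contents are never parsed, since the patched handler fabricates
# lat/lon arrays from the filename alone.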
open(geo_fn, 'w') try: r.create_filehandlers(loadables) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) finally: os.remove(geo_fn) self.assertEqual(len(ds), 11) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'reflectance') self.assertEqual(d.attrs['units'], '%') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def test_load_all_m_reflectances_provided_geo(self): """Load all M band reflectances with geo files provided""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'reflectance') self.assertEqual(d.attrs['units'], '%') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertEqual(d.attrs['area'].lons.min(), 5) self.assertEqual(d.attrs['area'].lats.min(), 45) def test_load_all_m_reflectances_use_nontc(self): """Load all M band reflectances but use non-TC geolocation""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=False) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): 
self.assertEqual(d.attrs['calibration'], 'reflectance') self.assertEqual(d.attrs['units'], '%') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertEqual(d.attrs['area'].lons.min(), 15) self.assertEqual(d.attrs['area'].lats.min(), 55) def test_load_all_m_reflectances_use_nontc2(self): """Load all M band reflectances but use non-TC geolocation because TC isn't available""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=None) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', ]) self.assertEqual(len(ds), 11) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'reflectance') self.assertEqual(d.attrs['units'], '%') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) self.assertEqual(d.attrs['area'].lons.min(), 15) self.assertEqual(d.attrs['area'].lats.min(), 55) def test_load_all_m_bts(self): """Load all M band brightness temperatures""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['M12', 'M13', 'M14', 'M15', 'M16', ]) self.assertEqual(len(ds), 5) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'brightness_temperature') self.assertEqual(d.attrs['units'], 'K') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def test_load_all_m_radiances(self): """Load all M band radiances""" from satpy.readers import load_reader from satpy import DatasetID r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 
'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load([ DatasetID(name='M01', calibration='radiance', modifiers=None), DatasetID(name='M02', calibration='radiance', modifiers=None), DatasetID(name='M03', calibration='radiance', modifiers=None), DatasetID(name='M04', calibration='radiance', modifiers=None), DatasetID(name='M05', calibration='radiance', modifiers=None), DatasetID(name='M06', calibration='radiance', modifiers=None), DatasetID(name='M07', calibration='radiance', modifiers=None), DatasetID(name='M08', calibration='radiance', modifiers=None), DatasetID(name='M09', calibration='radiance', modifiers=None), DatasetID(name='M10', calibration='radiance', modifiers=None), DatasetID(name='M11', calibration='radiance', modifiers=None), DatasetID(name='M12', calibration='radiance', modifiers=None), DatasetID(name='M13', calibration='radiance', modifiers=None), DatasetID(name='M14', calibration='radiance', modifiers=None), DatasetID(name='M15', calibration='radiance', modifiers=None), DatasetID(name='M16', calibration='radiance', modifiers=None), ]) self.assertEqual(len(ds), 16) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'radiance') self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def test_load_dnb(self): """Load DNB dataset""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) ds = r.load(['DNB']) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertEqual(d.attrs['calibration'], 'radiance') self.assertEqual(d.attrs['units'], 'W m-2 sr-1') self.assertIn('area', d.attrs) self.assertIsNotNone(d.attrs['area']) def test_load_i_no_files(self): """Load I01 when only DNB files are provided""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) r.create_filehandlers(loadables) self.assertNotIn('I01', [x.name for x in r.available_dataset_ids]) ds = r.load(['I01']) self.assertEqual(len(ds), 0) class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler): """Swap-in HDF5 File 
Handler""" def __init__(self, filename, filename_info, filetype_info, use_tc=None): super(FakeHDF5FileHandlerAggr, self).__init__(filename, filename_info, filetype_info) self.datasets = filename_info['datasets'].split('-') self.use_tc = use_tc def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content""" start_time = filename_info['start_time'] end_time = filename_info['end_time'].replace(year=start_time.year, month=start_time.month, day=start_time.day) final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) begin_date = start_time.strftime('%Y%m%d') begin_time = start_time.strftime('%H%M%S.%fZ') ending_date = end_time.strftime('%Y%m%d') ending_time = end_time.strftime('%H%M%S.%fZ') if filename[:3] == 'SVI': geo_prefix = 'GIMGO' elif filename[:3] == 'SVM': geo_prefix = 'GMODO' else: geo_prefix = None file_content = { "{prefix3}/NumberOfScans": np.array([48, 48, 48, 48]), "{prefix2}/attr/AggregateBeginningDate": begin_date, "{prefix2}/attr/AggregateBeginningTime": begin_time, "{prefix2}/attr/AggregateEndingDate": ending_date, "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), "{prefix1}/attr/Instrument_Short_Name": "VIIRS", "/attr/Platform_Short_Name": "NPP", } lats_lists = [ np.array( [ 67.969505, 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.86063, 66.495514 ], dtype=np.float32), np.array( [ 72.74879, 70.2493, 67.84738, 66.49691, 58.77254, 60.465942, 62.11525, 71.08249 ], dtype=np.float32), np.array( [ 77.393425, 74.977875, 72.62976, 71.083435, 62.036346, 63.465122, 64.78075, 75.36842 ], dtype=np.float32), np.array( [ 81.67615, 79.49934, 77.278656, 75.369415, 64.72178, 65.78417, 66.66166, 79.00025 ], dtype=np.float32) ] lons_lists = [ np.array( [ 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385, -7.05221, -10.405702, 14.638646 ], dtype=np.float32), np.array( [ 53.52594, 51.685738, 50.439102, 14.629087, -10.247547, -13.951393, -18.256989, 8.36572 ], dtype=np.float32), np.array( [ 59.386833, 55.770416, 53.38952, 8.353765, -18.062435, -22.608992, -27.867302, -1.3537619 ], dtype=np.float32), np.array( [ 72.50243, 64.17125, 59.15234, -1.3654504, -27.620953, -33.091743, -39.28113, -17.749891 ], dtype=np.float32) ] for granule in range(4): prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=prefix1, dataset_group=dataset_group, idx=granule) file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule] file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule] if geo_prefix: file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v if filename[:3] in ['SVM', 'SVI', 'SVD']: if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']: keys = ['Radiance', 'Reflectance'] elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']: keys = ['Radiance', 
                            'BrightnessTemperature']
                else:  # DNB
                    keys = ['Radiance']
                for k in keys:
                    k = prefix3 + "/" + k
                    file_content[k] = DEFAULT_FILE_DATA.copy()
                    file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
                    file_content[k + "Factors"] = DEFAULT_FILE_FACTORS.copy()
            elif filename[0] == 'G':
                if filename[:5] in ['GMODO', 'GIMGO']:
                    lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
                    lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
                else:
                    lon_data = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
                    lat_data = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
                for k in ["Latitude"]:
                    k = prefix3 + "/" + k
                    file_content[k] = lat_data
                    file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
                    file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
                for k in ["Longitude"]:
                    k = prefix3 + "/" + k
                    file_content[k] = lon_data
                    file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
                    file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
            final_content.update(file_content)

        # convert to xarrays
        from xarray import DataArray
        import dask.array as da
        for key, val in final_content.items():
            if isinstance(val, np.ndarray):
                val = da.from_array(val, chunks=val.shape)
                if val.ndim > 1:
                    final_content[key] = DataArray(val, dims=('y', 'x'))
                else:
                    final_content[key] = DataArray(val)
        return final_content


class TestAggrVIIRSSDRReader(unittest.TestCase):
    """Test aggregated VIIRS SDR reader."""

    yaml_file = "viirs_sdr.yaml"

    def setUp(self):
        """Wrap HDF5 file handler with our own fake handler."""
        from satpy.config import config_search_paths
        from satpy.readers.viirs_sdr import VIIRSSDRFileHandler
        self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
        # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
        self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandlerAggr,))
        self.fake_handler = self.p.start()
        self.p.is_local = True

    def tearDown(self):
        """Stop wrapping the HDF5 file handler."""
        self.p.stop()

    def test_bounding_box(self):
        """Test bounding box."""
        from satpy.readers import load_reader
        r = load_reader(self.reader_configs)
        loadables = r.select_files_from_pathnames([
            'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
        ])
        r.create_filehandlers(loadables)  # make sure we have some files
        expected_lons = [
            72.50243, 64.17125, 59.15234, 59.386833, 55.770416, 53.38952, 53.52594,
            51.685738, 50.439102, 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385,
            -7.05221, -10.247547, -13.951393, -18.062435, -22.608992, -27.620953,
            -33.091743, -39.28113, -17.749891
        ]
        expected_lats = [
            81.67615, 79.49934, 77.278656, 77.393425, 74.977875, 72.62976, 72.74879,
            70.2493, 67.84738, 67.969505, 65.545685, 63.103046, 61.853905, 55.169273,
            57.062447, 58.77254, 60.465942, 62.036346, 63.465122, 64.72178, 65.78417,
            66.66166, 79.00025
        ]
        lons, lats = r.file_handlers['generic_file'][0].get_bounding_box()
        np.testing.assert_allclose(lons, expected_lons)
        np.testing.assert_allclose(lats, expected_lats)


def suite():
    """The test suite for test_viirs_sdr."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestVIIRSSDRReader))
    mysuite.addTest(loader.loadTestsFromTestCase(TestAggrVIIRSSDRReader))
    return mysuite


if __name__ == '__main__':
    unittest.main()

satpy-0.20.0/satpy/tests/reader_tests/test_virr_l1b.py
#!/usr/bin/env python
#
-*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test for readers/virr_l1b.py.""" from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler import sys import numpy as np import dask.array as da import xarray as xr import os if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def make_test_data(self, dims): """Create fake test data.""" return xr.DataArray(da.from_array(np.ones([dim for dim in dims], dtype=np.float32) * 10, [dim for dim in dims])) def _make_file(self, platform_id, geolocation_prefix, l1b_prefix, ECWN, Emissive_units): dim_0 = 19 dim_1 = 20 test_file = { # Satellite data. '/attr/Day Or Night Flag': 'D', '/attr/Observing Beginning Date': '2018-12-25', '/attr/Observing Beginning Time': '21:41:47.090', '/attr/Observing Ending Date': '2018-12-25', '/attr/Observing Ending Time': '21:47:28.254', '/attr/Satellite Name': platform_id, '/attr/Sensor Identification Code': 'VIRR', # Emissive data. l1b_prefix + 'EV_Emissive': self.make_test_data([3, dim_0, dim_1]), l1b_prefix + 'EV_Emissive/attr/valid_range': [0, 50000], l1b_prefix + 'Emissive_Radiance_Scales': self.make_test_data([dim_0, dim_1]), l1b_prefix + 'EV_Emissive/attr/units': Emissive_units, l1b_prefix + 'Emissive_Radiance_Offsets': self.make_test_data([dim_0, dim_1]), '/attr/' + ECWN: [2610.31, 917.6268, 836.2546], # Reflectance data. l1b_prefix + 'EV_RefSB': self.make_test_data([7, dim_0, dim_1]), l1b_prefix + 'EV_RefSB/attr/valid_range': [0, 32767], l1b_prefix + 'EV_RefSB/attr/units': 'none', '/attr/RefSB_Cal_Coefficients': np.ones(14, dtype=np.float32) * 2 } for attribute in ['Latitude', 'Longitude', geolocation_prefix + 'SolarZenith', geolocation_prefix + 'SensorZenith', geolocation_prefix + 'SolarAzimuth', geolocation_prefix + 'SensorAzimuth']: test_file[attribute] = self.make_test_data([dim_0, dim_1]) test_file[attribute + '/attr/Intercept'] = 0. test_file[attribute + '/attr/units'] = 'degrees' if 'Solar' in attribute or 'Sensor' in attribute: test_file[attribute + '/attr/Slope'] = .01 if 'Azimuth' in attribute: test_file[attribute + '/attr/valid_range'] = [0, 18000] else: test_file[attribute + '/attr/valid_range'] = [-18000, 18000] else: test_file[attribute + '/attr/Slope'] = 1. if 'Longitude' == attribute: test_file[attribute + '/attr/valid_range'] = [-180., 180.] else: test_file[attribute + '/attr/valid_range'] = [-90., 90.] 
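        # Note on the convention used above (inherited from FakeHDF5FileHandler
        # in test_hdf5_utils): the fake file content is a flat dict that mimics
        # an HDF5 tree with path-like string keys, e.g. (hypothetical names,
        # a minimal sketch rather than actual reader input):
        #
        #     {
        #         '/attr/Platform': 'FY3B',            # global attribute
        #         'Data/EV_RefSB': some_array,         # a dataset
        #         'Data/EV_RefSB/attr/units': 'none',  # a dataset attribute
        #     }
        #
        # This lets the tests exercise the reader without any real HDF5 file
        # on disk.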
return test_file def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" if filename_info['platform_id'] == 'FY3B': return self._make_file('FY3B', '', '', 'Emmisive_Centroid_Wave_Number', 'milliWstts/m^2/cm^(-1)/steradian') return self._make_file(filename_info['platform_id'], 'Geolocation/', 'Data/', 'Emissive_Centroid_Wave_Number', 'none') class TestVIRRL1BReader(unittest.TestCase): """Test VIRR L1B Reader.""" yaml_file = "virr_l1b.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.virr_l1b import VIRR_L1B from satpy.config import config_search_paths self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIRR_L1B, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def _band_helper(self, attributes, units, calibration, standard_name, file_type, band_index_size, resolution): self.assertEqual(units, attributes['units']) self.assertEqual(calibration, attributes['calibration']) self.assertEqual(standard_name, attributes['standard_name']) self.assertEqual(file_type, attributes['file_type']) self.assertTrue(attributes['band_index'] in range(band_index_size)) self.assertEqual(resolution, attributes['resolution']) self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) def _fy3_helper(self, platform_name, reader, Emissive_units): """Load channels and test accurate metadata.""" import datetime band_values = {'1': 22.0, '2': 22.0, '6': 22.0, '7': 22.0, '8': 22.0, '9': 22.0, '10': 22.0, '3': 496.542155, '4': 297.444511, '5': 288.956557, 'solar_zenith_angle': .1, 'satellite_zenith_angle': .1, 'solar_azimuth_angle': .1, 'satellite_azimuth_angle': .1, 'longitude': 10} if platform_name == 'FY3B': # updated 2015 coefficients band_values['1'] = -0.168 band_values['2'] = -0.2706 band_values['6'] = -1.5631 band_values['7'] = -0.2114 band_values['8'] = -0.171 band_values['9'] = -0.1606 band_values['10'] = -0.1328 datasets = reader.load([band for band in band_values]) for dataset in datasets: # Object returned by get_dataset. 
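            # Note: reader.load() returns a DatasetDict, so iterating over it
            # yields DatasetID keys; indexing with the plain string
            # ``dataset.name`` below still works because DatasetDict resolves
            # names, wavelengths, or full DatasetIDs to the best matching key
            # (the lookup behavior is exercised in test_readers.py later in
            # this test suite).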
            ds = datasets[dataset.name]
            attributes = ds.attrs
            self.assertIsInstance(ds.data, da.Array)
            self.assertEqual('virr', attributes['sensor'])
            self.assertEqual(platform_name, attributes['platform_name'])
            self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes['start_time'])
            self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes['end_time'])
            self.assertEqual((19, 20), datasets[dataset.name].shape)
            self.assertEqual(('y', 'x'), datasets[dataset.name].dims)
            if dataset.name in ['1', '2', '6', '7', '8', '9', '10']:
                self._band_helper(attributes, '%', 'reflectance',
                                  'toa_bidirectional_reflectance',
                                  'virr_l1b', 7, 1000)
            elif dataset.name in ['3', '4', '5']:
                self._band_helper(attributes, Emissive_units, 'brightness_temperature',
                                  'toa_brightness_temperature', 'virr_l1b', 3, 1000)
            elif dataset.name in ['longitude', 'latitude']:
                self.assertEqual('degrees', attributes['units'])
                self.assertIn(attributes['standard_name'], ['longitude', 'latitude'])
                self.assertEqual(['virr_l1b', 'virr_geoxx'], attributes['file_type'])
                self.assertEqual(1000, attributes['resolution'])
            else:
                self.assertEqual('degrees', attributes['units'])
                self.assertIn(attributes['standard_name'],
                              ['solar_zenith_angle', 'sensor_zenith_angle',
                               'solar_azimuth_angle', 'sensor_azimuth_angle'])
                self.assertEqual(['virr_geoxx', 'virr_l1b'], attributes['file_type'])
                self.assertEqual(('longitude', 'latitude'), attributes['coordinates'])
            self.assertEqual(band_values[dataset.name],
                             round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6))

    def test_fy3b_file(self):
        """Test that FY3B files are recognized."""
        from satpy.readers import load_reader
        FY3B_reader = load_reader(self.reader_configs)
        FY3B_file = FY3B_reader.select_files_from_pathnames(['tf2018359214943.FY3B-L_VIRRX_L1B.HDF'])
        self.assertEqual(1, len(FY3B_file))
        FY3B_reader.create_filehandlers(FY3B_file)
        # Make sure we have some files
        self.assertTrue(FY3B_reader.file_handlers)
        self._fy3_helper('FY3B', FY3B_reader, 'milliWstts/m^2/cm^(-1)/steradian')

    def test_fy3c_file(self):
        """Test that FY3C files are recognized."""
        from satpy.readers import load_reader
        FY3C_reader = load_reader(self.reader_configs)
        FY3C_files = FY3C_reader.select_files_from_pathnames(['tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF',
                                                              'tf2018359143912.FY3C-L_VIRRX_L1B.HDF'])
        self.assertEqual(2, len(FY3C_files))
        FY3C_reader.create_filehandlers(FY3C_files)
        # Make sure we have some files
        self.assertTrue(FY3C_reader.file_handlers)
        self._fy3_helper('FY3C', FY3C_reader, '1')


def suite():
    """Create test suite for test_virr_l1b."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestVIRRL1BReader))
    return mysuite

satpy-0.20.0/satpy/tests/test_config.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
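# Note: ``check_satpy`` (exercised below) prints the import status of Satpy's
# readers, writers, and optional dependencies, which makes it handy for
# debugging installations, e.g. (illustrative call mirroring the test below):
#
#     from satpy.config import check_satpy
#     check_satpy(readers=['viirs_sdr'], extras=('cartopy',))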
"""Test objects and functions in the satpy.config module.""" import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class TestCheckSatpy(unittest.TestCase): """Test the 'check_satpy' function.""" def test_basic_check_satpy(self): """Test 'check_satpy' basic functionality.""" from satpy.config import check_satpy check_satpy() def test_specific_check_satpy(self): """Test 'check_satpy' with specific features provided.""" from satpy.config import check_satpy with mock.patch('satpy.config.print') as print_mock: check_satpy(readers=['viirs_sdr'], extras=('cartopy', '__fake')) checked_fake = False for call in print_mock.mock_calls: if len(call[1]) > 0 and '__fake' in call[1][0]: self.assertNotIn('ok', call[1][1]) checked_fake = True self.assertTrue(checked_fake, "Did not find __fake module " "mentioned in checks") class TestBuiltinAreas(unittest.TestCase): """Test that the builtin areas are all valid.""" def test_areas_pyproj(self): """Test all areas have valid projections with pyproj.""" import pyproj from pyresample import parse_area_file from pyresample.geometry import SwathDefinition from satpy.resample import get_area_file import numpy as np import xarray as xr lons = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lats = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lons = xr.DataArray(lons) lats = xr.DataArray(lats) swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: if hasattr(area_obj, 'freeze'): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: # we didn't provide enough info to freeze, hard to guess # in a generic test so just skip this area continue proj_dict = area_obj.proj_dict _ = pyproj.Proj(proj_dict) def test_areas_rasterio(self): """Test all areas have valid projections with rasterio.""" try: from rasterio.crs import CRS except ImportError: return unittest.skip("Missing rasterio dependency") if not hasattr(CRS, 'from_dict'): return unittest.skip("RasterIO 1.0+ required") from pyresample import parse_area_file from pyresample.geometry import SwathDefinition from satpy.resample import get_area_file import numpy as np import xarray as xr lons = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lats = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lons = xr.DataArray(lons) lats = xr.DataArray(lats) swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: if hasattr(area_obj, 'freeze'): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: # we didn't provide enough info to freeze, hard to guess # in a generic test so just skip this area continue proj_dict = area_obj.proj_dict if proj_dict.get('proj') in ('ob_tran', 'nsper') and \ 'wktext' not in proj_dict: # FIXME: rasterio doesn't understand ob_tran unless +wktext # See: https://github.com/pyproj4/pyproj/issues/357 # pyproj 2.0+ seems to drop wktext from PROJ dict continue _ = CRS.from_dict(proj_dict) def suite(): """Test suite for test_config.""" loader = unittest.TestLoader() my_suite = unittest.TestSuite() my_suite.addTest(loader.loadTestsFromTestCase(TestCheckSatpy)) my_suite.addTest(loader.loadTestsFromTestCase(TestBuiltinAreas)) return my_suite satpy-0.20.0/satpy/tests/test_crefl_utils.py000066400000000000000000000037141362525524100211710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test CREFL rayleigh correction functions. """ import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class TestCreflUtils(unittest.TestCase): """Test crefl_utils.""" def test_get_atm_variables_abi(self): import numpy as np from satpy.composites.crefl_utils import get_atm_variables_abi sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables_abi(0.17690244, 6.123234e-17, 530.61332168, 405., 21.71342113, 77.14385758, 56.214566960, 0.0043149700000000004, 0.0037296, 0.014107995000000002, 0.052349) self.assertLess(abs(np.array(sphalb) - 0.045213532544630494), 1e-10) self.assertLess(abs(rhoray - 2.2030281148621356), 1e-10) self.assertLess(abs(TtotraytH2O - 0.30309880915889087), 1e-10) self.assertLess(abs(tOG - 0.5969089524560548), 1e-10) def suite(): """The test suite for test_crefl_utils.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestCreflUtils)) return mysuite satpy-0.20.0/satpy/tests/test_dataset.py000066400000000000000000000073321362525524100203030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test objects and functions in the dataset module. 
""" import sys from datetime import datetime if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class TestDatasetID(unittest.TestCase): """Test DatasetID object creation and other methods.""" def test_basic_init(self): """Test basic ways of creating a DatasetID.""" from satpy.dataset import DatasetID DatasetID(name="a") DatasetID(name="a", wavelength=0.86) DatasetID(name="a", resolution=1000) DatasetID(name="a", calibration='radiance') DatasetID(name="a", wavelength=0.86, resolution=250, calibration='radiance') DatasetID(name="a", wavelength=0.86, resolution=250, calibration='radiance', modifiers=('sunz_corrected',)) DatasetID(wavelength=0.86) def test_init_bad_modifiers(self): """Test that modifiers are a tuple.""" from satpy.dataset import DatasetID self.assertRaises(TypeError, DatasetID, name="a", modifiers="str") def test_compare_no_wl(self): """Compare fully qualified wavelength ID to no wavelength ID.""" from satpy.dataset import DatasetID d1 = DatasetID(name="a", wavelength=(0.1, 0.2, 0.3)) d2 = DatasetID(name="a", wavelength=None) # this happens when sorting IDs during dependency checks self.assertFalse(d1 < d2) self.assertTrue(d2 < d1) class TestCombineMetadata(unittest.TestCase): """Test how metadata is combined.""" def test_average_datetimes(self): """Test the average_datetimes helper function.""" from satpy.dataset import average_datetimes dts = ( datetime(2018, 2, 1, 11, 58, 0), datetime(2018, 2, 1, 11, 59, 0), datetime(2018, 2, 1, 12, 0, 0), datetime(2018, 2, 1, 12, 1, 0), datetime(2018, 2, 1, 12, 2, 0), ) ret = average_datetimes(dts) self.assertEqual(dts[2], ret) def test_combine_times(self): """Test the combine_metadata with times.""" from satpy.dataset import combine_metadata dts = ( {'start_time': datetime(2018, 2, 1, 11, 58, 0)}, {'start_time': datetime(2018, 2, 1, 11, 59, 0)}, {'start_time': datetime(2018, 2, 1, 12, 0, 0)}, {'start_time': datetime(2018, 2, 1, 12, 1, 0)}, {'start_time': datetime(2018, 2, 1, 12, 2, 0)}, ) ret = combine_metadata(*dts) self.assertEqual(dts[2]['start_time'], ret['start_time']) ret = combine_metadata(*dts, average_times=False) # times are not equal so don't include it in the final result self.assertNotIn('start_time', ret) def suite(): """The test suite for test_projector. """ loader = unittest.TestLoader() my_suite = unittest.TestSuite() my_suite.addTest(loader.loadTestsFromTestCase(TestDatasetID)) my_suite.addTest(loader.loadTestsFromTestCase(TestCombineMetadata)) return my_suite satpy-0.20.0/satpy/tests/test_demo.py000066400000000000000000000170341362525524100176020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the satpy.demo module.""" import os import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class _GlobHelper(object): """Create side effect function for mocking gcsfs glob method.""" def __init__(self, num_results): """Initialize side_effect function for mocking gcsfs glob method. Args: num_results (int or list): Number of results for each glob call to return. If a list then number of results per call. The last number is used for any additional calls. """ self.current_call = 0 if not isinstance(num_results, (list, tuple)): num_results = [num_results] self.num_results = num_results def __call__(self, pattern): """Mimic glob by being used as the side effect function.""" try: num_results = self.num_results[self.current_call] except IndexError: num_results = self.num_results[-1] self.current_call += 1 return [pattern + '.{:03d}'.format(idx) for idx in range(num_results)] class TestDemo(unittest.TestCase): """Test demo data download functions.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() self.prev_dir = os.getcwd() os.chdir(self.base_dir) def tearDown(self): """Remove the temporary directory created for a test.""" os.chdir(self.prev_dir) try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @mock.patch('satpy.demo._google_cloud_platform.gcsfs') def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_us_midlatitude_cyclone_abi gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] # expected 16 files, got 2 self.assertRaises(AssertionError, get_us_midlatitude_cyclone_abi) # unknown access method self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method='unknown') gcsfs_inst.glob.return_value = ['a.nc'] * 16 filenames = get_us_midlatitude_cyclone_abi() expected = os.path.join('.', 'abi_l1b', '20190314_us_midlatitude_cyclone', 'a.nc') for fn in filenames: self.assertEqual(expected, fn) @mock.patch('satpy.demo._google_cloud_platform.gcsfs') def test_get_hurricane_florence_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_hurricane_florence_abi gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst # only return 5 results total gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 self.assertRaises(AssertionError, get_hurricane_florence_abi) self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method='unknown') gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() self.assertEqual(10 * 16, len(filenames)) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4]) self.assertEqual(10 * 3, len(filenames)) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4], num_frames=5) self.assertEqual(5 * 3, len(filenames)) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(num_frames=5) self.assertEqual(5 * 16, len(filenames)) class TestGCPUtils(unittest.TestCase): """Test 
Google Cloud Platform utilities.""" @mock.patch('satpy.demo._google_cloud_platform.urlopen') def test_is_gcp_instance(self, uo): """Test is_google_cloud_instance.""" from satpy.demo._google_cloud_platform import is_google_cloud_instance, URLError uo.side_effect = URLError("Test Environment") self.assertFalse(is_google_cloud_instance()) @mock.patch('satpy.demo._google_cloud_platform.gcsfs') def test_get_bucket_files(self, gcsfs_mod): """Test get_bucket_files basic cases.""" from satpy.demo._google_cloud_platform import get_bucket_files gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] filenames = get_bucket_files('*.nc', '.') expected = [os.path.join('.', 'a.nc'), os.path.join('.', 'b.nc')] self.assertEqual(expected, filenames) gcsfs_inst.glob.side_effect = _GlobHelper(10) filenames = get_bucket_files(['*.nc', '*.txt'], '.', pattern_slice=slice(2, 5)) self.assertEqual(len(filenames), 3 * 2) gcsfs_inst.glob.side_effect = None # reset mock side effect gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] self.assertRaises(OSError, get_bucket_files, '*.nc', 'does_not_exist') open('a.nc', 'w').close() # touch the file gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ['a.nc'] filenames = get_bucket_files('*.nc', '.') self.assertEqual([os.path.join('.', 'a.nc')], filenames) gcsfs_inst.get.assert_not_called() # force redownload gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ['a.nc'] filenames = get_bucket_files('*.nc', '.', force=True) self.assertEqual([os.path.join('.', 'a.nc')], filenames) gcsfs_inst.get.assert_called_once() # if we don't get any results then we expect an exception gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = [] self.assertRaises(OSError, get_bucket_files, '*.nc', '.') @mock.patch('satpy.demo._google_cloud_platform.gcsfs', None) def test_no_gcsfs(self): """Test that 'gcsfs' is required.""" from satpy.demo._google_cloud_platform import get_bucket_files self.assertRaises(RuntimeError, get_bucket_files, '*.nc', '.') def suite(): """Create the test suite for test_demo.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestDemo)) mysuite.addTest(loader.loadTestsFromTestCase(TestGCPUtils)) return mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/tests/test_file_handlers.py000066400000000000000000000142571362525524100214610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """test file handler baseclass. 
""" import unittest try: from unittest import mock except ImportError: import mock import numpy as np from satpy.readers.file_handlers import BaseFileHandler class TestBaseFileHandler(unittest.TestCase): """Test the BaseFileHandler.""" def setUp(self): """Setup the test.""" self._old_set = BaseFileHandler.__abstractmethods__ BaseFileHandler._abstractmethods__ = set() self.fh = BaseFileHandler( 'filename', {'filename_info': 'bla'}, 'filetype_info') def test_combine_times(self): """Combine times.""" info1 = {'start_time': 1} info2 = {'start_time': 2} res = self.fh.combine_info([info1, info2]) exp = {'start_time': 1} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) exp = {'start_time': 1} self.assertDictEqual(res, exp) info1 = {'end_time': 1} info2 = {'end_time': 2} res = self.fh.combine_info([info1, info2]) exp = {'end_time': 2} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) exp = {'end_time': 2} self.assertDictEqual(res, exp) def test_combine_orbits(self): """Combine orbits.""" info1 = {'start_orbit': 1} info2 = {'start_orbit': 2} res = self.fh.combine_info([info1, info2]) exp = {'start_orbit': 1} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) exp = {'start_orbit': 1} self.assertDictEqual(res, exp) info1 = {'end_orbit': 1} info2 = {'end_orbit': 2} res = self.fh.combine_info([info1, info2]) exp = {'end_orbit': 2} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) exp = {'end_orbit': 2} self.assertDictEqual(res, exp) @mock.patch('satpy.readers.file_handlers.SwathDefinition') def test_combine_area(self, sdef): """Combine area.""" area1 = mock.MagicMock() area1.lons = np.arange(5) area1.lats = np.arange(5) area1.name = 'area1' area2 = mock.MagicMock() area2.lons = np.arange(5) area2.lats = np.arange(5) area2.name = 'area2' info1 = {'area': area1} info2 = {'area': area2} self.fh.combine_info([info1, info2]) self.assertTupleEqual(sdef.call_args[1]['lons'].shape, (2, 5)) self.assertTupleEqual(sdef.call_args[1]['lats'].shape, (2, 5)) self.assertEqual(sdef.return_value.name, 'area1_area2') def test_combine_orbital_parameters(self): """Combine orbital parameters.""" info1 = {'orbital_parameters': {'projection_longitude': 1, 'projection_latitude': 1, 'projection_altitude': 1, 'satellite_nominal_longitude': 1, 'satellite_nominal_latitude': 1, 'satellite_actual_longitude': 1, 'satellite_actual_latitude': 1, 'satellite_actual_altitude': 1, 'nadir_longitude': 1, 'nadir_latitude': 1, 'only_in_1': False}} info2 = {'orbital_parameters': {'projection_longitude': 2, 'projection_latitude': 2, 'projection_altitude': 2, 'satellite_nominal_longitude': 2, 'satellite_nominal_latitude': 2, 'satellite_actual_longitude': 2, 'satellite_actual_latitude': 2, 'satellite_actual_altitude': 2, 'nadir_longitude': 2, 'nadir_latitude': 2, 'only_in_2': True}} exp = {'orbital_parameters': {'projection_longitude': 1.5, 'projection_latitude': 1.5, 'projection_altitude': 1.5, 'satellite_nominal_longitude': 1.5, 'satellite_nominal_latitude': 1.5, 'satellite_actual_longitude': 1.5, 'satellite_actual_latitude': 1.5, 'satellite_actual_altitude': 1.5, 'nadir_longitude': 1.5, 'nadir_latitude': 1.5, 'only_in_1': False, 'only_in_2': True}} res = self.fh.combine_info([info1, info2]) self.assertDictEqual(res, exp) # Identity self.assertEqual(self.fh.combine_info([info1]), info1) # Empty self.fh.combine_info([{}]) def tearDown(self): """Tear down the test.""" BaseFileHandler.__abstractmethods__ = self._old_set def suite(): """The test suite for 
test_projector. """ loader = unittest.TestLoader() my_suite = unittest.TestSuite() my_suite.addTest(loader.loadTestsFromTestCase(TestBaseFileHandler)) return my_suite satpy-0.20.0/satpy/tests/test_multiscene.py000066400000000000000000000500441362525524100210240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for multiscene.py.""" import os import sys import tempfile import shutil from datetime import datetime if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock DEFAULT_SHAPE = (5, 10) def _fake_get_enhanced_image(img): from trollimage.xrimage import XRImage return XRImage(img) def _create_test_area(proj_str=None, shape=DEFAULT_SHAPE, extents=None): """Create a test area definition.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict if proj_str is None: proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' \ '+lat_0=25 +lat_1=25 +units=m +no_defs' proj_dict = proj4_str_to_dict(proj_str) extents = extents or (-1000., -1500., 1000., 1500.) 
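    # AreaDefinition takes width (columns) before height (rows), while
    # ``shape`` follows the numpy (rows, columns) convention, hence the
    # swapped ``shape[1], shape[0]`` in the call below.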
return AreaDefinition( 'test', 'test', 'test', proj_dict, shape[1], shape[0], extents ) def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None): """Create a test DataArray object.""" import xarray as xr import dask.array as da import numpy as np return xr.DataArray( da.zeros(shape, dtype=np.float32, chunks=shape), dims=('y', 'x'), attrs={'name': name, 'area': area}) def _create_test_scenes(num_scenes=2, shape=DEFAULT_SHAPE, area=None): """Create some test scenes for various test cases.""" from satpy import Scene ds1 = _create_test_dataset('ds1', shape=shape, area=area) ds2 = _create_test_dataset('ds2', shape=shape, area=area) scenes = [] for _ in range(num_scenes): scn = Scene() scn['ds1'] = ds1.copy() scn['ds2'] = ds2.copy() scenes.append(scn) return scenes class TestMultiScene(unittest.TestCase): """Test basic functionality of MultiScene.""" def test_init_empty(self): """Test creating a multiscene with no children.""" from satpy import MultiScene MultiScene() def test_init_children(self): """Test creating a multiscene with children.""" from satpy import MultiScene scenes = _create_test_scenes() MultiScene(scenes) def test_properties(self): """Test basic properties/attributes of the MultiScene.""" from satpy import MultiScene, DatasetID area = _create_test_area() scenes = _create_test_scenes(area=area) ds1_id = DatasetID(name='ds1') ds2_id = DatasetID(name='ds2') ds3_id = DatasetID(name='ds3') ds4_id = DatasetID(name='ds4') # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') mscn = MultiScene(scenes) self.assertSetEqual(mscn.loaded_dataset_ids, {ds1_id, ds2_id, ds3_id}) self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) self.assertTrue(mscn.all_same_area) bigger_area = _create_test_area(shape=(20, 40)) scenes[0]['ds4'] = _create_test_dataset('ds4', shape=(20, 40), area=bigger_area) self.assertSetEqual(mscn.loaded_dataset_ids, {ds1_id, ds2_id, ds3_id, ds4_id}) self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) self.assertFalse(mscn.all_same_area) def test_from_files(self): """Test creating a multiscene from multiple files.""" from satpy import MultiScene input_files = [ "OR_ABI-L1b-RadC-M3C01_G16_s20171171502203_e20171171504576_c20171171505018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171507203_e20171171509576_c20171171510018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171512203_e20171171514576_c20171171515017.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171517203_e20171171519577_c20171171520019.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171522203_e20171171524576_c20171171525020.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171527203_e20171171529576_c20171171530017.nc", ] with mock.patch('satpy.multiscene.Scene') as scn_mock: mscn = MultiScene.from_files(input_files, reader='abi_l1b') self.assertTrue(len(mscn.scenes), 6) calls = [mock.call(filenames={'abi_l1b': [in_file]}) for in_file in input_files] scn_mock.assert_has_calls(calls) class TestMultiSceneSave(unittest.TestCase): """Test saving a MultiScene to various formats.""" def setUp(self): """Create temporary directory to save files to.""" self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_mp4(self): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of 
the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3'], client=False) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') # make sure that not specifying datasets still saves all of them fn = os.path.join( self.base_dir, 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=False) # the 'ds3' dataset isn't known to the first scene so it doesn't get saved # 2 for first scene, 2 for second scene self.assertEqual(writer_mock.append_data.call_count, 2 + 2) self.assertIn('test_save_mp4_ds1_20180101_00_20180102_12.mp4', filenames) self.assertIn('test_save_mp4_ds2_20180101_00_20180102_12.mp4', filenames) self.assertIn('test_save_mp4_ds3_20180102_00_20180102_12.mp4', filenames) @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_mp4_distributed(self): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=client_mock, datasets=['ds1', 'ds2', 'ds3']) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') 
self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') # Test no distributed client found mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer, \ mock.patch('satpy.multiscene.get_client', mock.Mock(side_effect=ValueError("No client"))): get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_mp4_no_distributed(self): """Save a series of fake scenes to an mp4 video when distributed isn't available.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch('satpy.multiscene.imageio.get_writer') as get_writer, \ mock.patch('satpy.multiscene.get_client', None): get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_datasets_simple(self): """Save a series of fake scenes to an PNG images.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id 
== 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch('satpy.multiscene.Scene.save_datasets') as save_datasets: save_datasets.return_value = [True] # some arbitrary return value # force order of datasets by specifying them mscn.save_datasets(base_dir=self.base_dir, client=False, datasets=['ds1', 'ds2', 'ds3'], writer='simple_image') # 2 for each scene self.assertEqual(save_datasets.call_count, 2) @mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image) def test_save_datasets_distributed(self): """Save a series of fake scenes to an PNG images using dask distributed.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]['ds3'] = _create_test_dataset('ds3') # Add a start and end time for ds_id in ['ds1', 'ds2', 'ds3']: scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) if ds_id == 'ds3': continue scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x future_mock = mock.MagicMock() with mock.patch('satpy.multiscene.Scene.save_datasets') as save_datasets: save_datasets.return_value = [future_mock] # some arbitrary return value # force order of datasets by specifying them mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'], writer='simple_image') # 2 for each scene self.assertEqual(save_datasets.call_count, 2) def test_crop(self): """Test the crop method.""" from satpy import Scene, MultiScene from xarray import DataArray from pyresample.geometry import AreaDefinition import numpy as np scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} x_size = 3712 y_size = 3712 area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( 'test2', 'test2', 'test2', proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = DataArray(np.zeros((y_size, x_size))) scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), attrs={'area': area_def}) scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), attrs={'area': area_def2}) mscn = MultiScene([scene1]) # by lon/lat bbox new_mscn = mscn.crop(ll_bbox=(-20., -5., 0, 0)) new_scn1 = list(new_mscn.scenes)[0] self.assertIn('1', new_scn1) self.assertIn('2', new_scn1) self.assertIn('3', new_scn1) self.assertTupleEqual(new_scn1['1'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['2'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['3'].shape, (184, 714)) self.assertTupleEqual(new_scn1['4'].shape, (92, 357)) class TestBlendFuncs(unittest.TestCase): """Test individual functions used for blending.""" def setUp(self): """Set up test data.""" import xarray as xr import dask.array as da from datetime import 
datetime from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, 2, 2, [-200, -200, 200, 200]) ds1 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) self.ds1 = ds1 ds2 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) self.ds2 = ds2 def test_stack(self): """Test the 'stack' function.""" from satpy.multiscene import stack res = stack([self.ds1, self.ds2]) self.assertTupleEqual(self.ds1.shape, res.shape) def test_timeseries(self): """Test the 'timeseries' function.""" from satpy.multiscene import timeseries import xarray as xr res = timeseries([self.ds1, self.ds2]) self.assertIsInstance(res, xr.DataArray) self.assertTupleEqual((2, self.ds1.shape[0], self.ds1.shape[1]), res.shape) def suite(): """Create the test suite for test_multiscene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestMultiScene)) mysuite.addTest(loader.loadTestsFromTestCase(TestMultiSceneSave)) mysuite.addTest(loader.loadTestsFromTestCase(TestBlendFuncs)) return mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/tests/test_readers.py000066400000000000000000001047311362525524100203040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
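# Note on a pattern used throughout these test modules: a reader's file
# handler is "faked" by patching its base class, e.g. (a sketch of the
# pattern used in the setUp methods below):
#
#     p = mock.patch.object(VIIRSSDRFileHandler, '__bases__',
#                           (FakeHDF5FileHandler2,))
#     p.start()
#     p.is_local = True  # keep mock from deleting __bases__ on stop()
#
# so instantiating the real handler runs the fake get_test_content() instead
# of opening an actual HDF5 file (see the stackoverflow link cited in setUp).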
"""Test classes and functions in the readers/__init__.py module.""" import os import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock # clear the config dir environment variable so it doesn't interfere os.environ.pop("PPP_CONFIG_DIR", None) class TestDatasetDict(unittest.TestCase): """Test DatasetDict and its methods.""" def setUp(self): """Create a test DatasetDict.""" from satpy.dataset import DatasetID from satpy.readers import DatasetDict self.regular_dict = regular_dict = { DatasetID(name="test", wavelength=(0, 0.5, 1), resolution=1000): "1", DatasetID(name="testh", wavelength=(0, 0.5, 1), resolution=500): "1h", DatasetID(name="test2", wavelength=(1, 1.5, 2), resolution=1000): "2", DatasetID(name="test3", wavelength=(1.2, 1.7, 2.2), resolution=1000): "3", DatasetID(name="test4", calibration="radiance", polarization="V"): "4rad", DatasetID(name="test4", calibration="reflectance", polarization="H"): "4refl", DatasetID(name="test5", modifiers=('mod1', 'mod2')): "5_2mod", DatasetID(name="test5", modifiers=('mod2',)): "5_1mod", DatasetID(name='test6', level=100): '6_100', DatasetID(name='test6', level=200): '6_200', } self.test_dict = DatasetDict(regular_dict) def test_init_noargs(self): """Test DatasetDict init with no arguments.""" from satpy.readers import DatasetDict d = DatasetDict() self.assertIsInstance(d, dict) def test_init_dict(self): """Test DatasetDict init with a regular dict argument.""" from satpy.dataset import DatasetID from satpy.readers import DatasetDict regular_dict = {DatasetID(name="test", wavelength=(0, 0.5, 1)): "1", } d = DatasetDict(regular_dict) self.assertEqual(d, regular_dict) def test_getitem(self): """Test DatasetDict getitem with different arguments.""" from satpy.dataset import DatasetID d = self.test_dict # access by name self.assertEqual(d["test"], "1") # access by exact wavelength self.assertEqual(d[1.5], "2") # access by near wavelength self.assertEqual(d[1.55], "2") # access by near wavelength of another dataset self.assertEqual(d[1.65], "3") # access by name with multiple levels self.assertEqual(d['test6'], '6_200') self.assertEqual(d[DatasetID(wavelength=1.5)], "2") self.assertEqual(d[DatasetID(wavelength=0.5, resolution=1000)], "1") self.assertEqual(d[DatasetID(wavelength=0.5, resolution=500)], "1h") self.assertEqual(d[DatasetID(name='test6', level=100)], '6_100') self.assertEqual(d[DatasetID(name='test6', level=200)], '6_200') # higher resolution is returned self.assertEqual(d[0.5], "1h") self.assertEqual(d['test4'], '4refl') self.assertEqual(d[DatasetID(name='test4', calibration='radiance')], '4rad') self.assertRaises(KeyError, d.getitem, '1h') def test_get_key(self): """Test 'get_key' special functions.""" from satpy import DatasetID from satpy.readers import get_key d = self.test_dict res1 = get_key(DatasetID(name='test4'), d, calibration='radiance') res2 = get_key(DatasetID(name='test4'), d, calibration='radiance', num_results=0) res3 = get_key(DatasetID(name='test4'), d, calibration='radiance', num_results=3) self.assertEqual(len(res2), 1) self.assertEqual(len(res3), 1) res2 = res2[0] res3 = res3[0] self.assertEqual(res1, res2) self.assertEqual(res1, res3) res1 = get_key('test4', d, polarization='V') self.assertEqual(res1, DatasetID(name='test4', calibration='radiance', polarization='V')) res1 = get_key(0.5, d, resolution=500) self.assertEqual(res1, DatasetID(name='testh', wavelength=(0, 0.5, 1), resolution=500)) res1 = get_key('test6', d, 
class TestReaderLoader(unittest.TestCase):
    """Test the `load_readers` function.

    Assumes that the VIIRS SDR reader exists and works.
    """

    def setUp(self):
        """Wrap HDF5 file handler with our own fake handler."""
        from satpy.readers.viirs_sdr import VIIRSSDRFileHandler
        from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2
        # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
        self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,))
        self.fake_handler = self.p.start()
        self.p.is_local = True

    def tearDown(self):
        """Stop wrapping the HDF5 file handler."""
        self.p.stop()

    def test_no_args(self):
        """Test no args provided.

        This should check the local directory which should have no files.
        """
        from satpy.readers import load_readers
        ri = load_readers()
        self.assertDictEqual(ri, {})

    def test_filenames_only(self):
        """Test with filenames specified."""
        from satpy.readers import load_readers
        ri = load_readers(filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'])
        self.assertListEqual(list(ri.keys()), ['viirs_sdr'])

    def test_filenames_and_reader(self):
        """Test with filenames and reader specified."""
        from satpy.readers import load_readers
        ri = load_readers(reader='viirs_sdr',
                          filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'])
        self.assertListEqual(list(ri.keys()), ['viirs_sdr'])

    def test_bad_reader_name_with_filenames(self):
        """Test bad reader name with filenames provided."""
        from satpy.readers import load_readers
        self.assertRaises(ValueError, load_readers, reader='i_dont_exist', filenames=[
            'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
        ])

    @unittest.skipIf(sys.version_info < (3, 4), "pathlib added in Python 3.4")
    def test_filenames_as_path(self):
        """Test with filenames specified as pathlib.Path."""
        from pathlib import Path
        from satpy.readers import load_readers
        ri = load_readers(filenames=[
            Path('SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'),
        ])
        self.assertListEqual(list(ri.keys()), ['viirs_sdr'])

    def test_filenames_as_dict(self):
        """Test loading readers where filenames are organized by reader."""
        from satpy.readers import load_readers
        filenames = {
            'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'],
        }
        ri = load_readers(filenames=filenames)
        self.assertListEqual(list(ri.keys()), ['viirs_sdr'])
""" from satpy.readers import load_readers ri = load_readers() self.assertDictEqual(ri, {}) def test_filenames_only(self): """Test with filenames specified.""" from satpy.readers import load_readers ri = load_readers(filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_filenames_and_reader(self): """Test with filenames and reader specified.""" from satpy.readers import load_readers ri = load_readers(reader='viirs_sdr', filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" from satpy.readers import load_readers self.assertRaises(ValueError, load_readers, reader='i_dont_exist', filenames=[ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', ]) @unittest.skipIf(sys.version_info < (3, 4), "pathlib added in Python 3.4") def test_filenames_as_path(self): """Test with filenames specified as pathlib.Path.""" from pathlib import Path from satpy.readers import load_readers ri = load_readers(filenames=[ Path('SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'), ]) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_filenames_as_dict(self): """Test loading readers where filenames are organized by reader.""" from satpy.readers import load_readers filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], } ri = load_readers(filenames=filenames) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_filenames_as_dict_with_reader(self): """Test loading from a filenames dict with a single reader specified. This can happen in the deprecated Scene behavior of passing a reader and a base_dir. 
""" from satpy.readers import load_readers filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], } ri = load_readers(reader='viirs_sdr', filenames=filenames) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) def test_empty_filenames_as_dict(self): """Test passing filenames as a dictionary with an empty list of filenames.""" # only one reader from satpy.readers import load_readers filenames = { 'viirs_sdr': [], } self.assertRaises(ValueError, load_readers, filenames=filenames) # two readers, one is empty filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], 'viirs_l1b': [], } ri = load_readers(filenames) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) @mock.patch('satpy.readers.hrit_base.HRITFileHandler._get_hd') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') def test_missing_requirements(self, *mocks): """Test warnings and exceptions in case of missing requirements.""" from satpy.readers import load_readers # Filenames from a single scan epi_pro_miss = ['H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__'] epi_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__'] pro_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__'] for filenames in [epi_miss, pro_miss, epi_pro_miss]: self.assertRaises(ValueError, load_readers, reader='seviri_l1b_hrit', filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ # 09:00 scan is ok 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__', 'H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__', 'H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__', # 10:00 scan is incomplete 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__', ] try: load_readers(filenames=at_least_one_complete, reader='seviri_l1b_hrit') except ValueError: self.fail('If at least one set of filenames is complete, no ' 'exception should be raised') def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" from satpy.readers import load_readers import datetime filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], } filter_params = {'start_time': datetime.datetime(1970, 1, 1), 'end_time': datetime.datetime(1970, 1, 2), 'area': None} self.assertRaises(ValueError, load_readers, filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" from satpy.readers import load_readers import datetime filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc'], } filter_params = {'start_time': datetime.datetime(1970, 1, 1), 'end_time': datetime.datetime(1970, 1, 2)} self.assertRaises(ValueError, load_readers, filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has 
datasets.""" from satpy.readers import load_readers import datetime filenames = { 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc'], } filter_params = {'start_time': datetime.datetime(2012, 2, 25), 'end_time': datetime.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't readers = load_readers(filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) self.assertIn('viirs_sdr', readers) # abi_l1b reader was created, but no datasets available self.assertIn('abi_l1b', readers) self.assertEqual(len(list(readers['abi_l1b'].available_dataset_ids)), 0) class TestFindFilesAndReaders(unittest.TestCase): """Test the find_files_and_readers utility function.""" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_sdr import VIIRSSDRFileHandler from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() # def test_sensor(self): # """Test with filenames and sensor specified""" # from satpy.readers import load_readers # ri = load_readers(sensor='viirs', # filenames=[ # 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # ]) # self.assertListEqual(list(ri.keys()), ['viirs_sdr']) # def test_reader_name(self): """Test with default base_dir and reader specified.""" from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='viirs_sdr') self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_other_name(self): """Test with default base_dir and reader specified.""" from satpy.readers import find_files_and_readers fn = 'S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='nwcsaf-pps_nc') self.assertListEqual(list(ri.keys()), ['nwcsaf-pps_nc']) self.assertListEqual(ri['nwcsaf-pps_nc'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_name_matched_start_end_time(self): """Test with start and end time matching the filename.""" from satpy.readers import find_files_and_readers from datetime import datetime fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='viirs_sdr', start_time=datetime(2012, 2, 25, 18, 0, 0), end_time=datetime(2012, 2, 25, 19, 0, 0), ) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_name_matched_start_time(self): """Test with start matching the filename. Start time in the middle of the file time should still match the file. 
""" from satpy.readers import find_files_and_readers from datetime import datetime fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='viirs_sdr', start_time=datetime(2012, 2, 25, 18, 1, 30)) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_name_matched_end_time(self): """Test with end matching the filename. End time in the middle of the file time should still match the file. """ from satpy.readers import find_files_and_readers from datetime import datetime fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers(reader='viirs_sdr', end_time=datetime(2012, 2, 25, 18, 1, 30)) self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_reader_name_unmatched_start_end_time(self): """Test with start and end time matching the filename.""" from satpy.readers import find_files_and_readers from datetime import datetime fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: self.assertRaises(ValueError, find_files_and_readers, reader='viirs_sdr', start_time=datetime(2012, 2, 26, 18, 0, 0), end_time=datetime(2012, 2, 26, 19, 0, 0), ) finally: test_file.close() os.remove(fn) def test_no_parameters(self): """Test with no limiting parameters.""" from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: ri = find_files_and_readers() self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_bad_sensor(self): """Test bad sensor doesn't find any files.""" from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: self.assertRaises(ValueError, find_files_and_readers, sensor='i_dont_exist') finally: test_file.close() os.remove(fn) def test_sensor(self): """Test that readers for the current sensor are loaded.""" from satpy.readers import find_files_and_readers fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' # touch the file so it exists on disk test_file = open(fn, 'w') try: # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works ri = find_files_and_readers(sensor='viirs') self.assertListEqual(list(ri.keys()), ['viirs_sdr']) self.assertListEqual(ri['viirs_sdr'], [fn]) finally: test_file.close() os.remove(fn) def test_sensor_no_files(self): """Test that readers for the current sensor are loaded.""" from satpy.readers import find_files_and_readers # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works self.assertRaises(ValueError, find_files_and_readers, sensor='viirs') def test_reader_load_failed(self): """Test that an exception is raised when a reader can't be loaded.""" from satpy.readers import find_files_and_readers import yaml # 
class TestYAMLFiles(unittest.TestCase):
    """Test and analyze the reader configuration files."""

    def test_filename_matches_reader_name(self):
        """Test that every reader filename matches the name in the YAML."""
        import yaml

        class IgnoreLoader(yaml.SafeLoader):

            def _ignore_all_tags(self, tag_suffix, node):
                return tag_suffix + ' ' + node.value

        IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags)

        from satpy.config import glob_config
        from satpy.readers import read_reader_config
        for reader_config in glob_config('readers/*.yaml'):
            reader_fn = os.path.basename(reader_config)
            reader_fn_name = os.path.splitext(reader_fn)[0]
            reader_info = read_reader_config([reader_config],
                                             loader=IgnoreLoader)
            self.assertEqual(reader_fn_name, reader_info['name'],
                             "Reader YAML filename doesn't match reader "
                             "name in the YAML file.")

    def test_available_readers(self):
        """Test the 'available_readers' function."""
        from satpy import available_readers
        reader_names = available_readers()
        self.assertGreater(len(reader_names), 0)
        self.assertIsInstance(reader_names[0], str)
        self.assertIn('viirs_sdr', reader_names)  # needs h5py
        self.assertIn('abi_l1b', reader_names)  # needs netcdf4
        reader_infos = available_readers(as_dict=True)
        self.assertEqual(len(reader_names), len(reader_infos))
        self.assertIsInstance(reader_infos[0], dict)
        for reader_info in reader_infos:
            self.assertIn('name', reader_info)
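
# Quick sketch (not in the original file) of the `available_readers` helper
# covered above; with as_dict=True each entry is the parsed reader YAML
# metadata rather than just the reader name.
def _example_available_readers_usage():
    """Minimal sketch of available_readers."""
    from satpy import available_readers
    names = available_readers()              # list of reader names
    infos = available_readers(as_dict=True)  # list of metadata dicts
    return names, infos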
"GITCO_j01_d20180511_t2031447_e2033092_b02476_c20190530192932937427_noac_ops.h5", "GITCO_j01_d20180511_t2033105_e2034350_b02476_c20190530192932937427_noac_ops.h5", "SVI03_j01_d20180511_t2027292_e2028538_b02476_c20190530190950789763_noac_ops.h5", "SVI03_j01_d20180511_t2028550_e2030195_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2030208_e2031435_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2031447_e2033092_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2033105_e2034350_b02476_c20190530192911205765_noac_ops.h5", "SVI04_j01_d20180511_t2027292_e2028538_b02476_c20190530190951848958_noac_ops.h5", "SVI04_j01_d20180511_t2028550_e2030195_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2030208_e2031435_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2031447_e2033092_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2033105_e2034350_b02476_c20190530192903985164_noac_ops.h5" ] self.npp_files = [ "GITCO_npp_d20180511_t1939067_e1940309_b33872_c20190612031740518143_noac_ops.h5", "GITCO_npp_d20180511_t1940321_e1941563_b33872_c20190612031740518143_noac_ops.h5", "GITCO_npp_d20180511_t1941575_e1943217_b33872_c20190612031740518143_noac_ops.h5", "SVI03_npp_d20180511_t1939067_e1940309_b33872_c20190612032009230105_noac_ops.h5", "SVI03_npp_d20180511_t1940321_e1941563_b33872_c20190612032009230105_noac_ops.h5", "SVI03_npp_d20180511_t1941575_e1943217_b33872_c20190612032009230105_noac_ops.h5", ] def test_no_reader(self): """Test that reader must be provided.""" from satpy.readers import group_files self.assertRaises(ValueError, group_files, []) def test_bad_reader(self): """Test that reader not existing causes an error.""" from satpy.readers import group_files import yaml # touch the file so it exists on disk with mock.patch('yaml.load') as load: load.side_effect = yaml.YAMLError("Import problems") self.assertRaises(yaml.YAMLError, group_files, [], reader='abi_l1b') def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader='abi_l1b') self.assertEqual(6, len(groups)) self.assertEqual(2, len(groups[0]['abi_l1b'])) def test_non_datetime_group_key(self): """Test what happens when the start_time isn't used for grouping.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader='abi_l1b', group_keys=('platform_shortname',)) self.assertEqual(1, len(groups)) self.assertEqual(12, len(groups[0]['abi_l1b'])) def test_large_time_threshold(self): """Test what happens when the time threshold holds multiple files.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader='abi_l1b', time_threshold=60*8) self.assertEqual(3, len(groups)) self.assertEqual(4, len(groups[0]['abi_l1b'])) def test_two_instruments_files(self): """Test the behavior when two instruments files are provided. This is undesired from a user point of view since we don't want G16 and G17 files in the same Scene. Readers (like abi_l1b) are or can be configured to have specific group keys for handling these situations. Due to that this test forces the fallback group keys of ('start_time',). 
""" from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', group_keys=('start_time',)) self.assertEqual(6, len(groups)) self.assertEqual(4, len(groups[0]['abi_l1b'])) def test_two_instruments_files_split(self): """Test the default behavior when two instruments files are provided and split. Tell the sorting to include the platform identifier as another field to use for grouping. """ from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', group_keys=('start_time', 'platform_shortname')) self.assertEqual(12, len(groups)) self.assertEqual(2, len(groups[0]['abi_l1b'])) # default for abi_l1b should also behave like this groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b') self.assertEqual(12, len(groups)) self.assertEqual(2, len(groups[0]['abi_l1b'])) def test_viirs_orbits(self): """Test a reader that doesn't use 'start_time' for default grouping.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr') self.assertEqual(2, len(groups)) # the noaa-20 files will be first because the orbit number is smaller # 5 granules * 3 file types self.assertEqual(5 * 3, len(groups[0]['viirs_sdr'])) # 3 granules * 2 file types self.assertEqual(6, len(groups[1]['viirs_sdr'])) def test_viirs_override_keys(self): """Test overriding a group keys to add 'start_time'.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', group_keys=('start_time', 'orbit', 'platform_shortname')) self.assertEqual(8, len(groups)) self.assertEqual(2, len(groups[0]['viirs_sdr'])) # NPP self.assertEqual(2, len(groups[1]['viirs_sdr'])) # NPP self.assertEqual(2, len(groups[2]['viirs_sdr'])) # NPP self.assertEqual(3, len(groups[3]['viirs_sdr'])) # N20 self.assertEqual(3, len(groups[4]['viirs_sdr'])) # N20 self.assertEqual(3, len(groups[5]['viirs_sdr'])) # N20 self.assertEqual(3, len(groups[6]['viirs_sdr'])) # N20 self.assertEqual(3, len(groups[7]['viirs_sdr'])) # N20 # Ask for a larger time span with our groups groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', time_threshold=60 * 60 * 2, group_keys=('start_time', 'orbit', 'platform_shortname')) self.assertEqual(2, len(groups)) # NPP is first because it has an earlier time # 3 granules * 2 file types self.assertEqual(6, len(groups[0]['viirs_sdr'])) # 5 granules * 3 file types self.assertEqual(5 * 3, len(groups[1]['viirs_sdr'])) def suite(): """Create test suite for test_readers.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestDatasetDict)) mysuite.addTest(loader.loadTestsFromTestCase(TestReaderLoader)) mysuite.addTest(loader.loadTestsFromTestCase(TestFindFilesAndReaders)) mysuite.addTest(loader.loadTestsFromTestCase(TestYAMLFiles)) mysuite.addTest(loader.loadTestsFromTestCase(TestGroupFiles)) return mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/tests/test_resample.py000066400000000000000000001257651362525524100205010ustar00rootroot00000000000000#!/usr/bin/python # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
satpy-0.20.0/satpy/tests/test_resample.py

#!/usr/bin/python
# Copyright (c) 2016 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unittests for resamplers."""

import unittest
import tempfile
import shutil
import os

try:
    from unittest import mock
except ImportError:
    import mock

try:
    from pyproj import CRS
except ImportError:
    CRS = None
def get_test_data(input_shape=(100, 50), output_shape=(200, 100),
                  output_proj=None, input_dims=('y', 'x')):
    """Get common data objects used in testing.

    Returns: tuple with the following elements
        input_data_on_area: DataArray with dimensions as if it is a gridded
            dataset.
        input_area_def: AreaDefinition of the above DataArray
        input_data_on_swath: DataArray with dimensions as if it is a swath.
        input_swath: SwathDefinition of the above DataArray
        target_area_def: AreaDefinition to be used as a target for resampling

    """
    from xarray import DataArray
    import dask.array as da
    from pyresample.geometry import AreaDefinition, SwathDefinition
    from pyresample.utils import proj4_str_to_dict
    ds1 = DataArray(da.zeros(input_shape, chunks=85),
                    dims=input_dims,
                    attrs={'name': 'test_data_name', 'test': 'test'})
    if input_dims and 'y' in input_dims:
        ds1 = ds1.assign_coords(y=da.arange(input_shape[-2], chunks=85))
    if input_dims and 'x' in input_dims:
        ds1 = ds1.assign_coords(x=da.arange(input_shape[-1], chunks=85))
    if input_dims and 'bands' in input_dims:
        ds1 = ds1.assign_coords(bands=list('RGBA'[:ds1.sizes['bands']]))

    input_proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 '
                      '+b=6356752.31414 +sweep=x +units=m +no_defs')
    source = AreaDefinition(
        'test_target', 'test_target', 'test_target',
        proj4_str_to_dict(input_proj_str),
        input_shape[1],  # width
        input_shape[0],  # height
        (-1000., -1500., 1000., 1500.))
    ds1.attrs['area'] = source
    if CRS is not None:
        crs = CRS.from_string(input_proj_str)
        ds1 = ds1.assign_coords(crs=crs)

    ds2 = ds1.copy()
    input_area_shape = tuple(ds1.sizes[dim] for dim in ds1.dims
                             if dim in ['y', 'x'])
    geo_dims = ('y', 'x') if input_dims else None
    lons = da.random.random(input_area_shape, chunks=50)
    lats = da.random.random(input_area_shape, chunks=50)
    swath_def = SwathDefinition(
        DataArray(lons, dims=geo_dims),
        DataArray(lats, dims=geo_dims))
    ds2.attrs['area'] = swath_def
    if CRS is not None:
        crs = CRS.from_string('+proj=latlong +datum=WGS84 +ellps=WGS84')
        ds2 = ds2.assign_coords(crs=crs)

    # set up target definition
    output_proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                       '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs')
    output_proj_str = output_proj or output_proj_str
    target = AreaDefinition(
        'test_target', 'test_target', 'test_target',
        proj4_str_to_dict(output_proj_str),
        output_shape[1],  # width
        output_shape[0],  # height
        (-1000., -1500., 1000., 1500.),
    )
    return ds1, source, ds2, swath_def, target


class TestHLResample(unittest.TestCase):
    """Test the higher level resampling functions."""

    def test_type_preserve(self):
        """Check that the type of resampled datasets is preserved."""
        from satpy.resample import resample_dataset
        import xarray as xr
        import dask.array as da
        import numpy as np
        from pyresample.geometry import SwathDefinition
        source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)),
                                                   dims=['y', 'x']),
                                      xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)),
                                                   dims=['y', 'x']))
        dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001,
                                                 dims=['y', 'x']),
                                    xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001,
                                                 dims=['y', 'x']))
        expected_gap = np.array([[1, 2], [3, 255]])
        data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=['y', 'x'])
        data.attrs['_FillValue'] = 255
        data.attrs['area'] = source_area
        res = resample_dataset(data, dest_area)
        self.assertEqual(res.dtype, data.dtype)
        self.assertTrue(np.all(res.values == expected_gap))

        expected_filled = np.array([[1, 2], [3, 3]])
        res = resample_dataset(data, dest_area, radius_of_influence=1000000)
        self.assertEqual(res.dtype, data.dtype)
        self.assertTrue(np.all(res.values == expected_filled))
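
# Hypothetical sketch of the high-level `resample_dataset` entry point tested
# above, reusing the get_test_data() fixtures defined in this module.
def _example_resample_dataset_usage():
    """Minimal sketch of resample_dataset with the default 'nearest' resampler."""
    from satpy.resample import resample_dataset
    ds1, source_area, ds2, swath_def, target_area = get_test_data()
    res = resample_dataset(ds1, target_area, radius_of_influence=50000)
    return res  # same dtype/attrs as ds1, now on the target AreaDefinition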
class TestKDTreeResampler(unittest.TestCase):
    """Test the kd-tree resampler."""

    @mock.patch('satpy.resample.KDTreeResampler._check_numpy_cache')
    @mock.patch('satpy.resample.xr.Dataset')
    @mock.patch('satpy.resample.zarr.open')
    @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename')
    @mock.patch('pyresample.kd_tree.XArrayResamplerNN')
    def test_kd_resampling(self, resampler, create_filename, zarr_open,
                           xr_dset, cnc):
        """Test the kd resampler."""
        import numpy as np
        import dask.array as da
        from satpy.resample import KDTreeResampler
        data, source_area, swath_data, source_swath, target_area = get_test_data()
        mock_dset = mock.MagicMock()
        xr_dset.return_value = mock_dset
        resampler = KDTreeResampler(source_swath, target_area)
        resampler.precompute(
            mask=da.arange(5, chunks=5).astype(np.bool), cache_dir='.')
        resampler.resampler.get_neighbour_info.assert_called()
        # swath definitions should not be cached
        self.assertFalse(len(mock_dset.to_zarr.mock_calls), 0)
        resampler.resampler.reset_mock()
        cnc.assert_called_once()

        resampler = KDTreeResampler(source_area, target_area)
        resampler.precompute()
        resampler.resampler.get_neighbour_info.assert_called_with(mask=None)

        try:
            the_dir = tempfile.mkdtemp()
            resampler = KDTreeResampler(source_area, target_area)
            create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr')
            zarr_open.side_effect = ValueError()
            resampler.precompute(cache_dir=the_dir)
            # assert data was saved to the on-disk cache
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # assert that zarr_open was called to try to zarr_open something from disk
            self.assertEqual(len(zarr_open.mock_calls), 1)
            # we should have cached things in-memory
            self.assertEqual(len(resampler._index_caches), 1)
            nbcalls = len(resampler.resampler.get_neighbour_info.mock_calls)

            # test reusing the resampler
            zarr_open.side_effect = None

            class FakeZarr(dict):

                def close(self):
                    pass

                def astype(self, dtype):
                    pass

            zarr_open.return_value = FakeZarr(valid_input_index=1,
                                              valid_output_index=2,
                                              index_array=3,
                                              distance_array=4)
            resampler.precompute(cache_dir=the_dir)
            # we already have things cached in-memory, no need to save again
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # we already have things cached in-memory, don't need to load
            self.assertEqual(len(zarr_open.mock_calls), 1)
            # we should have cached things in-memory
            self.assertEqual(len(resampler._index_caches), 1)
            self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls)

            # test loading saved resampler
            resampler = KDTreeResampler(source_area, target_area)
            resampler.precompute(cache_dir=the_dir)
            self.assertEqual(len(zarr_open.mock_calls), 4)
            self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls)
            # we should have cached things in-memory now
            self.assertEqual(len(resampler._index_caches), 1)
        finally:
            shutil.rmtree(the_dir)

        fill_value = 8
        resampler.compute(data, fill_value=fill_value)
        resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value)

    @mock.patch('satpy.resample.np.load')
    @mock.patch('satpy.resample.xr.Dataset')
    def test_check_numpy_cache(self, xr_Dataset, np_load):
        """Test that cache stored in .npz is converted to zarr."""
        from satpy.resample import KDTreeResampler
        data, source_area, swath_data, source_swath, target_area = get_test_data()
        resampler = KDTreeResampler(source_area, target_area)
        zarr_out = mock.MagicMock()
        xr_Dataset.return_value = zarr_out

        try:
            the_dir = tempfile.mkdtemp()
            kwargs = {}
            np_path = resampler._create_cache_filename(the_dir,
                                                       prefix='resample_lut-',
                                                       fmt='.npz',
                                                       mask=None,
                                                       **kwargs)
            zarr_path = resampler._create_cache_filename(the_dir,
                                                         prefix='nn_lut-',
                                                         fmt='.zarr',
                                                         mask=None,
                                                         **kwargs)
            resampler._check_numpy_cache(the_dir)
            np_load.assert_not_called()
            zarr_out.to_zarr.assert_not_called()
            with open(np_path, 'w') as fid:
                fid.write("42")
            resampler._check_numpy_cache(the_dir)
            np_load.assert_called_once_with(np_path, 'r')
            zarr_out.to_zarr.assert_called_once_with(zarr_path)
        finally:
            shutil.rmtree(the_dir)
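
# Sketch (hypothetical cache path) of the on-disk caching behaviour tested
# above: passing cache_dir makes the nearest-neighbour resampler persist its
# neighbour info to zarr so later runs skip get_neighbour_info(). Note the
# tests also show that swath sources are deliberately never cached, so a
# gridded source (ds1) is used here.
def _example_kdtree_cache_usage():
    """Minimal sketch of cached nearest-neighbour resampling."""
    from satpy.resample import resample_dataset
    ds1, source_area, ds2, swath_def, target_area = get_test_data()
    return resample_dataset(ds1, target_area, resampler='nearest',
                            cache_dir='/tmp/satpy_cache')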
class TestEWAResampler(unittest.TestCase):
    """Test EWA resampler class."""

    @mock.patch('satpy.resample.fornav')
    @mock.patch('satpy.resample.ll2cr')
    @mock.patch('satpy.resample.SwathDefinition.get_lonlats')
    def test_2d_ewa(self, get_lonlats, ll2cr, fornav):
        """Test EWA with a 2D dataset."""
        import numpy as np
        import xarray as xr
        from satpy.resample import resample_dataset
        ll2cr.return_value = (100,
                              np.zeros((10, 10), dtype=np.float32),
                              np.zeros((10, 10), dtype=np.float32))
        fornav.return_value = (100 * 200,
                               np.zeros((200, 100), dtype=np.float32))
        _, _, swath_data, source_swath, target_area = get_test_data()
        get_lonlats.return_value = (source_swath.lons, source_swath.lats)
        swath_data.data = swath_data.data.astype(np.float32)
        num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1])

        new_data = resample_dataset(swath_data, target_area, resampler='ewa')
        self.assertTupleEqual(new_data.shape, (200, 100))
        self.assertEqual(new_data.dtype, np.float32)
        self.assertEqual(new_data.attrs['test'], 'test')
        self.assertIs(new_data.attrs['area'], target_area)
        # make sure we can actually compute everything
        new_data.compute()
        lonlat_calls = get_lonlats.call_count
        ll2cr_calls = ll2cr.call_count

        # resample a different dataset and make sure cache is used
        data = xr.DataArray(
            swath_data.data,
            dims=('y', 'x'), attrs={'area': source_swath, 'test': 'test2',
                                    'name': 'test2'})
        new_data = resample_dataset(data, target_area, resampler='ewa')
        new_data.compute()
        # ll2cr will be called once more because of the computation
        self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks)
        # but we should already have taken the lonlats from the SwathDefinition
        self.assertEqual(get_lonlats.call_count, lonlat_calls)
        self.assertIn('y', new_data.coords)
        self.assertIn('x', new_data.coords)
        if CRS is not None:
            self.assertIn('crs', new_data.coords)
            self.assertIsInstance(new_data.coords['crs'].item(), CRS)
            self.assertIn('lcc', new_data.coords['crs'].item().to_proj4())
            self.assertEqual(new_data.coords['y'].attrs['units'], 'meter')
            self.assertEqual(new_data.coords['x'].attrs['units'], 'meter')
            if hasattr(target_area, 'crs'):
                self.assertIs(target_area.crs, new_data.coords['crs'].item())

    @mock.patch('satpy.resample.fornav')
    @mock.patch('satpy.resample.ll2cr')
    @mock.patch('satpy.resample.SwathDefinition.get_lonlats')
    def test_3d_ewa(self, get_lonlats, ll2cr, fornav):
        """Test EWA with a 3D dataset."""
        import numpy as np
        import xarray as xr
        from satpy.resample import resample_dataset
        _, _, swath_data, source_swath, target_area = get_test_data(
            input_shape=(3, 200, 100), input_dims=('bands', 'y', 'x'))
        swath_data.data = swath_data.data.astype(np.float32)
        ll2cr.return_value = (100,
                              np.zeros((10, 10), dtype=np.float32),
                              np.zeros((10, 10), dtype=np.float32))
        fornav.return_value = ([100 * 200] * 3,
                               [np.zeros((200, 100), dtype=np.float32)] * 3)
        get_lonlats.return_value = (source_swath.lons, source_swath.lats)
        num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1])

        new_data = resample_dataset(swath_data, target_area, resampler='ewa')
        self.assertTupleEqual(new_data.shape, (3, 200, 100))
        self.assertEqual(new_data.dtype, np.float32)
        self.assertEqual(new_data.attrs['test'], 'test')
        self.assertIs(new_data.attrs['area'], target_area)
        # make sure we can actually compute everything
        new_data.compute()
        lonlat_calls = get_lonlats.call_count
        ll2cr_calls = ll2cr.call_count

        # resample a different dataset and make sure cache is used
        swath_data = xr.DataArray(
            swath_data.data,
            dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']},
            attrs={'area': source_swath, 'test': 'test'})
        new_data = resample_dataset(swath_data, target_area, resampler='ewa')
        new_data.compute()
        # ll2cr will be called once more because of the computation
        self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks)
        # but we should already have taken the lonlats from the SwathDefinition
        self.assertEqual(get_lonlats.call_count, lonlat_calls)
        self.assertIn('y', new_data.coords)
        self.assertIn('x', new_data.coords)
        self.assertIn('bands', new_data.coords)
        if CRS is not None:
            self.assertIn('crs', new_data.coords)
            self.assertIsInstance(new_data.coords['crs'].item(), CRS)
            self.assertIn('lcc', new_data.coords['crs'].item().to_proj4())
            self.assertEqual(new_data.coords['y'].attrs['units'], 'meter')
            self.assertEqual(new_data.coords['x'].attrs['units'], 'meter')
            np.testing.assert_equal(new_data.coords['bands'].values,
                                    ['R', 'G', 'B'])
            if hasattr(target_area, 'crs'):
                self.assertIs(target_area.crs, new_data.coords['crs'].item())
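
# Hypothetical sketch of EWA resampling as exercised above. The rows_per_scan
# value is an assumption for illustration only -- the EWA algorithm expects
# swath data organised in complete scan lines, and the right value depends on
# the instrument.
def _example_ewa_usage():
    """Minimal sketch of elliptical weighted averaging (EWA) resampling."""
    from satpy.resample import resample_dataset
    ds1, source_area, swath_data, swath_def, target_area = get_test_data()
    return resample_dataset(swath_data, target_area, resampler='ewa',
                            rows_per_scan=10)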
class TestNativeResampler(unittest.TestCase):
    """Tests for the 'native' resampling method."""

    def test_expand_reduce(self):
        """Test class method 'expand_reduce' basics."""
        from satpy.resample import NativeResampler
        import numpy as np
        import dask.array as da
        d_arr = da.zeros((6, 20), chunks=4)
        new_data = NativeResampler.expand_reduce(d_arr, {0: 2., 1: 2.})
        self.assertEqual(new_data.shape, (12, 40))
        new_data = NativeResampler.expand_reduce(d_arr, {0: .5, 1: .5})
        self.assertEqual(new_data.shape, (3, 10))
        self.assertRaises(ValueError, NativeResampler.expand_reduce,
                          d_arr, {0: 1. / 3, 1: 1.})
        new_data = NativeResampler.expand_reduce(d_arr, {0: 1., 1: 1.})
        self.assertEqual(new_data.shape, (6, 20))
        self.assertIs(new_data, d_arr)
        self.assertRaises(ValueError, NativeResampler.expand_reduce,
                          d_arr, {0: 0.333323423, 1: 1.})
        self.assertRaises(ValueError, NativeResampler.expand_reduce,
                          d_arr, {0: 1.333323423, 1: 1.})
        n_arr = np.zeros((6, 20))
        new_data = NativeResampler.expand_reduce(n_arr, {0: 2., 1: 1.0})
        self.assertTrue(np.all(new_data.compute()[::2, :] == n_arr))

    def test_expand_dims(self):
        """Test expanding native resampling with 2D data."""
        from satpy.resample import NativeResampler
        import numpy as np
        ds1, source_area, _, _, target_area = get_test_data()
        # source geo def doesn't actually matter
        resampler = NativeResampler(source_area, target_area)
        new_data = resampler.resample(ds1)
        self.assertEqual(new_data.shape, (200, 100))
        new_data2 = resampler.resample(ds1.compute())
        self.assertTrue(np.all(new_data == new_data2))
        self.assertIn('y', new_data.coords)
        self.assertIn('x', new_data.coords)
        if CRS is not None:
            self.assertIn('crs', new_data.coords)
            self.assertIsInstance(new_data.coords['crs'].item(), CRS)
            self.assertIn('lcc', new_data.coords['crs'].item().to_proj4())
            self.assertEqual(new_data.coords['y'].attrs['units'], 'meter')
            self.assertEqual(new_data.coords['x'].attrs['units'], 'meter')
            if hasattr(target_area, 'crs'):
                self.assertIs(target_area.crs, new_data.coords['crs'].item())

    def test_expand_dims_3d(self):
        """Test expanding native resampling with 3D data."""
        from satpy.resample import NativeResampler
        import numpy as np
        ds1, source_area, _, _, target_area = get_test_data(
            input_shape=(3, 100, 50), input_dims=('bands', 'y', 'x'))
        # source geo def doesn't actually matter
        resampler = NativeResampler(source_area, target_area)
        new_data = resampler.resample(ds1)
        self.assertEqual(new_data.shape, (3, 200, 100))
        new_data2 = resampler.resample(ds1.compute())
        self.assertTrue(np.all(new_data == new_data2))
        self.assertIn('y', new_data.coords)
        self.assertIn('x', new_data.coords)
        self.assertIn('bands', new_data.coords)
        np.testing.assert_equal(new_data.coords['bands'].values,
                                ['R', 'G', 'B'])
        if CRS is not None:
            self.assertIn('crs', new_data.coords)
            self.assertIsInstance(new_data.coords['crs'].item(), CRS)
            self.assertIn('lcc', new_data.coords['crs'].item().to_proj4())
            self.assertEqual(new_data.coords['y'].attrs['units'], 'meter')
            self.assertEqual(new_data.coords['x'].attrs['units'], 'meter')
            if hasattr(target_area, 'crs'):
                self.assertIs(target_area.crs, new_data.coords['crs'].item())

    def test_expand_without_dims(self):
        """Test expanding native resampling with no dimensions specified."""
        from satpy.resample import NativeResampler
        import numpy as np
        ds1, source_area, _, _, target_area = get_test_data(input_dims=None)
        # source geo def doesn't actually matter
        resampler = NativeResampler(source_area, target_area)
        new_data = resampler.resample(ds1)
        self.assertEqual(new_data.shape, (200, 100))
        new_data2 = resampler.resample(ds1.compute())
        self.assertTrue(np.all(new_data == new_data2))
        if CRS is not None:
            self.assertIn('crs', new_data.coords)
            self.assertIsInstance(new_data.coords['crs'].item(), CRS)
            self.assertIn('lcc', new_data.coords['crs'].item().to_proj4())
            if hasattr(target_area, 'crs'):
                self.assertIs(target_area.crs, new_data.coords['crs'].item())

    def test_expand_without_dims_4D(self):
        """Test expanding native resampling with 4D data with no dimensions specified."""
        from satpy.resample import NativeResampler
        ds1, source_area, _, _, target_area = get_test_data(
            input_shape=(2, 3, 100, 50), input_dims=None)
        # source geo def doesn't actually matter
        resampler = NativeResampler(source_area, target_area)
        self.assertRaises(ValueError, resampler.resample, ds1)
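
# Sketch of the 'native' resampler tested above: it only replicates or
# aggregates pixels, so each target dimension must be an integer multiple (or
# divisor) of the corresponding source dimension, as the ValueError cases show.
def _example_native_usage():
    """Minimal sketch of native resampling."""
    from satpy.resample import resample_dataset
    ds1, source_area, ds2, swath_def, target_area = get_test_data()
    return resample_dataset(ds1, target_area, resampler='native')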
class TestBilinearResampler(unittest.TestCase):
    """Test the bilinear resampler."""

    @mock.patch('satpy.resample._move_existing_caches')
    @mock.patch('satpy.resample.xr.Dataset')
    @mock.patch('satpy.resample.zarr.open')
    @mock.patch('satpy.resample.BilinearResampler._create_cache_filename')
    @mock.patch('pyresample.bilinear.xarr.XArrayResamplerBilinear')
    def test_bil_resampling(self, resampler, create_filename, zarr_open,
                            xr_dset, move_existing_caches):
        """Test the bilinear resampler."""
        import numpy as np
        import dask.array as da
        import xarray as xr
        from satpy.resample import BilinearResampler
        data, source_area, swath_data, source_swath, target_area = get_test_data()
        mock_dset = mock.MagicMock()
        xr_dset.return_value = mock_dset

        # Test that bilinear resampling info calculation is called,
        # and the info is saved
        zarr_open.side_effect = IOError()
        resampler = BilinearResampler(source_swath, target_area)
        resampler.precompute(
            mask=da.arange(5, chunks=5).astype(np.bool))
        resampler.resampler.get_bil_info.assert_called()
        resampler.resampler.get_bil_info.assert_called_with()
        self.assertFalse(len(mock_dset.to_zarr.mock_calls), 1)
        resampler.resampler.reset_mock()
        zarr_open.reset_mock()
        zarr_open.side_effect = None

        # Test that get_sample_from_bil_info is called properly
        fill_value = 8
        resampler.resampler.get_sample_from_bil_info.return_value = \
            xr.DataArray(da.zeros(target_area.shape), dims=('y', 'x'))
        new_data = resampler.compute(data, fill_value=fill_value)
        resampler.resampler.get_sample_from_bil_info.assert_called_with(
            data, fill_value=fill_value, output_shape=target_area.shape)
        self.assertIn('y', new_data.coords)
        self.assertIn('x', new_data.coords)
        if CRS is not None:
            self.assertIn('crs', new_data.coords)
            self.assertIsInstance(new_data.coords['crs'].item(), CRS)
            self.assertIn('lcc', new_data.coords['crs'].item().to_proj4())
            self.assertEqual(new_data.coords['y'].attrs['units'], 'meter')
            self.assertEqual(new_data.coords['x'].attrs['units'], 'meter')
            if hasattr(target_area, 'crs'):
                self.assertIs(target_area.crs, new_data.coords['crs'].item())

        # Test that the resampling info is tried to read from the disk
        resampler = BilinearResampler(source_swath, target_area)
        resampler.precompute(cache_dir='.')
        zarr_open.assert_called()

        # Test caching the resampling info
        try:
            the_dir = tempfile.mkdtemp()
            resampler = BilinearResampler(source_area, target_area)
            create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr')
            zarr_open.reset_mock()
            zarr_open.side_effect = IOError()
            resampler.precompute(cache_dir=the_dir)
            xr_dset.assert_called()
            # assert data was saved to the on-disk cache
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # assert that zarr.open was called to try to load
            # something from disk
            self.assertEqual(len(zarr_open.mock_calls), 1)

            nbcalls = len(resampler.resampler.get_bil_info.mock_calls)
            # test reusing the resampler
            zarr_open.side_effect = None

            class FakeZarr(dict):

                def close(self):
                    pass

                def astype(self, dtype):
                    return self

                def compute(self):
                    return self

            zarr_open.return_value = FakeZarr(bilinear_s=1, bilinear_t=2,
                                              slices_x=3, slices_y=4,
                                              mask_slices=5, out_coords_x=6,
                                              out_coords_y=7)
            resampler.precompute(cache_dir=the_dir)
            # we already have things cached in-memory, no need to save again
            self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1)
            # we already have things cached in-memory, don't need to load
            # self.assertEqual(len(zarr_open.mock_calls), 1)
            self.assertEqual(len(resampler.resampler.get_bil_info.mock_calls), nbcalls)

            # test loading saved resampler
            resampler = BilinearResampler(source_area, target_area)
            resampler.precompute(cache_dir=the_dir)
            self.assertEqual(len(zarr_open.mock_calls), 2)
            self.assertEqual(len(resampler.resampler.get_bil_info.mock_calls), nbcalls)
            # we should have cached things in-memory now
            # self.assertEqual(len(resampler._index_caches), 1)

            resampler = BilinearResampler(source_area, target_area)
            resampler.precompute(cache_dir=the_dir)
            resampler.save_bil_info(cache_dir=the_dir)
            zarr_file = os.path.join(the_dir, 'test_cache.zarr')
            # Save again faking the cache file already exists
            with mock.patch('os.path.exists') as exists:
                exists.return_value = True
                resampler.save_bil_info(cache_dir=the_dir)
            move_existing_caches.assert_called_once_with(the_dir, zarr_file)
        finally:
            shutil.rmtree(the_dir)
    def test_move_existing_caches(self):
        """Test that existing caches are moved to a subdirectory."""
        try:
            the_dir = tempfile.mkdtemp()
            # Test that existing cache file is moved away
            zarr_file = os.path.join(the_dir, 'test.zarr')
            with open(zarr_file, 'w') as fid:
                fid.write('42')
            from satpy.resample import _move_existing_caches
            _move_existing_caches(the_dir, zarr_file)
            self.assertFalse(os.path.exists(zarr_file))
            self.assertTrue(os.path.exists(
                os.path.join(the_dir, 'moved_by_satpy', 'test.zarr')))
            # Run again to see that the existing dir doesn't matter
            with open(zarr_file, 'w') as fid:
                fid.write('42')
            _move_existing_caches(the_dir, zarr_file)
        finally:
            shutil.rmtree(the_dir)
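
# Hypothetical sketch of bilinear resampling with the zarr cache handling
# tested above; an existing incompatible cache is moved aside by
# _move_existing_caches rather than overwritten. The cache path is a
# placeholder.
def _example_bilinear_usage():
    """Minimal sketch of cached bilinear resampling."""
    from satpy.resample import resample_dataset
    ds1, source_area, ds2, swath_def, target_area = get_test_data()
    return resample_dataset(ds2, target_area, resampler='bilinear',
                            cache_dir='/tmp/satpy_cache')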
class TestCoordinateHelpers(unittest.TestCase):
    """Test various utility functions for working with coordinates."""

    def test_area_def_coordinates(self):
        """Test coordinates being added with an AreaDefinition."""
        import numpy as np
        import dask.array as da
        import xarray as xr
        from pyresample.geometry import AreaDefinition
        from satpy.resample import add_crs_xy_coords
        area_def = AreaDefinition(
            'test', 'test', 'test', {'proj': 'lcc', 'lat_1': 25, 'lat_0': 25},
            100, 200, [-100, -100, 100, 100]
        )
        data_arr = xr.DataArray(
            da.zeros((200, 100), chunks=50),
            attrs={'area': area_def},
            dims=('y', 'x'),
        )
        new_data_arr = add_crs_xy_coords(data_arr, area_def)
        self.assertIn('y', new_data_arr.coords)
        self.assertIn('x', new_data_arr.coords)

        if CRS is not None:
            self.assertIn('units', new_data_arr.coords['y'].attrs)
            self.assertEqual(
                new_data_arr.coords['y'].attrs['units'], 'meter')
            self.assertIn('units', new_data_arr.coords['x'].attrs)
            self.assertEqual(
                new_data_arr.coords['x'].attrs['units'], 'meter')
            self.assertIn('crs', new_data_arr.coords)
            self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS)
            if hasattr(area_def, 'crs'):
                self.assertIs(area_def.crs, new_data_arr.coords['crs'].item())

        # already has coords
        data_arr = xr.DataArray(
            da.zeros((200, 100), chunks=50),
            attrs={'area': area_def},
            dims=('y', 'x'),
            coords={'y': np.arange(2, 202), 'x': np.arange(100)}
        )
        new_data_arr = add_crs_xy_coords(data_arr, area_def)
        self.assertIn('y', new_data_arr.coords)
        self.assertNotIn('units', new_data_arr.coords['y'].attrs)
        self.assertIn('x', new_data_arr.coords)
        self.assertNotIn('units', new_data_arr.coords['x'].attrs)
        np.testing.assert_equal(new_data_arr.coords['y'], np.arange(2, 202))

        if CRS is not None:
            self.assertIn('crs', new_data_arr.coords)
            self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS)
            if hasattr(area_def, 'crs'):
                self.assertIs(area_def.crs, new_data_arr.coords['crs'].item())

        # lat/lon area
        area_def = AreaDefinition(
            'test', 'test', 'test', {'proj': 'latlong'},
            100, 200, [-100, -100, 100, 100]
        )
        data_arr = xr.DataArray(
            da.zeros((200, 100), chunks=50),
            attrs={'area': area_def},
            dims=('y', 'x'),
        )
        new_data_arr = add_crs_xy_coords(data_arr, area_def)
        self.assertIn('y', new_data_arr.coords)
        self.assertIn('x', new_data_arr.coords)

        if CRS is not None:
            self.assertIn('units', new_data_arr.coords['y'].attrs)
            self.assertEqual(
                new_data_arr.coords['y'].attrs['units'], 'degrees_north')
            self.assertIn('units', new_data_arr.coords['x'].attrs)
            self.assertEqual(
                new_data_arr.coords['x'].attrs['units'], 'degrees_east')
            self.assertIn('crs', new_data_arr.coords)
            self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS)
            if hasattr(area_def, 'crs'):
                self.assertIs(area_def.crs, new_data_arr.coords['crs'].item())

    def test_swath_def_coordinates(self):
        """Test coordinates being added with an SwathDefinition."""
        import dask.array as da
        import xarray as xr
        from pyresample.geometry import SwathDefinition
        from satpy.resample import add_crs_xy_coords
        lons_data = da.random.random((200, 100), chunks=50)
        lats_data = da.random.random((200, 100), chunks=50)
        lons = xr.DataArray(lons_data, attrs={'units': 'degrees_east'},
                            dims=('y', 'x'))
        lats = xr.DataArray(lats_data, attrs={'units': 'degrees_north'},
                            dims=('y', 'x'))
        area_def = SwathDefinition(lons, lats)
        data_arr = xr.DataArray(
            da.zeros((200, 100), chunks=50),
            attrs={'area': area_def},
            dims=('y', 'x'),
        )
        new_data_arr = add_crs_xy_coords(data_arr, area_def)
        # See https://github.com/pydata/xarray/issues/3068
        # self.assertIn('longitude', new_data_arr.coords)
        # self.assertIn('units', new_data_arr.coords['longitude'].attrs)
        # self.assertEqual(
        #     new_data_arr.coords['longitude'].attrs['units'], 'degrees_east')
        # self.assertIsInstance(new_data_arr.coords['longitude'].data, da.Array)
        # self.assertIn('latitude', new_data_arr.coords)
        # self.assertIn('units', new_data_arr.coords['latitude'].attrs)
        # self.assertEqual(
        #     new_data_arr.coords['latitude'].attrs['units'], 'degrees_north')
        # self.assertIsInstance(new_data_arr.coords['latitude'].data, da.Array)

        if CRS is not None:
            self.assertIn('crs', new_data_arr.coords)
            crs = new_data_arr.coords['crs'].item()
            self.assertIsInstance(crs, CRS)
            self.assertIn('longlat', crs.to_proj4())
            self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS)
class TestBucketAvg(unittest.TestCase):
    """Test the bucket resampler."""

    def setUp(self):
        """Create fake area definitions and resampler to be tested."""
        from satpy.resample import BucketAvg
        get_lonlats = mock.MagicMock()
        get_lonlats.return_value = (1, 2)
        self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats)
        self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats)
        self.bucket = BucketAvg(self.source_geo_def, self.target_geo_def)

    def test_init(self):
        """Test bucket resampler initialization."""
        self.assertIsNone(self.bucket.resampler)
        self.assertTrue(self.bucket.source_geo_def == self.source_geo_def)
        self.assertTrue(self.bucket.target_geo_def == self.target_geo_def)

    @mock.patch('pyresample.bucket.BucketResampler')
    def test_precompute(self, bucket):
        """Test bucket resampler precomputation."""
        bucket.return_value = True
        self.bucket.precompute()
        self.assertTrue(self.bucket.resampler)
        bucket.assert_called_once_with(self.target_geo_def, 1, 2)

    def test_compute(self):
        """Test bucket resampler computation."""
        import dask.array as da
        # 1D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((5,))
        self.bucket.resampler.get_average.return_value = data
        res = self.bucket.compute(data, fill_value=2)
        self.bucket.resampler.get_average.assert_called_once_with(
            data, fill_value=2, mask_all_nan=False)
        self.assertEqual(res.shape, (1, 5))
        # 2D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((5, 5))
        self.bucket.resampler.get_average.return_value = data
        res = self.bucket.compute(data, fill_value=2)
        self.bucket.resampler.get_average.assert_called_once_with(
            data, fill_value=2, mask_all_nan=False)
        self.assertEqual(res.shape, (1, 5, 5))
        # 3D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((3, 5, 5))
        self.bucket.resampler.get_average.return_value = data[0, :, :]
        res = self.bucket.compute(data, fill_value=2)
        self.assertEqual(res.shape, (3, 5, 5))

    @mock.patch('pyresample.bucket.BucketResampler')
    def test_resample(self, pyresample_bucket):
        """Test bucket resamplers resample method."""
        import xarray as xr
        import dask.array as da
        self.bucket.resampler = mock.MagicMock()
        self.bucket.precompute = mock.MagicMock()
        self.bucket.compute = mock.MagicMock()
        # 1D input data
        data = xr.DataArray(da.ones((5,)), dims=('foo'), attrs={'bar': 'baz'})
        self.bucket.compute.return_value = da.ones((5, 5))
        res = self.bucket.resample(data)
        self.bucket.precompute.assert_called_once()
        self.bucket.compute.assert_called_once()
        self.assertEqual(res.shape, (5, 5))
        self.assertEqual(res.dims, ('y', 'x'))
        self.assertTrue('bar' in res.attrs)
        self.assertEqual(res.attrs['bar'], 'baz')
        # 2D input data
        data = xr.DataArray(da.ones((5, 5)), dims=('foo', 'bar'))
        self.bucket.compute.return_value = da.ones((5, 5))
        res = self.bucket.resample(data)
        self.assertEqual(res.shape, (5, 5))
        self.assertEqual(res.dims, ('y', 'x'))
        # 3D input data with 'bands' dim
        data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'foo', 'bar'),
                            coords={'bands': ['L']})
        self.bucket.compute.return_value = da.ones((1, 5, 5))
        res = self.bucket.resample(data)
        self.assertEqual(res.shape, (1, 5, 5))
        self.assertEqual(res.dims, ('bands', 'y', 'x'))
        self.assertEqual(res.coords['bands'], ['L'])
        # 3D input data with misc dim names
        data = xr.DataArray(da.ones((3, 5, 5)), dims=('foo', 'bar', 'baz'))
        self.bucket.compute.return_value = da.ones((3, 5, 5))
        res = self.bucket.resample(data)
        self.assertEqual(res.shape, (3, 5, 5))
        self.assertEqual(res.dims, ('foo', 'bar', 'baz'))
class TestBucketSum(unittest.TestCase):
    """Test the sum bucket resampler."""

    def setUp(self):
        """Create fake area definitions and resampler to be tested."""
        from satpy.resample import BucketSum
        get_lonlats = mock.MagicMock()
        get_lonlats.return_value = (1, 2)
        self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats)
        self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats)
        self.bucket = BucketSum(self.source_geo_def, self.target_geo_def)

    def test_compute(self):
        """Test sum bucket resampler computation."""
        import dask.array as da
        # 1D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((5,))
        self.bucket.resampler.get_sum.return_value = data
        res = self.bucket.compute(data)
        self.bucket.resampler.get_sum.assert_called_once_with(
            data, mask_all_nan=False)
        self.assertEqual(res.shape, (1, 5))
        # 2D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((5, 5))
        self.bucket.resampler.get_sum.return_value = data
        res = self.bucket.compute(data)
        self.bucket.resampler.get_sum.assert_called_once_with(
            data, mask_all_nan=False)
        self.assertEqual(res.shape, (1, 5, 5))
        # 3D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((3, 5, 5))
        self.bucket.resampler.get_sum.return_value = data[0, :, :]
        res = self.bucket.compute(data)
        self.assertEqual(res.shape, (3, 5, 5))


class TestBucketCount(unittest.TestCase):
    """Test the count bucket resampler."""

    def setUp(self):
        """Create fake area definitions and resampler to be tested."""
        from satpy.resample import BucketCount
        get_lonlats = mock.MagicMock()
        get_lonlats.return_value = (1, 2)
        self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats)
        self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats)
        self.bucket = BucketCount(self.source_geo_def, self.target_geo_def)

    def test_compute(self):
        """Test count bucket resampler computation."""
        import dask.array as da
        # 1D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((5,))
        self.bucket.resampler.get_count.return_value = data
        res = self.bucket.compute(data)
        self.bucket.resampler.get_count.assert_called_once_with()
        self.assertEqual(res.shape, (1, 5))
        # 2D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((5, 5))
        self.bucket.resampler.get_count.return_value = data
        res = self.bucket.compute(data)
        self.bucket.resampler.get_count.assert_called_once_with()
        self.assertEqual(res.shape, (1, 5, 5))
        # 3D data
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((3, 5, 5))
        self.bucket.resampler.get_count.return_value = data[0, :, :]
        res = self.bucket.compute(data)
        self.assertEqual(res.shape, (3, 5, 5))


class TestBucketFraction(unittest.TestCase):
    """Test the fraction bucket resampler."""

    def setUp(self):
        """Create fake area definitions and resampler to be tested."""
        from satpy.resample import BucketFraction
        get_lonlats = mock.MagicMock()
        get_lonlats.return_value = (1, 2)
        self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats)
        self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats)
        self.bucket = BucketFraction(self.source_geo_def, self.target_geo_def)

    def test_compute(self):
        """Test fraction bucket resampler computation."""
        import dask.array as da
        import numpy as np
        self.bucket.resampler = mock.MagicMock()
        data = da.ones((3, 3))

        # No kwargs given
        _ = self.bucket.compute(data)
        self.bucket.resampler.get_fractions.assert_called_with(
            data, categories=None, fill_value=np.nan)
        # Custom kwargs
        _ = self.bucket.compute(data, categories=[1, 2], fill_value=0)
        self.bucket.resampler.get_fractions.assert_called_with(
            data, categories=[1, 2], fill_value=0)

        # Too many dimensions
        data = da.ones((3, 5, 5))
        with self.assertRaises(ValueError):
            _ = self.bucket.compute(data)

    @mock.patch('pyresample.bucket.BucketResampler')
    def test_resample(self, pyresample_bucket):
        """Test fraction bucket resamplers resample method."""
        import xarray as xr
        import dask.array as da
        import numpy as np
        self.bucket.resampler = mock.MagicMock()
        self.bucket.precompute = mock.MagicMock()
        self.bucket.compute = mock.MagicMock()

        # Fractions return a dict
        data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'y', 'x'))
        arr = da.ones((5, 5))
        self.bucket.compute.return_value = {0: arr, 1: arr, 2: arr}
        res = self.bucket.resample(data)
        self.assertTrue('categories' in res.coords)
        self.assertTrue('categories' in res.dims)
        self.assertTrue(np.all(res.coords['categories'] == np.array([0, 1, 2])))
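
# Sketch of the bucket resamplers tested above; in satpy they are registered
# under the names 'bucket_avg', 'bucket_sum', 'bucket_count' and
# 'bucket_fraction'. This reuses the get_test_data() fixtures from this module.
def _example_bucket_usage():
    """Minimal sketch of bucket-average resampling."""
    import numpy as np
    from satpy.resample import resample_dataset
    ds1, source_area, ds2, swath_def, target_area = get_test_data()
    return resample_dataset(ds2, target_area, resampler='bucket_avg',
                            fill_value=np.nan)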
mysuite.addTest(loader.loadTestsFromTestCase(TestBucketAvg)) mysuite.addTest(loader.loadTestsFromTestCase(TestBucketSum)) mysuite.addTest(loader.loadTestsFromTestCase(TestBucketCount)) mysuite.addTest(loader.loadTestsFromTestCase(TestBucketFraction)) mysuite.addTest(loader.loadTestsFromTestCase(TestCoordinateHelpers)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/test_scene.py000066400000000000000000003107051362525524100177540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Unit tests for scene.py.""" import os import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock # clear the config dir environment variable so it doesn't interfere os.environ.pop("PPP_CONFIG_DIR", None) class TestScene(unittest.TestCase): """Test the scene class.""" def test_init(self): """Test scene initialization.""" import satpy.scene with mock.patch('satpy.scene.Scene.create_reader_instances') as cri: cri.return_value = {} satpy.scene.Scene(filenames=['bla'], reader='blo') cri.assert_called_once_with(filenames=['bla'], reader='blo', reader_kwargs=None) def test_init_str_filename(self): """Test initializing with a single string as filenames.""" import satpy.scene self.assertRaises(ValueError, satpy.scene.Scene, reader='blo', filenames='test.nc') def test_init_with_sensor(self): """Test initializing with a sensor.""" import satpy.scene from satpy.tests.utils import FakeReader with mock.patch('satpy.scene.Scene.create_reader_instances') as cri: cri.return_value = { 'fake_reader': FakeReader('fake_reader', sensor_name='fake_sensor'), } scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', sensor='fake_sensor') self.assertIsInstance(scene.attrs['sensor'], set) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', sensor=['fake_sensor']) self.assertIsInstance(scene.attrs['sensor'], set) def test_start_end_times(self): """Test start and end times for a scene.""" import satpy.scene from satpy.tests.utils import FakeReader from datetime import datetime with mock.patch('satpy.scene.Scene.create_reader_instances') as cri: r = FakeReader('fake_reader', start_time=datetime(2017, 1, 1, 0, 0, 0), end_time=datetime(2017, 1, 1, 1, 0, 0), ) cri.return_value = {'fake_reader': r} scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', sensor='fake_sensor') self.assertEqual(scene.start_time, r.start_time) self.assertEqual(scene.end_time, r.end_time) def test_init_preserve_reader_kwargs(self): """Test that the initialization preserves the kwargs.""" import satpy.scene from satpy.tests.utils import FakeReader from datetime import datetime with mock.patch('satpy.scene.Scene.create_reader_instances') as cri: r = FakeReader('fake_reader', start_time=datetime(2017, 1, 1, 0, 0, 0), end_time=datetime(2017, 1, 1, 1, 0, 0),
) cri.return_value = {'fake_reader': r} reader_kwargs = {'calibration_type': 'gsics'} scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', sensor='fake_sensor', filter_parameters={'area': 'euron1'}, reader_kwargs=reader_kwargs) self.assertIsNot(reader_kwargs, cri.call_args[1]['reader_kwargs']) self.assertEqual(scene.start_time, r.start_time) self.assertEqual(scene.end_time, r.end_time) def test_init_alone(self): """Test simple initialization.""" from satpy.scene import Scene from satpy.config import PACKAGE_CONFIG_PATH scn = Scene() self.assertEqual(scn.ppp_config_dir, PACKAGE_CONFIG_PATH) self.assertFalse(scn.readers, 'Empty scene should not load any readers') def test_init_no_files(self): """Test that providing an empty list of filenames fails.""" from satpy.scene import Scene self.assertRaises(ValueError, Scene, reader='viirs_sdr', filenames=[]) def test_init_with_ppp_config_dir(self): """Test initializing with a ppp_config_dir.""" from satpy.scene import Scene scn = Scene(ppp_config_dir="foo") self.assertEqual(scn.ppp_config_dir, 'foo') def test_create_reader_instances_with_filenames(self): """Test creating a reader providing filenames.""" import satpy.scene filenames = ["bla", "foo", "bar"] reader_name = None with mock.patch('satpy.scene.Scene._compute_metadata_from_readers') as md: md.return_value = {'sensor': {'sensor'}} with mock.patch('satpy.scene.load_readers') as findermock: satpy.scene.Scene(filenames=filenames) findermock.assert_called_once_with( filenames=filenames, reader=reader_name, reader_kwargs=None, ppp_config_dir=mock.ANY ) def test_init_with_empty_filenames(self): """Test initialization with empty filename list.""" from satpy.scene import Scene filenames = [] Scene(filenames=filenames) # TODO: Rewrite this test for the 'find_files_and_readers' function # def test_create_reader_instances_with_sensor(self): # import satpy.scene # sensors = ["bla", "foo", "bar"] # filenames = None # reader_name = None # with mock.patch('satpy.scene.Scene._compute_metadata_from_readers'): # with mock.patch('satpy.scene.load_readers') as findermock: # scene = satpy.scene.Scene(sensor=sensors) # findermock.assert_called_once_with( # ppp_config_dir=mock.ANY, # reader=reader_name, # filenames=filenames, # reader_kwargs=None, # ) # def test_create_reader_instances_with_sensor_and_filenames(self): # import satpy.scene # sensors = ["bla", "foo", "bar"] # filenames = ["1", "2", "3"] # reader_name = None # with mock.patch('satpy.scene.Scene._compute_metadata_from_readers'): # with mock.patch('satpy.scene.load_readers') as findermock: # scene = satpy.scene.Scene(sensor=sensors, filenames=filenames) # findermock.assert_called_once_with( # ppp_config_dir=mock.ANY, # reader=reader_name, # sensor=sensors, # filenames=filenames, # reader_kwargs=None, # ) def test_create_reader_instances_with_reader(self): """Test creating a reader instance providing the reader name.""" from satpy.scene import Scene reader = "foo" filenames = ["1", "2", "3"] with mock.patch('satpy.scene.load_readers') as findermock: findermock.return_value = {} Scene(reader=reader, filenames=filenames) findermock.assert_called_once_with(ppp_config_dir=mock.ANY, reader=reader, filenames=filenames, reader_kwargs=None, ) def test_create_reader_instances_with_reader_kwargs(self): """Test creating a reader instance with reader kwargs.""" import satpy.scene from satpy.tests.utils import FakeReader from datetime import datetime filenames = ["1", "2", "3"] reader_kwargs = {'calibration_type': 'gsics'} filter_parameters = {'area':
'euron1'} reader_kwargs2 = {'calibration_type': 'gsics', 'filter_parameters': filter_parameters} with mock.patch('satpy.readers.load_reader') as lr_mock: r = FakeReader('fake_reader', start_time=datetime(2017, 1, 1, 0, 0, 0), end_time=datetime(2017, 1, 1, 1, 0, 0), ) lr_mock.return_value = r r.select_files_from_pathnames = mock.MagicMock() r.select_files_from_pathnames.return_value = filenames r.create_filehandlers = mock.MagicMock() scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', sensor='fake_sensor', filter_parameters={'area': 'euron1'}, reader_kwargs=reader_kwargs) del scene self.assertDictEqual(reader_kwargs, r.create_filehandlers.call_args[1]['fh_kwargs']) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', sensor='fake_sensor', reader_kwargs=reader_kwargs2) self.assertDictEqual(reader_kwargs, r.create_filehandlers.call_args[1]['fh_kwargs']) del scene def test_iter(self): """Test iteration over the scene.""" from satpy import Scene from xarray import DataArray import numpy as np scene = Scene() scene["1"] = DataArray(np.arange(5)) scene["2"] = DataArray(np.arange(5)) scene["3"] = DataArray(np.arange(5)) for x in scene: self.assertIsInstance(x, DataArray) def test_iter_by_area_swath(self): """Test iterating by area on a swath.""" from satpy import Scene from xarray import DataArray from pyresample.geometry import SwathDefinition import numpy as np scene = Scene() sd = SwathDefinition(lons=np.arange(5), lats=np.arange(5)) scene["1"] = DataArray(np.arange(5), attrs={'area': sd}) scene["2"] = DataArray(np.arange(5), attrs={'area': sd}) scene["3"] = DataArray(np.arange(5)) for area_obj, ds_list in scene.iter_by_area(): ds_list_names = set(ds.name for ds in ds_list) if area_obj is sd: self.assertSetEqual(ds_list_names, {'1', '2'}) else: self.assertIsNone(area_obj) self.assertSetEqual(ds_list_names, {'3'}) def test_bad_setitem(self): """Test setting an item wrongly.""" from satpy import Scene import numpy as np scene = Scene() self.assertRaises(ValueError, scene.__setitem__, '1', np.arange(5)) def test_setitem(self): """Test setting an item.""" from satpy import Scene, DatasetID import numpy as np import xarray as xr scene = Scene() scene["1"] = ds1 = xr.DataArray(np.arange(5)) expected_id = DatasetID.from_dict(ds1.attrs) self.assertSetEqual(set(scene.datasets.keys()), {expected_id}) self.assertSetEqual(set(scene.wishlist), {expected_id}) def test_getitem(self): """Test __getitem__ with names only.""" from satpy import Scene from xarray import DataArray import numpy as np scene = Scene() scene["1"] = ds1 = DataArray(np.arange(5)) scene["2"] = ds2 = DataArray(np.arange(5)) scene["3"] = ds3 = DataArray(np.arange(5)) self.assertIs(scene['1'], ds1) self.assertIs(scene['2'], ds2) self.assertIs(scene['3'], ds3) self.assertRaises(KeyError, scene.__getitem__, '4') self.assertIs(scene.get('3'), ds3) self.assertIs(scene.get('4'), None) def test_getitem_modifiers(self): """Test __getitem__ with names and modifiers.""" from satpy import Scene, DatasetID from xarray import DataArray import numpy as np # Return least modified item scene = Scene() scene['1'] = ds1_m0 = DataArray(np.arange(5)) scene[DatasetID(name='1', modifiers=('mod1',)) ] = ds1_m1 = DataArray(np.arange(5)) self.assertIs(scene['1'], ds1_m0) self.assertEqual(len(list(scene.keys())), 2) scene = Scene() scene['1'] = ds1_m0 = DataArray(np.arange(5)) scene[DatasetID(name='1', modifiers=('mod1',)) ] = ds1_m1 = DataArray(np.arange(5)) scene[DatasetID(name='1', modifiers=('mod1', 'mod2')) ] = ds1_m2 = DataArray(np.arange(5)) 
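# descriptive note (added): with several modified variants stored under the same name, a plain name lookup is expected to return the least modified one (ds1_m0 here)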
self.assertIs(scene['1'], ds1_m0) self.assertEqual(len(list(scene.keys())), 3) scene = Scene() scene[DatasetID(name='1', modifiers=('mod1', 'mod2')) ] = ds1_m2 = DataArray(np.arange(5)) scene[DatasetID(name='1', modifiers=('mod1',)) ] = ds1_m1 = DataArray(np.arange(5)) self.assertIs(scene['1'], ds1_m1) self.assertIs(scene[DatasetID('1', modifiers=('mod1', 'mod2'))], ds1_m2) self.assertRaises(KeyError, scene.__getitem__, DatasetID(name='1', modifiers=tuple())) self.assertEqual(len(list(scene.keys())), 2) def test_getitem_slices(self): """Test __getitem__ with slices.""" from satpy import Scene from xarray import DataArray from pyresample.geometry import AreaDefinition, SwathDefinition from pyresample.utils import proj4_str_to_dict import numpy as np scene1 = Scene() scene2 = Scene() proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. +lat_0=25 +lat_1=25 ' '+units=m +no_defs') area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) swath_def = SwathDefinition(lons=np.zeros((5, 10)), lats=np.zeros((5, 10))) scene1["1"] = scene2["1"] = DataArray(np.zeros((5, 10))) scene1["2"] = scene2["2"] = DataArray(np.zeros((5, 10)), dims=('y', 'x')) scene1["3"] = DataArray(np.zeros((5, 10)), dims=('y', 'x'), attrs={'area': area_def}) anc_vars = [DataArray(np.ones((5, 10)), attrs={'name': 'anc_var', 'area': area_def})] attrs = {'ancillary_variables': anc_vars, 'area': area_def} scene1["3a"] = DataArray(np.zeros((5, 10)), dims=('y', 'x'), attrs=attrs) scene2["4"] = DataArray(np.zeros((5, 10)), dims=('y', 'x'), attrs={'area': swath_def}) anc_vars = [DataArray(np.ones((5, 10)), attrs={'name': 'anc_var', 'area': swath_def})] attrs = {'ancillary_variables': anc_vars, 'area': swath_def} scene2["4a"] = DataArray(np.zeros((5, 10)), dims=('y', 'x'), attrs=attrs) new_scn1 = scene1[2:5, 2:8] new_scn2 = scene2[2:5, 2:8] for new_scn in [new_scn1, new_scn2]: # datasets without an area don't get sliced self.assertTupleEqual(new_scn['1'].shape, (5, 10)) self.assertTupleEqual(new_scn['2'].shape, (5, 10)) self.assertTupleEqual(new_scn1['3'].shape, (3, 6)) self.assertIn('area', new_scn1['3'].attrs) self.assertTupleEqual(new_scn1['3'].attrs['area'].shape, (3, 6)) self.assertTupleEqual(new_scn1['3a'].shape, (3, 6)) a_var = new_scn1['3a'].attrs['ancillary_variables'][0] self.assertTupleEqual(a_var.shape, (3, 6)) self.assertTupleEqual(new_scn2['4'].shape, (3, 6)) self.assertIn('area', new_scn2['4'].attrs) self.assertTupleEqual(new_scn2['4'].attrs['area'].shape, (3, 6)) self.assertTupleEqual(new_scn2['4a'].shape, (3, 6)) a_var = new_scn2['4a'].attrs['ancillary_variables'][0] self.assertTupleEqual(a_var.shape, (3, 6)) def test_crop(self): """Test the crop method.""" from satpy import Scene from xarray import DataArray from pyresample.geometry import AreaDefinition import numpy as np scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} x_size = 3712 y_size = 3712 area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( 'test2', 'test2', 'test2', proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = DataArray(np.zeros((y_size, x_size))) scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), attrs={'area': area_def}) scene1["4"] = 
DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), attrs={'area': area_def2}) # by area crop_area = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, (area_extent[0] + 10000., area_extent[1] + 500000., area_extent[2] - 10000., area_extent[3] - 500000.) ) new_scn1 = scene1.crop(crop_area) self.assertIn('1', new_scn1) self.assertIn('2', new_scn1) self.assertIn('3', new_scn1) self.assertTupleEqual(new_scn1['1'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['2'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['3'].shape, (3380, 3708)) self.assertTupleEqual(new_scn1['4'].shape, (1690, 1854)) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) self.assertIn('1', new_scn1) self.assertIn('2', new_scn1) self.assertIn('3', new_scn1) self.assertTupleEqual(new_scn1['1'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['2'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['3'].shape, (184, 714)) self.assertTupleEqual(new_scn1['4'].shape, (92, 357)) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(-200000., -100000., 0, 0)) self.assertIn('1', new_scn1) self.assertIn('2', new_scn1) self.assertIn('3', new_scn1) self.assertTupleEqual(new_scn1['1'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['2'].shape, (y_size, x_size)) self.assertTupleEqual(new_scn1['3'].shape, (36, 70)) self.assertTupleEqual(new_scn1['4'].shape, (18, 35)) def test_crop_rgb(self): """Test the crop method on multi-dimensional data.""" from satpy import Scene from xarray import DataArray from pyresample.geometry import AreaDefinition import numpy as np scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} x_size = 3712 y_size = 3712 area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( 'test2', 'test2', 'test2', proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = DataArray(np.zeros((3, y_size, x_size)), dims=('bands', 'y', 'x'), attrs={'area': area_def}) scene1["2"] = DataArray(np.zeros((y_size // 2, 3, x_size // 2)), dims=('y', 'bands', 'x'), attrs={'area': area_def2}) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) self.assertIn('1', new_scn1) self.assertIn('2', new_scn1) self.assertIn('bands', new_scn1['1'].dims) self.assertIn('bands', new_scn1['2'].dims) self.assertTupleEqual(new_scn1['1'].shape, (3, 184, 714)) self.assertTupleEqual(new_scn1['2'].shape, (92, 3, 357)) def test_aggregate(self): """Test the aggregate method.""" if (sys.version_info < (3, 0)): self.skipTest("Not implemented in python 2 (xarray).") from satpy import Scene from xarray import DataArray from pyresample.geometry import AreaDefinition import numpy as np scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} x_size = 3712 y_size = 3712 area_def = AreaDefinition( 'test', 'test', 'test', proj_dict, x_size, y_size, area_extent, ) scene1["1"] = DataArray(np.ones((y_size, x_size))) scene1["2"] = DataArray(np.ones((y_size, x_size)), dims=('y', 'x')) scene1["3"] = DataArray(np.ones((y_size, x_size)), dims=('y', 'x'), attrs={'area': area_def}) scene2 = scene1.aggregate(func='sum', x=2, y=2) self.assertIs(scene1['1'], scene2['1']) self.assertIs(scene1['2'], scene2['2']) 
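# descriptive note (added): datasets without an area are passed through unchanged by aggregate(); '3' carries an area, so summing ones over 2x2 blocks yields 4 everywhere at half the original resolution, as asserted below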
np.testing.assert_allclose(scene2['3'].data, 4) self.assertTupleEqual(scene2['1'].shape, (y_size, x_size)) self.assertTupleEqual(scene2['2'].shape, (y_size, x_size)) self.assertTupleEqual(scene2['3'].shape, (y_size / 2, x_size / 2)) def test_contains(self): """Test contains.""" from satpy import Scene from xarray import DataArray import numpy as np scene = Scene() scene["1"] = DataArray(np.arange(5), attrs={'wavelength': (0.1, 0.2, 0.3)}) self.assertTrue('1' in scene) self.assertTrue(0.15 in scene) self.assertFalse('2' in scene) self.assertFalse(0.31 in scene) def test_delitem(self): """Test deleting an item.""" from satpy import Scene from xarray import DataArray import numpy as np scene = Scene() scene["1"] = DataArray(np.arange(5), attrs={'wavelength': (0.1, 0.2, 0.3)}) scene["2"] = DataArray(np.arange(5), attrs={'wavelength': (0.4, 0.5, 0.6)}) scene["3"] = DataArray(np.arange(5), attrs={'wavelength': (0.7, 0.8, 0.9)}) del scene['1'] del scene['3'] del scene[0.45] self.assertEqual(len(scene.wishlist), 0) self.assertEqual(len(scene.datasets.keys()), 0) self.assertRaises(KeyError, scene.__delitem__, 0.2) def test_min_max_area(self): """Test 'min_area' and 'max_area' methods.""" from satpy import Scene from xarray import DataArray from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict import numpy as np scene = Scene() scene["1"] = ds1 = DataArray(np.arange(10).reshape((2, 5)), attrs={'wavelength': (0.1, 0.2, 0.3)}) scene["2"] = ds2 = DataArray(np.arange(40).reshape((4, 10)), attrs={'wavelength': (0.4, 0.5, 0.6)}) scene["3"] = ds3 = DataArray(np.arange(40).reshape((4, 10)), attrs={'wavelength': (0.7, 0.8, 0.9)}) proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. +lat_0=25 +lat_1=25 ' '+units=m +no_defs') area_def1 = AreaDefinition( 'test', 'test', 'test', proj_dict, 100, 200, (-1000., -1500., 1000., 1500.), ) area_def2 = AreaDefinition( 'test', 'test', 'test', proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) ds1.attrs['area'] = area_def1 ds2.attrs['area'] = area_def2 ds3.attrs['area'] = area_def2 self.assertIs(scene.min_area(), area_def1) self.assertIs(scene.max_area(), area_def2) self.assertIs(scene.min_area(['2', '3']), area_def2) def test_all_datasets_no_readers(self): """Test all datasets with no reader.""" from satpy import Scene scene = Scene() self.assertRaises(KeyError, scene.all_dataset_ids, reader_name='fake') id_list = scene.all_dataset_ids() self.assertListEqual(id_list, []) # no sensors are loaded so we shouldn't get any comps either id_list = scene.all_dataset_ids(composites=True) self.assertListEqual(id_list, []) def test_all_dataset_names_no_readers(self): """Test all dataset names with no reader.""" from satpy import Scene scene = Scene() self.assertRaises(KeyError, scene.all_dataset_names, reader_name='fake') name_list = scene.all_dataset_names() self.assertListEqual(name_list, []) # no sensors are loaded so we shouldn't get any comps either name_list = scene.all_dataset_names(composites=True) self.assertListEqual(name_list, []) def test_available_dataset_no_readers(self): """Test the available datasets without a reader.""" from satpy import Scene scene = Scene() self.assertRaises( KeyError, scene.available_dataset_ids, reader_name='fake') name_list = scene.available_dataset_ids() self.assertListEqual(name_list, []) # no sensors are loaded so we shouldn't get any comps either name_list = scene.available_dataset_ids(composites=True) self.assertListEqual(name_list, []) def 
test_available_dataset_names_no_readers(self): """Test the available dataset names without a reader.""" from satpy import Scene scene = Scene() self.assertRaises( KeyError, scene.available_dataset_names, reader_name='fake') name_list = scene.available_dataset_names() self.assertListEqual(name_list, []) # no sensors are loaded so we shouldn't get any comps either name_list = scene.available_dataset_names(composites=True) self.assertListEqual(name_list, []) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_all_datasets_one_reader(self, cri, cl): """Test all datasets for one reader.""" from satpy import Scene from satpy.tests.utils import FakeReader, test_composites r = FakeReader('fake_reader', 'fake_sensor') cri.return_value = {'fake_reader': r} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # patch the cpl scene.cpl.compositors = comps scene.cpl.modifiers = mods id_list = scene.all_dataset_ids() self.assertEqual(len(id_list), len(r.all_ids)) id_list = scene.all_dataset_ids(composites=True) self.assertEqual(len(id_list), len(r.all_ids) + 28) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_all_datasets_multiple_reader(self, cri, cl): """Test all datasets for multiple readers.""" from satpy import Scene from satpy.tests.utils import FakeReader, test_composites r = FakeReader('fake_reader', 'fake_sensor', datasets=['ds1']) r2 = FakeReader( 'fake_reader2', 'fake_sensor2', datasets=['ds2']) cri.return_value = {'fake_reader': r, 'fake_reader2': r2} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # patch the cpl scene.cpl.compositors = comps scene.cpl.modifiers = mods id_list = scene.all_dataset_ids() self.assertEqual(len(id_list), 2) id_list = scene.all_dataset_ids(composites=True) # ds1 and ds2 => 2 # composites that use these two datasets => 10 self.assertEqual(len(id_list), 2 + 10) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_available_datasets_one_reader(self, cri, cl): """Test the available datasets for one reader.""" from satpy import Scene from satpy.tests.utils import FakeReader, test_composites r = FakeReader('fake_reader', 'fake_sensor', datasets=['ds1']) cri.return_value = {'fake_reader': r} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # patch the cpl scene.cpl.compositors = comps scene.cpl.modifiers = mods id_list = scene.available_dataset_ids() self.assertEqual(len(id_list), 1) id_list = scene.available_dataset_ids(composites=True) # ds1, comp1, comp14, comp16, static_image self.assertEqual(len(id_list), 5) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_available_composite_ids_missing_available(self, cri, cl): """Test available_composite_ids when a composite's dep is missing.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites # only ds1 is available available_datasets = ['ds1'] cri.return_value = { 'fake_reader': FakeReader( 'fake_reader', 'fake_sensor', available_datasets=available_datasets), } comps, mods =
test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') self.assertNotIn('comp2', scene.available_composite_names()) class TestSceneLoading(unittest.TestCase): """Test the Scene object's `.load` method.""" @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_no_exist(self, cri, cl): """Test loading a dataset that doesn't exist.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') self.assertRaises(KeyError, scene.load, [ 'im_a_dataset_that_doesnt_exist']) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_no_exist2(self, cri, cl): """Test loading a dataset that doesn't exist, followed by another load.""" from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID, Scene r = FakeReader('fake_reader', 'fake_sensor') cri.return_value = {'fake_reader': r} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['ds9_fail_load']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 0) r.load.assert_called_once_with( set([DatasetID(name='ds9_fail_load', wavelength=(1.0, 1.1, 1.2))])) scene.load(['ds1']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(r.load.call_count, 2) # most recent call should have only been ds1 r.load.assert_called_with(set([DatasetID(name='ds1')])) self.assertEqual(len(loaded_ids), 1) @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds1_no_comps(self, cri): """Test loading one dataset with no loaded compositors.""" import satpy.scene from satpy.tests.utils import FakeReader from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['ds1']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='ds1'))) @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds1_load_twice(self, cri): """Test that loading an already loaded dataset does not read it again.""" import satpy.scene from satpy.tests.utils import FakeReader from satpy import DatasetID r = FakeReader('fake_reader', 'fake_sensor') cri.return_value = {'fake_reader': r} scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['ds1']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='ds1'))) with mock.patch.object(r, 'load') as m: scene.load(['ds1']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='ds1'))) self.assertFalse( m.called, "Reader.load was called again when loading something that's already loaded") @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds1_unknown_modifier(self, cri, cl):
"""Test loading one dataset with no loaded compositors.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') self.assertRaises(KeyError, scene.load, [DatasetID(name='ds1', modifiers=('_fake_bad_mod_',))]) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds4_cal(self, cri, cl): """Test loading a dataset that has two calibration variations.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['ds4']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertEqual(loaded_ids[0].calibration, 'reflectance') @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds5_best_resolution(self, cri, cl): """Test loading a dataset has multiple resolutions available.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['ds5']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertEqual(loaded_ids[0].name, 'ds5') self.assertEqual(loaded_ids[0].resolution, 250) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds5_multiple_resolution(self, cri, cl): """Test loading a dataset has multiple resolutions available with different resolutions.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['ds5'], resolution=1000) scene.load(['ds5'], resolution=500) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) self.assertEqual(loaded_ids[0].name, 'ds5') self.assertEqual(loaded_ids[0].resolution, 500) self.assertEqual(loaded_ids[1].name, 'ds5') self.assertEqual(loaded_ids[1].resolution, 1000) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds5_missing_best_resolution(self, cri, cl): """Test loading a dataset that has multiple resolutions but the best isn't available.""" import satpy.scene from satpy import DatasetID from satpy.tests.utils import FakeReader, test_composites # only the 500m is available available_datasets = [DatasetID('ds5', resolution=500)] cri.return_value = { 'fake_reader': FakeReader( 'fake_reader', 'fake_sensor', datasets=['ds5'], available_datasets=available_datasets), } comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = 
satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['ds5']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertEqual(loaded_ids[0].name, 'ds5') self.assertEqual(loaded_ids[0].resolution, 500) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds6_wl(self, cri, cl): """Test loading a dataset by wavelength.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load([0.22]) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertEqual(loaded_ids[0].name, 'ds6') @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_ds9_fail_load(self, cri, cl): """Test loading a dataset that will fail during load.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['ds9_fail_load']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 0) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp1(self, cri, cl): """Test loading a composite with one required prereq.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp1']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp1'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp4(self, cri, cl): """Test loading a composite that depends on a composite.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp4']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp4'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_multiple_resolutions(self, cri, cl): """Test loading a composite at two different resolutions.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor')
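# descriptive note (added): hand the fake sensor's compositors and modifiers to the mocked CompositorLoader so dependency resolution can run without real config files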
cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') comp25 = DatasetID(name='comp25', resolution=1000) scene[comp25] = 'bla' scene.load(['comp25'], resolution=500) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) self.assertEqual(loaded_ids[0].name, 'comp25') self.assertEqual(loaded_ids[0].resolution, 500) self.assertEqual(loaded_ids[1].name, 'comp25') self.assertEqual(loaded_ids[1].resolution, 1000) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_same_subcomposite(self, cri, cl): """Test loading a composite and one of it's subcomposites at the same time.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp24', 'comp25'], resolution=500) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) self.assertEqual(loaded_ids[0].name, 'comp24') self.assertEqual(loaded_ids[0].resolution, 500) self.assertEqual(loaded_ids[1].name, 'comp25') self.assertEqual(loaded_ids[1].resolution, 500) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp5(self, cri, cl): """Test loading a composite that has an optional prerequisite.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp5']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp5'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp6(self, cri, cl): """Test loading a composite that has an optional composite prerequisite.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp6']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp6'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp8(self, cri, cl): """Test loading a composite that has a non-existent prereq.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') self.assertRaises(KeyError, scene.load, ['comp8']) @mock.patch('satpy.composites.CompositorLoader.load_compositors') 
@mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp9(self, cri, cl): """Test loading a composite that has a non-existent optional prereq.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp9']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp9'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp10(self, cri, cl): """Test loading a composite that depends on a modified dataset.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp10']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp10'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp11(self, cri, cl): """Test loading a composite that depends on all wavelengths.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp11']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp11'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp12(self, cri, cl): """Test loading a composite that depends on all wavelengths that get modified.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp12']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp12'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp13(self, cri, cl): """Test loading a composite that depends on a modified dataset where the resolution changes.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader(
'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp13']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp13'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp14(self, cri, cl): """Test loading a composite that updates the DatasetID during generation.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp14']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertEqual(loaded_ids[0].name, 'comp14') @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp15(self, cri, cl): """Test loading a composite whose prerequisites can't be loaded. Note that the prereq exists in the reader, but fails in loading. """ import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp15']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 0) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp16(self, cri, cl): """Test loading a composite whose opt prereq can't be loaded. 
Note that the prereq exists in the reader, but fails in loading. """ import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp16']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertEqual(loaded_ids[0].name, 'comp16') @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp17(self, cri, cl): """Test loading a composite that depends on a composite that won't load.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp17']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 0) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp18(self, cri, cl): """Test loading a composite that depends on an incompatible area modified dataset.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp18']) loaded_ids = list(scene.datasets.keys()) # depends on: # ds3 # ds4 (mod1, mod3) # ds5 (mod1, incomp_areas) # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # for the incomp_areas modifier self.assertEqual(len(loaded_ids), 4) # the direct dependencies self.assertIn('ds3', scene.datasets) self.assertIn(DatasetID(name='ds4', calibration='reflectance', modifiers=('mod1', 'mod3')), scene.datasets) self.assertIn(DatasetID(name='ds5', resolution=250, modifiers=('mod1',)), scene.datasets) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp18_2(self, cri, cl): """Test loading a composite that depends on an incompatible area modified dataset. Specifically, a modified dataset whose modifier has optional dependencies.
""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp18_2']) loaded_ids = list(scene.datasets.keys()) # depends on: # ds3 # ds4 (mod1, mod3) # ds5 (mod1, incomp_areas_opt) # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # and ds2 for the incomp_areas_opt modifier self.assertEqual(len(loaded_ids), 5) # the 1 dependencies self.assertIn('ds3', scene.datasets) self.assertIn('ds2', scene.datasets) self.assertIn(DatasetID(name='ds4', calibration='reflectance', modifiers=('mod1', 'mod3')), scene.datasets) self.assertIn(DatasetID(name='ds5', resolution=250, modifiers=('mod1',)), scene.datasets) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp19(self, cri, cl): """Test loading a composite that shares a dep with a dependency. More importantly test that loading a dependency that depends on the same dependency as this composite (a sibling dependency) and that sibling dependency includes a modifier. This test makes sure that the Node in the dependency tree is the exact same node. """ import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # Check dependency tree nodes # initialize the dep tree without loading the data scene.dep_tree.find_dependencies({'comp19'}) this_node = scene.dep_tree['comp19'] shared_dep_id = DatasetID(name='ds5', modifiers=('res_change',)) shared_dep_expected_node = scene.dep_tree[shared_dep_id] # get the node for the first dep in the prereqs list of the # comp13 node shared_dep_node = scene.dep_tree['comp13'].data[1][0] shared_dep_node2 = this_node.data[1][0] self.assertIs(shared_dep_expected_node, shared_dep_node) self.assertIs(shared_dep_expected_node, shared_dep_node2) # it is fine that an optional prereq doesn't exist scene.load(['comp19']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp19'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_multiple_comps(self, cri, cl): """Test loading multiple composites.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp1', 'comp2', 'comp3', 'comp4', 'comp5', 'comp6', 'comp7', 'comp9', 'comp10']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 9) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_multiple_comps_separate(self, cri, cl): """Test loading multiple composites, one at a time.""" import satpy.scene from 
satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp10']) scene.load(['comp9']) scene.load(['comp7']) scene.load(['comp6']) scene.load(['comp5']) scene.load(['comp4']) scene.load(['comp3']) scene.load(['comp2']) scene.load(['comp1']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 9) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_modified(self, cri, cl): """Test loading a modified dataset.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load([DatasetID(name='ds1', modifiers=('mod1', 'mod2'))]) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual(loaded_ids[0].modifiers, ('mod1', 'mod2')) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_multiple_modified(self, cri, cl): """Test loading multiple modified datasets.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load([ DatasetID(name='ds1', modifiers=('mod1', 'mod2')), DatasetID(name='ds2', modifiers=('mod2', 'mod1')), ]) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) for i in loaded_ids: if i.name == 'ds1': self.assertTupleEqual(i.modifiers, ('mod1', 'mod2')) else: self.assertEqual(i.name, 'ds2') self.assertTupleEqual(i.modifiers, ('mod2', 'mod1')) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_dataset_after_composite(self, cri, cl): """Test load composite followed by other datasets.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites r = FakeReader('fake_reader', 'fake_sensor') cri.return_value = {'fake_reader': r} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp3']) self.assertEqual(r.load.call_count, 1) scene.load(['ds1']) self.assertEqual(r.load.call_count, 2) scene.load(['ds1']) # we should only load from the file twice self.assertEqual(r.load.call_count, 2) # we should only generate the composite once self.assertEqual(comps['fake_sensor'][ 'comp3'].side_effect.call_count, 1) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_dataset_after_composite2(self, cri, cl): """Test load complex composite followed by other datasets.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from 
satpy import DatasetID r = FakeReader('fake_reader', 'fake_sensor') cri.return_value = {'fake_reader': r} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp10']) self.assertEqual(r.load.call_count, 1) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) with mock.patch.object(scene, '_read_composites', wraps=scene._read_composites) as m: scene.load(['ds1']) self.assertEqual(r.load.call_count, 2) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) # this is the unmodified ds1 self.assertIn(DatasetID(name='ds1'), loaded_ids) # m.assert_called_once_with(set([scene.dep_tree['ds1']])) m.assert_called_once_with(set()) with mock.patch.object(scene, '_read_composites', wraps=scene._read_composites) as m: scene.load(['ds1']) self.assertEqual(r.load.call_count, 2) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) # this is the unmodified ds1 self.assertIn(DatasetID(name='ds1'), loaded_ids) m.assert_called_once_with(set()) # we should only generate the composite once self.assertEqual(comps['fake_sensor'][ 'comp10'].side_effect.call_count, 1) # Create the modded ds1 at comp10, then load the unmodified version # again self.assertEqual(comps['fake_sensor']['ds1']._call_mock.call_count, 1) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp20(self, cri, cl): """Test loading composite with optional modifier dependencies.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp20']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp20'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp21(self, cri, cl): """Test loading composite with bad optional modifier dependencies.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp21']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp21'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp22(self, cri, cl): """Test loading composite with only optional modifier dependencies.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value
= (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp22']) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 1) self.assertTupleEqual( tuple(loaded_ids[0]), tuple(DatasetID(name='comp22'))) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_no_generate_comp10(self, cri, cl): """Test generating a composite after loading.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # it is fine that an optional prereq doesn't exist scene.load(['comp10'], generate=False) self.assertTrue(any(ds_id == 'comp10' for ds_id in scene.wishlist)) self.assertNotIn('comp10', scene.datasets) # two dependencies should have been loaded self.assertEqual(len(scene.datasets), 2) self.assertEqual(len(scene.missing_datasets), 1) scene.generate_composites() self.assertTrue(any(ds_id == 'comp10' for ds_id in scene.wishlist)) self.assertIn('comp10', scene.datasets) self.assertEqual(len(scene.missing_datasets), 0) @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_modified_with_wl_dep(self, cri, cl): """Test modifying a dataset with a modifier with modified deps. More importantly test that loading the modifiers dependency at the same time as the original modified dataset that the dependency tree nodes are unique and that DatasetIDs. 
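    # Note on the pattern above: ``generate=False`` asks Scene.load to read a
    # composite's prerequisites without running the compositor, so a later
    # call to ``generate_composites()`` can build it on demand; see the sketch
    # after this class for the same workflow outside of a test.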
""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # Check dependency tree nodes # initialize the dep tree without loading the data ds1_mod_id = DatasetID(name='ds1', modifiers=('mod_wl',)) ds3_mod_id = DatasetID(name='ds3', modifiers=('mod_wl',)) scene.dep_tree.find_dependencies({ds1_mod_id, ds3_mod_id}) ds1_mod_node = scene.dep_tree[ds1_mod_id] ds3_mod_node = scene.dep_tree[ds3_mod_id] ds1_mod_dep_node = ds1_mod_node.data[1][1] ds3_mod_dep_node = ds3_mod_node.data[1][1] # mod_wl depends on the this node: ds6_modded_node = scene.dep_tree[DatasetID(name='ds6', modifiers=('mod1',))] # this dep should be full qualified with name and wavelength self.assertIsNotNone(ds6_modded_node.name.name) self.assertIsNotNone(ds6_modded_node.name.wavelength) self.assertEqual(len(ds6_modded_node.name.wavelength), 3) # the node should be shared between everything that uses it self.assertIs(ds1_mod_dep_node, ds3_mod_dep_node) self.assertIs(ds1_mod_dep_node, ds6_modded_node) # it is fine that an optional prereq doesn't exist scene.load([ds1_mod_id, ds3_mod_id]) loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) self.assertIn(ds1_mod_id, scene.datasets) self.assertIn(ds3_mod_id, scene.datasets) @mock.patch('satpy.composites.CompositorLoader.load_compositors', autospec=True) @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_comp11_and_23(self, cri, cl): """Test loading two composites that depend on similar wavelengths.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID, DatasetDict cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') def _test(self, sensor_names): if not self.compositors: self.compositors = comps self.modifiers = mods new_comps = {} new_mods = {} for sn in sensor_names: new_comps[sn] = DatasetDict( self.compositors[sn].copy()) new_mods[sn] = self.modifiers[sn].copy() return new_comps, new_mods cl.side_effect = _test scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') # mock the available comps/mods in the compositor loader avail_comps = scene.available_composite_ids() self.assertIn(DatasetID(name='comp11'), avail_comps) self.assertIn(DatasetID(name='comp23'), avail_comps) # it is fine that an optional prereq doesn't exist scene.load(['comp11', 'comp23']) comp11_node = scene.dep_tree['comp11'] comp23_node = scene.dep_tree['comp23'] self.assertEqual(comp11_node.data[1][-1].name.name, 'ds10') self.assertEqual(comp23_node.data[1][0].name.name, 'ds8') loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) self.assertIn('comp11', scene.datasets) self.assertIn('comp23', scene.datasets) @mock.patch('satpy.composites.CompositorLoader.load_compositors', autospec=True) @mock.patch('satpy.scene.Scene.create_reader_instances') def test_load_too_many(self, cri, cl): """Test dependency tree if too many reader keys match.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID datasets = [DatasetID(name='duplicate1', wavelength=(0.1, 0.2, 0.3)), DatasetID(name='duplicate2', wavelength=(0.1, 0.2, 0.3))] reader = FakeReader('fake_reader', 
    @mock.patch('satpy.composites.CompositorLoader.load_compositors', autospec=True)
    @mock.patch('satpy.scene.Scene.create_reader_instances')
    def test_load_too_many(self, cri, cl):
        """Test dependency tree if too many reader keys match."""
        import satpy.scene
        from satpy.tests.utils import FakeReader, test_composites
        from satpy import DatasetID
        datasets = [DatasetID(name='duplicate1', wavelength=(0.1, 0.2, 0.3)),
                    DatasetID(name='duplicate2', wavelength=(0.1, 0.2, 0.3))]
        reader = FakeReader('fake_reader', 'fake_sensor', datasets=datasets,
                            filter_datasets=False)
        cri.return_value = {'fake_reader': reader}
        comps, mods = test_composites('fake_sensor')
        cl.return_value = (comps, mods)
        scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader')
        # mock the available comps/mods in the compositor loader
        avail_comps = scene.available_composite_ids()
        # static image => 1
        self.assertEqual(len(avail_comps), 1)
        self.assertRaises(KeyError, scene.load, [0.21])

    @mock.patch('satpy.composites.CompositorLoader.load_compositors', autospec=True)
    @mock.patch('satpy.scene.Scene.create_reader_instances')
    def test_available_comps_no_deps(self, cri, cl):
        """Test Scene available composites when composites don't have a dependency."""
        from satpy.tests.utils import FakeReader, test_composites
        import satpy.scene
        from satpy.readers import DatasetDict
        from satpy import DatasetID

        def _test(self, sensor_names):
            if not self.compositors:
                self.compositors = comps
                self.modifiers = mods
            new_comps = {}
            new_mods = {}
            for sn in sensor_names:
                new_comps[sn] = DatasetDict(self.compositors[sn].copy())
                new_mods[sn] = self.modifiers[sn].copy()
            return new_comps, new_mods

        # fancy magic to make sure the CompositorLoader thinks it has comps
        cl.side_effect = _test
        reader = FakeReader('fake_reader', 'fake_sensor')
        cri.return_value = {'fake_reader': reader}
        comps, mods = test_composites('fake_sensor')
        scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader')
        all_comp_ids = scene.available_composite_ids()
        self.assertIn(DatasetID(name='static_image'), all_comp_ids)
        available_comp_ids = scene.available_composite_ids()
        self.assertIn(DatasetID(name='static_image'), available_comp_ids)

    @mock.patch('satpy.composites.CompositorLoader.load_compositors')
    @mock.patch('satpy.scene.Scene.create_reader_instances')
    def test_empty_node_copy(self, cri, cl):
        """Test copying a dependency tree while keeping the empty node shared."""
        import satpy.scene
        from satpy.tests.utils import FakeReader, test_composites
        cri.return_value = {'fake_reader': FakeReader('fake_reader', 'fake_sensor')}
        comps, mods = test_composites('fake_sensor')
        cl.return_value = (comps, mods)
        scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader')

        # Check dependency tree nodes
        # initialize the dep tree without loading the data
        scene.dep_tree.find_dependencies({'comp19'})
        sc2 = scene.copy()
        self.assertIs(scene.dep_tree.children[0].children[0].children[1],
                      scene.dep_tree.empty_node)
        self.assertIs(scene.dep_tree.children[0].children[0].children[1],
                      sc2.dep_tree.empty_node)
        self.assertIs(sc2.dep_tree.children[0].children[0].children[1],
                      scene.dep_tree.empty_node)
        self.assertIs(sc2.dep_tree.children[0].children[0].children[1],
                      sc2.dep_tree.empty_node)
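# The class above exercises deferred composite generation.  The helper below
# is a minimal illustrative sketch of the same workflow against real data; it
# is not used by the test suite, and the reader name and composite name are
# placeholder assumptions.
def _example_deferred_generation(filenames, reader='abi_l1b'):
    """Sketch: load composite dependencies now, run the compositor later."""
    from satpy import Scene
    scn = Scene(filenames=filenames, reader=reader)
    # read the composite's prerequisites without running the compositor
    scn.load(['true_color'], generate=False)
    # ... resample or otherwise rearrange the data here ...
    scn.generate_composites()  # build the composite on demand
    return scn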
""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) rs.side_effect = self._fake_resample_dataset proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. +lat_0=25 +lat_1=25 ' '+units=m +no_defs') area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader') scene.load(['comp19']) new_scene = scene.resample(area_def) new_scene['new_ds'] = new_scene['comp19'].copy() scene.load(['ds1']) comp19_node = scene.dep_tree['comp19'] ds5_mod_id = DatasetID(name='ds5', modifiers=('res_change',)) ds5_node = scene.dep_tree[ds5_mod_id] comp13_node = scene.dep_tree['comp13'] self.assertIs(comp13_node.data[1][0], comp19_node.data[1][0]) self.assertIs(comp13_node.data[1][0], ds5_node) self.assertRaises(KeyError, scene.dep_tree.__getitem__, 'new_ds') loaded_ids = list(scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) self.assertTupleEqual(tuple(loaded_ids[0]), tuple(DatasetID(name='comp19'))) self.assertTupleEqual(tuple(loaded_ids[1]), tuple(DatasetID(name='ds1'))) loaded_ids = list(new_scene.datasets.keys()) self.assertEqual(len(loaded_ids), 2) self.assertTupleEqual(tuple(loaded_ids[0]), tuple(DatasetID(name='comp19'))) self.assertTupleEqual(tuple(loaded_ids[1]), tuple(DatasetID(name='new_ds'))) @mock.patch('satpy.scene.resample_dataset') @mock.patch('satpy.composites.CompositorLoader.load_compositors') @mock.patch('satpy.scene.Scene.create_reader_instances') def test_resample_reduce_data_toggle(self, cri, cl, rs): """Test that the Scene can be reduced or not reduced during resampling.""" import satpy.scene from satpy.tests.utils import FakeReader, test_composites from satpy import DatasetID from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict import dask.array as da import xarray as xr cri.return_value = {'fake_reader': FakeReader( 'fake_reader', 'fake_sensor')} comps, mods = test_composites('fake_sensor') cl.return_value = (comps, mods) rs.side_effect = self._fake_resample_dataset proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' '+lon_0=-95. 
    @mock.patch('satpy.scene.resample_dataset')
    @mock.patch('satpy.composites.CompositorLoader.load_compositors')
    @mock.patch('satpy.scene.Scene.create_reader_instances')
    def test_resample_reduce_data_toggle(self, cri, cl, rs):
        """Test that the Scene can be reduced or not reduced during resampling."""
        import satpy.scene
        from satpy.tests.utils import FakeReader, test_composites
        from satpy import DatasetID
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        import dask.array as da
        import xarray as xr
        cri.return_value = {'fake_reader': FakeReader('fake_reader', 'fake_sensor')}
        comps, mods = test_composites('fake_sensor')
        cl.return_value = (comps, mods)
        rs.side_effect = self._fake_resample_dataset
        proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                                      '+lon_0=-95. +lat_0=25 +lat_1=25 '
                                      '+units=m +no_defs')
        target_area = AreaDefinition('test', 'test', 'test', proj_dict, 4, 4,
                                     (-1000., -1500., 1000., 1500.))
        area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5,
                                  (-1000., -1500., 1000., 1500.))
        area_def.get_area_slices = mock.MagicMock()
        get_area_slices = area_def.get_area_slices
        get_area_slices.return_value = (slice(0, 3, None), slice(0, 3, None))
        area_def_big = AreaDefinition('test', 'test', 'test', proj_dict, 10, 10,
                                      (-1000., -1500., 1000., 1500.))
        area_def_big.get_area_slices = mock.MagicMock()
        get_area_slices_big = area_def_big.get_area_slices
        get_area_slices_big.return_value = (slice(0, 6, None), slice(0, 6, None))

        # Test that data reduction can be disabled
        scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader')
        scene.load(['comp19'])
        scene['comp19'].attrs['area'] = area_def
        scene['comp19_big'] = xr.DataArray(
            da.zeros((10, 10)), dims=('y', 'x'),
            attrs=scene['comp19'].attrs.copy())
        scene['comp19_big'].attrs['area'] = area_def_big
        scene['comp19_copy'] = scene['comp19'].copy()
        orig_slice_data = scene._slice_data
        # we force the below order of processing to test that success isn't
        # based on data of the same resolution being processed together
        test_order = [
            DatasetID.from_dict(scene['comp19'].attrs),
            DatasetID.from_dict(scene['comp19_big'].attrs),
            DatasetID.from_dict(scene['comp19_copy'].attrs),
        ]
        with mock.patch('satpy.scene.Scene._slice_data') as slice_data, \
                mock.patch('satpy.dataset.dataset_walker') as ds_walker:
            ds_walker.return_value = test_order
            slice_data.side_effect = orig_slice_data
            scene.resample(target_area, reduce_data=False)
            self.assertFalse(slice_data.called)
            self.assertFalse(get_area_slices.called)
            scene.resample(target_area)
            self.assertTrue(slice_data.called)
            self.assertTrue(get_area_slices.called)
            scene.resample(target_area, reduce_data=True)
            # 2 times for each dataset
            # once for default (reduce_data=True)
            # once for kwarg forced to `True`
            self.assertEqual(slice_data.call_count, 2 * 3)
            self.assertTrue(get_area_slices.called)
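    # ``reduce_data`` crops each source dataset to the target area's bounding
    # box via ``AreaDefinition.get_area_slices`` before interpolation; mocking
    # that method (as above) lets the test observe whether the reduction step
    # ran without doing any real geometry work.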
    @mock.patch('satpy.composites.CompositorLoader.load_compositors')
    @mock.patch('satpy.scene.Scene.create_reader_instances')
    def test_resample_ancillary(self, cri, cl):
        """Test that ancillary variables are preserved through resampling."""
        import satpy.scene
        from satpy.tests.utils import FakeReader, test_composites
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        cri.return_value = {'fake_reader': FakeReader('fake_reader', 'fake_sensor')}
        comps, mods = test_composites('fake_sensor')
        cl.return_value = (comps, mods)
        proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                                      '+lon_0=-95. +lat_0=25 +lat_1=25 '
                                      '+units=m +no_defs')
        area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5,
                                  (-1000., -1500., 1000., 1500.))
        scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader')
        scene.load(['comp19', 'comp20'])
        scene['comp19'].attrs['area'] = area_def
        scene['comp19'].attrs['ancillary_variables'] = [scene['comp20']]
        scene['comp20'].attrs['area'] = area_def

        dst_area = AreaDefinition('dst', 'dst', 'dst', proj_dict, 2, 2,
                                  (-1000., -1500., 0., 0.))
        new_scene = scene.resample(dst_area)
        self.assertIs(new_scene['comp20'],
                      new_scene['comp19'].attrs['ancillary_variables'][0])

    @mock.patch('satpy.composites.CompositorLoader.load_compositors')
    @mock.patch('satpy.scene.Scene.create_reader_instances')
    def test_resample_reduce_data(self, cri, cl):
        """Test that reducing the data does not affect the final output."""
        import satpy.scene
        from satpy.tests.utils import FakeReader, test_composites
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        cri.return_value = {'fake_reader': FakeReader('fake_reader', 'fake_sensor')}
        comps, mods = test_composites('fake_sensor')
        cl.return_value = (comps, mods)
        proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                                      '+lon_0=-95. +lat_0=25 +lat_1=25 '
                                      '+units=m +no_defs')
        area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5,
                                  (-1000., -1500., 1000., 1500.))
        scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader')
        scene.load(['comp19'])
        scene['comp19'].attrs['area'] = area_def
        dst_area = AreaDefinition('dst', 'dst', 'dst', proj_dict, 2, 2,
                                  (-1000., -1500., 0., 0.))
        new_scene1 = scene.resample(dst_area, reduce_data=False)
        new_scene2 = scene.resample(dst_area)
        new_scene3 = scene.resample(dst_area, reduce_data=True)
        self.assertTupleEqual(new_scene1['comp19'].shape, (2, 2, 3))
        self.assertTupleEqual(new_scene2['comp19'].shape, (2, 2, 3))
        self.assertTupleEqual(new_scene3['comp19'].shape, (2, 2, 3))
    @mock.patch('satpy.scene.resample_dataset')
    @mock.patch('satpy.composites.CompositorLoader.load_compositors')
    @mock.patch('satpy.scene.Scene.create_reader_instances')
    def test_no_generate_comp10(self, cri, cl, rs):
        """Test generating a composite after resampling."""
        import satpy.scene
        from satpy.tests.utils import FakeReader, test_composites
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        cri.return_value = {'fake_reader': FakeReader('fake_reader', 'fake_sensor')}
        comps, mods = test_composites('fake_sensor')
        cl.return_value = (comps, mods)
        rs.side_effect = self._fake_resample_dataset
        proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                                      '+lon_0=-95. +lat_0=25 +lat_1=25 '
                                      '+units=m +no_defs')
        area_def = AreaDefinition('test', 'test', 'test', proj_dict, 200, 400,
                                  (-1000., -1500., 1000., 1500.))
        scene = satpy.scene.Scene(filenames=['bla'], base_dir='bli', reader='fake_reader')

        # it is fine that an optional prereq doesn't exist
        scene.load(['comp10'], generate=False)
        self.assertTrue(any(ds_id == 'comp10' for ds_id in scene.wishlist))
        self.assertNotIn('comp10', scene.datasets)
        # two dependencies should have been loaded
        self.assertEqual(len(scene.datasets), 2)
        self.assertEqual(len(scene.missing_datasets), 1)

        new_scn = scene.resample(area_def, generate=False)
        self.assertNotIn('comp10', scene.datasets)
        # two dependencies should have been loaded
        self.assertEqual(len(scene.datasets), 2)
        self.assertEqual(len(scene.missing_datasets), 1)

        new_scn.generate_composites()
        self.assertTrue(any(ds_id == 'comp10' for ds_id in new_scn.wishlist))
        self.assertIn('comp10', new_scn.datasets)
        self.assertEqual(len(new_scn.missing_datasets), 0)

        # try generating them right away
        new_scn = scene.resample(area_def)
        self.assertTrue(any(ds_id == 'comp10' for ds_id in new_scn.wishlist))
        self.assertIn('comp10', new_scn.datasets)
        self.assertEqual(len(new_scn.missing_datasets), 0)


class TestSceneSaving(unittest.TestCase):
    """Test the Scene's saving method."""

    def setUp(self):
        """Create temporary directory to save files to."""
        import tempfile
        self.base_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Remove the temporary directory created for a test."""
        try:
            import shutil
            shutil.rmtree(self.base_dir, ignore_errors=True)
        except OSError:
            pass

    def test_save_datasets_default(self):
        """Save a dataset using 'save_datasets'."""
        from satpy.scene import Scene
        import xarray as xr
        import dask.array as da
        from datetime import datetime
        ds1 = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': 'test',
                   'start_time': datetime(2018, 1, 1, 0, 0, 0)}
        )
        scn = Scene()
        scn['test'] = ds1
        scn.save_datasets(base_dir=self.base_dir)
        self.assertTrue(os.path.isfile(
            os.path.join(self.base_dir, 'test_20180101_000000.tif')))

    def test_save_datasets_by_ext(self):
        """Save a dataset using 'save_datasets' with 'filename'."""
        from satpy.scene import Scene
        from satpy.tests.utils import spy_decorator
        import xarray as xr
        import dask.array as da
        from datetime import datetime
        ds1 = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': 'test',
                   'start_time': datetime(2018, 1, 1, 0, 0, 0)}
        )
        scn = Scene()
        scn['test'] = ds1

        from satpy.writers.simple_image import PillowWriter
        save_image_mock = spy_decorator(PillowWriter.save_image)
        with mock.patch.object(PillowWriter, 'save_image', save_image_mock):
            scn.save_datasets(base_dir=self.base_dir, filename='{name}.png')
        save_image_mock.mock.assert_called_once()
        self.assertTrue(os.path.isfile(
            os.path.join(self.base_dir, 'test.png')))

    def test_save_datasets_bad_writer(self):
        """Save a dataset using 'save_datasets' and a bad writer."""
        from satpy.scene import Scene
        import xarray as xr
        import dask.array as da
        from datetime import datetime
        ds1 = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': 'test',
                   'start_time': datetime.utcnow()}
        )
        scn = Scene()
        scn['test'] = ds1
        self.assertRaises(ValueError,
                          scn.save_datasets,
                          writer='_bad_writer_',
                          base_dir=self.base_dir)
datasets.""" from satpy.scene import Scene, DatasetID scn = Scene() scn.wishlist.add(DatasetID(name='true_color')) self.assertRaises(RuntimeError, scn.save_datasets, writer='geotiff', base_dir=self.base_dir) self.assertRaises(KeyError, scn.save_datasets, datasets=['no_exist']) def test_save_dataset_default(self): """Save a dataset using 'save_dataset'.""" from satpy.scene import Scene import xarray as xr import dask.array as da from datetime import datetime ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn['test'] = ds1 scn.save_dataset('test', base_dir=self.base_dir) self.assertTrue(os.path.isfile( os.path.join(self.base_dir, 'test_20180101_000000.tif'))) class TestSceneConversions(unittest.TestCase): """Test Scene conversion to geoviews, xarray, etc.""" def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from satpy import Scene import xarray as xr import dask.array as da from datetime import datetime from pyresample.geometry import AreaDefinition scn = Scene() area = AreaDefinition('test', 'test', 'test', {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1), 'area': area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it self.assertIsNotNone(gv_obj) def test_geoviews_basic_with_swath(self): """Test converting a Scene to geoviews with a SwathDefinition.""" from satpy import Scene import xarray as xr import dask.array as da from datetime import datetime from pyresample.geometry import SwathDefinition scn = Scene() lons = xr.DataArray(da.zeros((2, 2))) lats = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(lons, lats) scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1), 'area': area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it self.assertIsNotNone(gv_obj) def suite(): """Test suite for test_scene.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestScene)) mysuite.addTest(loader.loadTestsFromTestCase(TestSceneLoading)) mysuite.addTest(loader.loadTestsFromTestCase(TestSceneResampling)) mysuite.addTest(loader.loadTestsFromTestCase(TestSceneSaving)) mysuite.addTest(loader.loadTestsFromTestCase(TestSceneConversions)) return mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/tests/test_utils.py000066400000000000000000000216371362525524100200220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Testing of utils.""" import unittest try: from unittest import mock except ImportError: import mock from numpy import sqrt from satpy.utils import angle2xyz, lonlat2xyz, xyz2angle, xyz2lonlat, proj_units_to_meters, get_satpos class TestUtils(unittest.TestCase): """Testing utils.""" def test_lonlat2xyz(self): """Test the lonlat2xyz function.""" x__, y__, z__ = lonlat2xyz(0, 0) self.assertAlmostEqual(x__, 1) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 0) x__, y__, z__ = lonlat2xyz(90, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = lonlat2xyz(0, 90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = lonlat2xyz(180, 0) self.assertAlmostEqual(x__, -1) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 0) x__, y__, z__ = lonlat2xyz(-90, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, -1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = lonlat2xyz(0, -90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, -1) x__, y__, z__ = lonlat2xyz(0, 45) self.assertAlmostEqual(x__, sqrt(2) / 2) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, sqrt(2) / 2) x__, y__, z__ = lonlat2xyz(0, 60) self.assertAlmostEqual(x__, sqrt(1) / 2) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, sqrt(3) / 2) def test_angle2xyz(self): """Test the lonlat2xyz function.""" x__, y__, z__ = angle2xyz(0, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = angle2xyz(90, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = angle2xyz(0, 90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(180, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = angle2xyz(-90, 0) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 1) x__, y__, z__ = angle2xyz(0, -90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, -1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(90, 90) self.assertAlmostEqual(x__, 1) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(-90, 90) self.assertAlmostEqual(x__, -1) self.assertAlmostEqual(y__, 0) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(180, 90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, -1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(0, -90) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, -1) self.assertAlmostEqual(z__, 0) x__, y__, z__ = angle2xyz(0, 45) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, sqrt(2) / 2) self.assertAlmostEqual(z__, sqrt(2) / 2) x__, y__, z__ = angle2xyz(0, 60) self.assertAlmostEqual(x__, 0) self.assertAlmostEqual(y__, sqrt(3) / 2) self.assertAlmostEqual(z__, sqrt(1) / 2) def test_xyz2lonlat(self): """Test xyz2lonlat.""" lon, lat = xyz2lonlat(1, 0, 0) self.assertAlmostEqual(lon, 0) self.assertAlmostEqual(lat, 0) lon, lat = xyz2lonlat(0, 1, 0) self.assertAlmostEqual(lon, 90) self.assertAlmostEqual(lat, 0) lon, lat = xyz2lonlat(0, 0, 1, asin=True) self.assertAlmostEqual(lon, 0) self.assertAlmostEqual(lat, 90) lon, lat = xyz2lonlat(0, 0, 1) self.assertAlmostEqual(lon, 0) self.assertAlmostEqual(lat, 90) lon, lat = xyz2lonlat(sqrt(2) / 2, sqrt(2) / 2, 0) self.assertAlmostEqual(lon, 45) 
    def test_xyz2lonlat(self):
        """Test xyz2lonlat."""
        lon, lat = xyz2lonlat(1, 0, 0)
        self.assertAlmostEqual(lon, 0)
        self.assertAlmostEqual(lat, 0)

        lon, lat = xyz2lonlat(0, 1, 0)
        self.assertAlmostEqual(lon, 90)
        self.assertAlmostEqual(lat, 0)

        lon, lat = xyz2lonlat(0, 0, 1, asin=True)
        self.assertAlmostEqual(lon, 0)
        self.assertAlmostEqual(lat, 90)

        lon, lat = xyz2lonlat(0, 0, 1)
        self.assertAlmostEqual(lon, 0)
        self.assertAlmostEqual(lat, 90)

        lon, lat = xyz2lonlat(sqrt(2) / 2, sqrt(2) / 2, 0)
        self.assertAlmostEqual(lon, 45)
        self.assertAlmostEqual(lat, 0)

    def test_xyz2angle(self):
        """Test xyz2angle."""
        azi, zen = xyz2angle(1, 0, 0)
        self.assertAlmostEqual(azi, 90)
        self.assertAlmostEqual(zen, 90)

        azi, zen = xyz2angle(0, 1, 0)
        self.assertAlmostEqual(azi, 0)
        self.assertAlmostEqual(zen, 90)

        azi, zen = xyz2angle(0, 0, 1)
        self.assertAlmostEqual(azi, 0)
        self.assertAlmostEqual(zen, 0)

        azi, zen = xyz2angle(0, 0, 1, acos=True)
        self.assertAlmostEqual(azi, 0)
        self.assertAlmostEqual(zen, 0)

        azi, zen = xyz2angle(sqrt(2) / 2, sqrt(2) / 2, 0)
        self.assertAlmostEqual(azi, 45)
        self.assertAlmostEqual(zen, 90)

        azi, zen = xyz2angle(-1, 0, 0)
        self.assertAlmostEqual(azi, -90)
        self.assertAlmostEqual(zen, 90)

        azi, zen = xyz2angle(0, -1, 0)
        self.assertAlmostEqual(azi, 180)
        self.assertAlmostEqual(zen, 90)

    def test_proj_units_to_meters(self):
        """Test proj units to meters conversion."""
        prj = '+asd=123123123123'
        res = proj_units_to_meters(prj)
        self.assertEqual(res, prj)
        prj = '+a=6378.137'
        res = proj_units_to_meters(prj)
        self.assertEqual(res, '+a=6378137.000')
        prj = '+a=6378.137 +units=km'
        res = proj_units_to_meters(prj)
        self.assertEqual(res, '+a=6378137.000')
        prj = '+a=6378.137 +b=6378.137'
        res = proj_units_to_meters(prj)
        self.assertEqual(res, '+a=6378137.000 +b=6378137.000')
        prj = '+a=6378.137 +b=6378.137 +h=35785.863'
        res = proj_units_to_meters(prj)
        self.assertEqual(res, '+a=6378137.000 +b=6378137.000 +h=35785863.000')

    @mock.patch('satpy.utils.warnings.warn')
    def test_get_satpos(self, warn_mock):
        """Test getting the satellite position."""
        orb_params = {'nadir_longitude': 1,
                      'satellite_actual_longitude': 1.1,
                      'satellite_nominal_longitude': 1.2,
                      'projection_longitude': 1.3,
                      'nadir_latitude': 2,
                      'satellite_actual_latitude': 2.1,
                      'satellite_nominal_latitude': 2.2,
                      'projection_latitude': 2.3,
                      'satellite_actual_altitude': 3,
                      'satellite_nominal_altitude': 3.1,
                      'projection_altitude': 3.2}
        dataset = mock.MagicMock(attrs={'orbital_parameters': orb_params,
                                        'satellite_longitude': -1,
                                        'satellite_latitude': -2,
                                        'satellite_altitude': -3})

        # Nadir
        lon, lat, alt = get_satpos(dataset)
        self.assertTupleEqual((lon, lat, alt), (1, 2, 3))

        # Actual
        orb_params.pop('nadir_longitude')
        orb_params.pop('nadir_latitude')
        lon, lat, alt = get_satpos(dataset)
        self.assertTupleEqual((lon, lat, alt), (1.1, 2.1, 3))

        # Nominal
        orb_params.pop('satellite_actual_longitude')
        orb_params.pop('satellite_actual_latitude')
        orb_params.pop('satellite_actual_altitude')
        lon, lat, alt = get_satpos(dataset)
        self.assertTupleEqual((lon, lat, alt), (1.2, 2.2, 3.1))

        # Projection
        orb_params.pop('satellite_nominal_longitude')
        orb_params.pop('satellite_nominal_latitude')
        orb_params.pop('satellite_nominal_altitude')
        lon, lat, alt = get_satpos(dataset)
        self.assertTupleEqual((lon, lat, alt), (1.3, 2.3, 3.2))
        warn_mock.assert_called()

        # Legacy
        dataset.attrs.pop('orbital_parameters')
        lon, lat, alt = get_satpos(dataset)
        self.assertTupleEqual((lon, lat, alt), (-1, -2, -3))


def suite():
    """Test suite."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestUtils))
    return mysuite
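# Minimal round-trip sketch of the coordinate helpers tested above; it is not
# part of the test suite, just an illustration of how the pairs invert each
# other on the unit sphere.
def _example_lonlat_roundtrip(lon=12.0, lat=34.0):
    """Sketch: lonlat2xyz and xyz2lonlat are inverses of each other."""
    from satpy.utils import lonlat2xyz, xyz2lonlat
    x, y, z = lonlat2xyz(lon, lat)
    return xyz2lonlat(x, y, z)  # approximately (12.0, 34.0) again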
satpy-0.20.0/satpy/tests/test_writers.py

#!/usr/bin/python
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Test generic writer functions."""

import os
import errno
import shutil
import unittest
import warnings

import numpy as np
import xarray as xr
from trollimage.colormap import greys

try:
    from unittest import mock
except ImportError:
    import mock


def mkdir_p(path):
    """Make directories."""
    if not path or path == '.':
        return
    # Kept for Python 2.7 compatibility; once Python 2.7 support is dropped
    # just use `os.makedirs(path, exist_ok=True)`
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise


class TestWritersModule(unittest.TestCase):
    """Test the writers module."""

    def test_to_image_1d(self):
        """Conversion to image."""
        # 1D
        from satpy.writers import to_image
        p = xr.DataArray(np.arange(25), dims=['y'])
        self.assertRaises(ValueError, to_image, p)

    @mock.patch('satpy.writers.XRImage')
    def test_to_image_2d(self, mock_geoimage):
        """Conversion to image."""
        from satpy.writers import to_image
        # 2D
        data = np.arange(25).reshape((5, 5))
        p = xr.DataArray(data, attrs=dict(mode="L", fill_value=0,
                                          palette=[0, 1, 2, 3, 4, 5]),
                         dims=['y', 'x'])
        to_image(p)
        np.testing.assert_array_equal(data, mock_geoimage.call_args[0][0].values)
        mock_geoimage.reset_mock()

    @mock.patch('satpy.writers.XRImage')
    def test_to_image_3d(self, mock_geoimage):
        """Conversion to image."""
        # 3D
        from satpy.writers import to_image
        data = np.arange(75).reshape((3, 5, 5))
        p = xr.DataArray(data, dims=['bands', 'y', 'x'])
        p['bands'] = ['R', 'G', 'B']
        to_image(p)
        np.testing.assert_array_equal(data[0], mock_geoimage.call_args[0][0][0])
        np.testing.assert_array_equal(data[1], mock_geoimage.call_args[0][0][1])
        np.testing.assert_array_equal(data[2], mock_geoimage.call_args[0][0][2])

    @mock.patch('satpy.writers.get_enhanced_image')
    def test_show(self, mock_get_image):
        """Check showing."""
        from satpy.writers import show
        data = np.arange(25).reshape((5, 5))
        p = xr.DataArray(data, dims=['y', 'x'])
        show(p)
        self.assertTrue(mock_get_image.return_value.show.called)


class TestEnhancer(unittest.TestCase):
    """Test basic `Enhancer` functionality with builtin configs."""

    def test_basic_init_no_args(self):
        """Test Enhancer init with no arguments passed."""
        from satpy.writers import Enhancer
        e = Enhancer()
        self.assertIsNotNone(e.enhancement_tree)

    def test_basic_init_no_enh(self):
        """Test Enhancer init requesting no enhancements."""
        from satpy.writers import Enhancer
        e = Enhancer(enhancement_config_file=False)
        self.assertIsNone(e.enhancement_tree)

    def test_basic_init_provided_enh(self):
        """Test Enhancer init with string enhancement configs."""
        from satpy.writers import Enhancer
        e = Enhancer(enhancement_config_file=["""enhancements:
  enh1:
    standard_name: toa_bidirectional_reflectance
    operations:
    - name: stretch
      method: &stretchfun !!python/name:satpy.enhancements.stretch ''
      kwargs: {stretch: linear}
"""])
        self.assertIsNotNone(e.enhancement_tree)

    def test_init_nonexistent_enh_file(self):
        """Test Enhancer init with a nonexistent enhancement configuration file."""
        from satpy.writers import Enhancer
        self.assertRaises(ValueError, Enhancer,
                          enhancement_config_file="is_not_a_valid_filename_?.yaml")
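# For orientation, a sketch of how the pieces above fit together outside the
# tests.  The YAML string mirrors the one in ``test_basic_init_provided_enh``
# and is an illustrative assumption, not a shipped configuration.
def _example_custom_enhancement(data_array):
    """Sketch: enhance a DataArray with an inline enhancement config."""
    from satpy.writers import Enhancer, get_enhanced_image
    enhancer = Enhancer(enhancement_config_file=["""enhancements:
  enh1:
    standard_name: toa_bidirectional_reflectance
    operations:
    - name: stretch
      method: !!python/name:satpy.enhancements.stretch ''
      kwargs: {stretch: linear}
"""])
    # returns a trollimage XRImage with the stretch applied
    return get_enhanced_image(data_array, enhance=enhancer)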
class TestEnhancerUserConfigs(unittest.TestCase):
    """Test `Enhancer` functionality when user's custom configurations are present."""

    ENH_FN = 'test_sensor.yaml'
    ENH_ENH_FN = os.path.join('enhancements', ENH_FN)
    ENH_FN2 = 'test_sensor2.yaml'
    ENH_ENH_FN2 = os.path.join('enhancements', ENH_FN2)
    ENH_FN3 = 'test_empty.yaml'

    TEST_CONFIGS = {
        ENH_FN: """
sensor_name: visir/test_sensor
enhancements:
  test1_default:
    name: test1
    operations:
    - name: stretch
      method: !!python/name:satpy.enhancements.stretch ''
      kwargs: {stretch: linear, cutoffs: [0., 0.]}
""",
        ENH_ENH_FN: """
sensor_name: visir/test_sensor
enhancements:
  test1_kelvin:
    name: test1
    units: kelvin
    operations:
    - name: stretch
      method: !!python/name:satpy.enhancements.stretch ''
      kwargs: {stretch: crude, min_stretch: 0, max_stretch: 20}
""",
        ENH_FN2: """
sensor_name: visir/test_sensor2
""",
        ENH_ENH_FN2: """
sensor_name: visir/test_sensor2
""",
        ENH_FN3: """""",
    }

    @classmethod
    def setUpClass(cls):
        """Create fake user configurations."""
        for fn, content in cls.TEST_CONFIGS.items():
            base_dir = os.path.dirname(fn)
            mkdir_p(base_dir)
            with open(fn, 'w') as f:
                f.write(content)

        # create fake test image writer
        from satpy.writers import ImageWriter

        class CustomImageWriter(ImageWriter):
            def __init__(self, **kwargs):
                super(CustomImageWriter, self).__init__(name='test', config_files=[], **kwargs)
                self.img = None

            def save_image(self, img, **kwargs):
                self.img = img

        cls.CustomImageWriter = CustomImageWriter

    @classmethod
    def tearDownClass(cls):
        """Remove fake user configurations."""
        for fn, _content in cls.TEST_CONFIGS.items():
            base_dir = os.path.dirname(fn)
            if base_dir not in ['.', ''] and os.path.isdir(base_dir):
                shutil.rmtree(base_dir)
            elif os.path.isfile(fn):
                os.remove(fn)

    def test_enhance_empty_config(self):
        """Test Enhancer doesn't fail with empty enhancement file."""
        from satpy.writers import Enhancer, get_enhanced_image
        from xarray import DataArray
        ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
                       attrs=dict(sensor='test_empty', mode='L'),
                       dims=['y', 'x'])
        e = Enhancer()
        self.assertIsNotNone(e.enhancement_tree)
        get_enhanced_image(ds, enhance=e)
        self.assertSetEqual(set(e.sensor_enhancement_configs),
                            {os.path.abspath(self.ENH_FN3)})

    def test_enhance_with_sensor_no_entry(self):
        """Test enhancing an image that has no configuration sections."""
        from satpy.writers import Enhancer, get_enhanced_image
        from xarray import DataArray
        ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
                       attrs=dict(sensor='test_sensor2', mode='L'),
                       dims=['y', 'x'])
        e = Enhancer()
        self.assertIsNotNone(e.enhancement_tree)
        get_enhanced_image(ds, enhance=e)
        self.assertSetEqual(set(e.sensor_enhancement_configs),
                            {os.path.abspath(self.ENH_FN2),
                             os.path.abspath(self.ENH_ENH_FN2)})

    def test_deprecated_enhance_with_file_specified(self):
        """Test enhancing an image when config file is specified."""
        from satpy.writers import get_enhanced_image
        from xarray import DataArray
        ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
                       attrs=dict(name='test1', sensor='test_sensor', mode='L'),
                       dims=['y', 'x'])
        get_enhanced_image(ds, enhancement_config_file=self.ENH_ENH_FN)

    def test_no_enhance(self):
        """Test turning off enhancements."""
        from satpy.writers import get_enhanced_image
        from xarray import DataArray
        ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
                       attrs=dict(name='test1', sensor='test_sensor', mode='L'),
                       dims=['y', 'x'])
        img = get_enhanced_image(ds, enhance=False)
        np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data)
    def test_writer_no_enhance(self):
        """Test turning off enhancements with writer."""
        from xarray import DataArray
        ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
                       attrs=dict(name='test1', sensor='test_sensor', mode='L'),
                       dims=['y', 'x'])
        writer = self.CustomImageWriter(enhance=False)
        writer.save_datasets((ds,), compute=False)
        img = writer.img
        np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data)

    def test_writer_custom_enhance(self):
        """Test using custom enhancements with writer."""
        from satpy.writers import Enhancer
        from xarray import DataArray
        ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
                       attrs=dict(name='test1', sensor='test_sensor', mode='L'),
                       dims=['y', 'x'])
        enhance = Enhancer()
        writer = self.CustomImageWriter(enhance=enhance)
        writer.save_datasets((ds,), compute=False)
        img = writer.img
        np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.)

    def test_enhance_with_sensor_entry(self):
        """Test enhancing an image with a configuration section."""
        from satpy.writers import Enhancer, get_enhanced_image
        from xarray import DataArray
        import dask.array as da
        ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
                       attrs=dict(name='test1', sensor='test_sensor', mode='L'),
                       dims=['y', 'x'])
        e = Enhancer()
        self.assertIsNotNone(e.enhancement_tree)
        img = get_enhanced_image(ds, enhance=e)
        self.assertSetEqual(set(e.sensor_enhancement_configs),
                            {os.path.abspath(self.ENH_FN),
                             os.path.abspath(self.ENH_ENH_FN)})
        np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.)

        ds = DataArray(da.arange(1, 11., chunks=5).reshape((2, 5)),
                       attrs=dict(name='test1', sensor='test_sensor', mode='L'),
                       dims=['y', 'x'])
        e = Enhancer()
        self.assertIsNotNone(e.enhancement_tree)
        img = get_enhanced_image(ds, enhance=e)
        self.assertSetEqual(set(e.sensor_enhancement_configs),
                            {os.path.abspath(self.ENH_FN),
                             os.path.abspath(self.ENH_ENH_FN)})
        np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.)
    def test_enhance_with_sensor_entry2(self):
        """Test enhancing an image with a more detailed configuration section."""
        from satpy.writers import Enhancer, get_enhanced_image
        from xarray import DataArray
        ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
                       attrs=dict(name='test1', units='kelvin',
                                  sensor='test_sensor', mode='L'),
                       dims=['y', 'x'])
        e = Enhancer()
        self.assertIsNotNone(e.enhancement_tree)
        img = get_enhanced_image(ds, enhance=e)
        self.assertSetEqual(set(e.sensor_enhancement_configs),
                            {os.path.abspath(self.ENH_FN),
                             os.path.abspath(self.ENH_ENH_FN)})
        np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 0.5)


class TestYAMLFiles(unittest.TestCase):
    """Test and analyze the writer configuration files."""

    def test_filename_matches_writer_name(self):
        """Test that every writer filename matches the name in the YAML."""
        import yaml

        class IgnoreLoader(yaml.SafeLoader):

            def _ignore_all_tags(self, tag_suffix, node):
                return tag_suffix + ' ' + node.value

        IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags)

        from satpy.config import glob_config
        from satpy.writers import read_writer_config
        for writer_config in glob_config('writers/*.yaml'):
            writer_fn = os.path.basename(writer_config)
            writer_fn_name = os.path.splitext(writer_fn)[0]
            writer_info = read_writer_config([writer_config], loader=IgnoreLoader)
            self.assertEqual(writer_fn_name, writer_info['name'],
                             "Writer YAML filename doesn't match writer "
                             "name in the YAML file.")

    def test_available_writers(self):
        """Test the 'available_writers' function."""
        from satpy import available_writers
        writer_names = available_writers()
        self.assertGreater(len(writer_names), 0)
        self.assertIsInstance(writer_names[0], str)
        self.assertIn('geotiff', writer_names)

        writer_infos = available_writers(as_dict=True)
        self.assertEqual(len(writer_names), len(writer_infos))
        self.assertIsInstance(writer_infos[0], dict)
        for writer_info in writer_infos:
            self.assertIn('name', writer_info)


class TestComputeWriterResults(unittest.TestCase):
    """Test compute_writer_results()."""

    def setUp(self):
        """Create temporary directory to save files to and a mock scene."""
        import tempfile
        from datetime import datetime
        from satpy.scene import Scene
        import dask.array as da
        ds1 = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': 'test',
                   'start_time': datetime(2018, 1, 1, 0, 0, 0)}
        )
        self.scn = Scene()
        self.scn['test'] = ds1
        # Temp dir
        self.base_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Remove the temporary directory created for a test."""
        try:
            shutil.rmtree(self.base_dir, ignore_errors=True)
        except OSError:
            pass

    def test_empty(self):
        """Test empty result list."""
        from satpy.writers import compute_writer_results
        compute_writer_results([])

    def test_simple_image(self):
        """Test writing to PNG file."""
        from satpy.writers import compute_writer_results
        fname = os.path.join(self.base_dir, 'simple_image.png')
        res = self.scn.save_datasets(filename=fname,
                                     datasets=['test'],
                                     writer='simple_image',
                                     compute=False)
        compute_writer_results([res])
        self.assertTrue(os.path.isfile(fname))

    def test_geotiff(self):
        """Test writing to GeoTIFF file."""
        from satpy.writers import compute_writer_results
        fname = os.path.join(self.base_dir, 'geotiff.tif')
        res = self.scn.save_datasets(filename=fname,
                                     datasets=['test'],
                                     writer='geotiff',
                                     compute=False)
        compute_writer_results([res])
        self.assertTrue(os.path.isfile(fname))
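    # All of the tests in this class rely on ``compute=False``: each
    # ``save_datasets`` call then returns delayed writer results instead of
    # writing immediately, and ``compute_writer_results`` executes them
    # together so several outputs can share the underlying computation.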
    # FIXME: This writer needs more information than exists at the moment
    # def test_mitiff(self):
    #     """Test writing to mitiff file."""
    #     fname = os.path.join(self.base_dir, 'mitiff.tif')
    #     res = self.scn.save_datasets(filename=fname,
    #                                  datasets=['test'],
    #                                  writer='mitiff')
    #     compute_writer_results([res])
    #     self.assertTrue(os.path.isfile(fname))

    # FIXME: This writer needs more information than exists at the moment
    # def test_cf(self):
    #     """Test writing to NetCDF4 file."""
    #     fname = os.path.join(self.base_dir, 'cf.nc')
    #     res = self.scn.save_datasets(filename=fname,
    #                                  datasets=['test'],
    #                                  writer='cf')
    #     compute_writer_results([res])
    #     self.assertTrue(os.path.isfile(fname))

    def test_multiple_geotiff(self):
        """Test writing to multiple GeoTIFF files."""
        from satpy.writers import compute_writer_results
        fname1 = os.path.join(self.base_dir, 'geotiff1.tif')
        res1 = self.scn.save_datasets(filename=fname1,
                                      datasets=['test'],
                                      writer='geotiff',
                                      compute=False)
        fname2 = os.path.join(self.base_dir, 'geotiff2.tif')
        res2 = self.scn.save_datasets(filename=fname2,
                                      datasets=['test'],
                                      writer='geotiff',
                                      compute=False)
        compute_writer_results([res1, res2])
        self.assertTrue(os.path.isfile(fname1))
        self.assertTrue(os.path.isfile(fname2))

    def test_multiple_simple(self):
        """Test writing to multiple PNG files."""
        from satpy.writers import compute_writer_results
        fname1 = os.path.join(self.base_dir, 'simple_image1.png')
        res1 = self.scn.save_datasets(filename=fname1,
                                      datasets=['test'],
                                      writer='simple_image',
                                      compute=False)
        fname2 = os.path.join(self.base_dir, 'simple_image2.png')
        res2 = self.scn.save_datasets(filename=fname2,
                                      datasets=['test'],
                                      writer='simple_image',
                                      compute=False)
        compute_writer_results([res1, res2])
        self.assertTrue(os.path.isfile(fname1))
        self.assertTrue(os.path.isfile(fname2))

    def test_mixed(self):
        """Test writing to multiple mixed-type files."""
        from satpy.writers import compute_writer_results
        fname1 = os.path.join(self.base_dir, 'simple_image3.png')
        res1 = self.scn.save_datasets(filename=fname1,
                                      datasets=['test'],
                                      writer='simple_image',
                                      compute=False)
        fname2 = os.path.join(self.base_dir, 'geotiff3.tif')
        res2 = self.scn.save_datasets(filename=fname2,
                                      datasets=['test'],
                                      writer='geotiff',
                                      compute=False)
        res3 = []
        compute_writer_results([res1, res2, res3])
        self.assertTrue(os.path.isfile(fname1))
        self.assertTrue(os.path.isfile(fname2))


class TestBaseWriter(unittest.TestCase):
    """Test the base writer class."""

    def setUp(self):
        """Set up tests."""
        import tempfile
        from datetime import datetime
        from satpy.scene import Scene
        import dask.array as da
        ds1 = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': 'test',
                   'start_time': datetime(2018, 1, 1, 0, 0, 0)}
        )
        self.scn = Scene()
        self.scn['test'] = ds1
        # Temp dir
        self.base_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Remove the temporary directory created for a test."""
        try:
            shutil.rmtree(self.base_dir, ignore_errors=True)
        except OSError:
            pass

    def test_save_dataset_static_filename(self):
        """Test saving a dataset with a static filename specified."""
        self.scn.save_datasets(base_dir=self.base_dir, filename='geotiff.tif')
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, 'geotiff.tif')))

    def test_save_dataset_dynamic_filename(self):
        """Test saving a dataset with a format filename specified."""
        fmt_fn = 'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif'
        exp_fn = 'geotiff_test_20180101_000000.tif'
        self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, exp_fn)))
    def test_save_dataset_dynamic_filename_with_dir(self):
        """Test saving a dataset with a format filename that includes a directory."""
        fmt_fn = os.path.join('{start_time:%Y%m%d}',
                              'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif')
        exp_fn = os.path.join('20180101', 'geotiff_test_20180101_000000.tif')
        self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, exp_fn)))

        # change the filename pattern but keep the same directory
        fmt_fn2 = os.path.join('{start_time:%Y%m%d}',
                               'geotiff_{name}_{start_time:%Y%m%d_%H}.tif')
        exp_fn2 = os.path.join('20180101', 'geotiff_test_20180101_00.tif')
        self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn2)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, exp_fn2)))
        # the original file should still exist
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, exp_fn)))


class TestOverlays(unittest.TestCase):
    """Tests for add_overlay and add_decorate functions."""

    def setUp(self):
        """Create test data and mock pycoast/pydecorate."""
        from trollimage.xrimage import XRImage
        from pyresample.geometry import AreaDefinition
        import xarray as xr
        import dask.array as da

        proj_dict = {'proj': 'lcc', 'datum': 'WGS84', 'ellps': 'WGS84',
                     'lon_0': -95., 'lat_0': 25, 'lat_1': 25,
                     'units': 'm', 'no_defs': True}
        self.area_def = AreaDefinition(
            'test', 'test', 'test', proj_dict,
            200, 400, (-1000., -1500., 1000., 1500.),
        )
        self.orig_rgb_img = XRImage(
            xr.DataArray(da.arange(75., chunks=10).reshape(3, 5, 5) / 75.,
                         dims=('bands', 'y', 'x'),
                         coords={'bands': ['R', 'G', 'B']},
                         attrs={'name': 'test_ds', 'area': self.area_def})
        )
        self.orig_l_img = XRImage(
            xr.DataArray(da.arange(25., chunks=10).reshape(5, 5) / 75.,
                         dims=('y', 'x'),
                         attrs={'name': 'test_ds', 'area': self.area_def})
        )

        self.decorate = {
            'decorate': [
                {'logo': {'logo_path': '', 'height': 143, 'bg': 'white',
                          'bg_opacity': 255}},
                {'text': {
                    'txt': 'TEST',
                    'align': {'top_bottom': 'bottom', 'left_right': 'right'},
                    'font': '',
                    'font_size': 22,
                    'height': 30,
                    'bg': 'black',
                    'bg_opacity': 255,
                    'line': 'white'}},
                {'scale': {
                    'colormap': greys,
                    'extend': False,
                    'width': 1670, 'height': 110,
                    'tick_marks': 5, 'minor_tick_marks': 1,
                    'cursor': [0, 0], 'bg': 'white',
                    'title': 'TEST TITLE OF SCALE',
                    'fontsize': 110, 'align': 'cc'}}
            ]
        }

        import_mock = mock.MagicMock()
        modules = {'pycoast': import_mock.pycoast,
                   'pydecorate': import_mock.pydecorate}
        self.module_patcher = mock.patch.dict('sys.modules', modules)
        self.module_patcher.start()

    def tearDown(self):
        """Turn off pycoast/pydecorate mocking."""
        self.module_patcher.stop()
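    # ``mock.patch.dict('sys.modules', ...)`` above swaps in MagicMock modules
    # for pycoast/pydecorate, so these tests run even when the optional
    # drawing dependencies are not installed; a standalone sketch of the same
    # trick follows this class.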
    def test_add_overlay_basic_rgb(self):
        """Test basic add_overlay usage with RGB data."""
        from satpy.writers import add_overlay, _burn_overlay
        from pycoast import ContourWriterAGG
        coast_dir = '/path/to/coast/data'
        with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil:
            apply_pil.return_value = self.orig_rgb_img
            new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir,
                                  fill_value=0)
            self.assertEqual(self.orig_rgb_img.mode, new_img.mode)
            new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir)
            self.assertEqual(self.orig_rgb_img.mode + 'A', new_img.mode)

            with mock.patch.object(self.orig_rgb_img, "convert") as convert:
                convert.return_value = self.orig_rgb_img
                overlays = {'coasts': {'outline': 'red'}}
                new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir,
                                      overlays=overlays, fill_value=0)
                pil_args = None
                pil_kwargs = {'fill_value': 0}
                fun_args = (self.orig_rgb_img.data.area,
                            ContourWriterAGG.return_value, overlays)
                fun_kwargs = None
                apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode,
                                             pil_args, pil_kwargs, fun_args, fun_kwargs)
                ContourWriterAGG.assert_called_with(coast_dir)

                # test legacy call
                grid = {'minor_is_tick': True}
                color = 'red'
                expected_overlays = {'coasts': {'outline': color, 'width': 0.5, 'level': 1},
                                     'borders': {'outline': color, 'width': 0.5, 'level': 1},
                                     'grid': grid}
                with warnings.catch_warnings(record=True) as wns:
                    warnings.simplefilter("always")
                    new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir,
                                          color=color, grid=grid, fill_value=0)
                    assert len(wns) == 1
                    assert issubclass(wns[0].category, DeprecationWarning)
                    assert "deprecated" in str(wns[0].message)
                pil_args = None
                pil_kwargs = {'fill_value': 0}
                fun_args = (self.orig_rgb_img.data.area,
                            ContourWriterAGG.return_value, expected_overlays)
                fun_kwargs = None
                apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode,
                                             pil_args, pil_kwargs, fun_args, fun_kwargs)
                ContourWriterAGG.assert_called_with(coast_dir)

    def test_add_overlay_basic_l(self):
        """Test basic add_overlay usage with L data."""
        from satpy.writers import add_overlay
        new_img = add_overlay(self.orig_l_img, self.area_def, '', fill_value=0)
        self.assertEqual('RGB', new_img.mode)
        new_img = add_overlay(self.orig_l_img, self.area_def, '')
        self.assertEqual('RGBA', new_img.mode)

    def test_add_decorate_basic_rgb(self):
        """Test basic add_decorate usage with RGB data."""
        from satpy.writers import add_decorate
        new_img = add_decorate(self.orig_rgb_img, **self.decorate)
        self.assertEqual('RGBA', new_img.mode)

    def test_add_decorate_basic_l(self):
        """Test basic add_decorate usage with L data."""
        from satpy.writers import add_decorate
        new_img = add_decorate(self.orig_l_img, **self.decorate)
        self.assertEqual('RGBA', new_img.mode)


def suite():
    """Test suite for test_writers."""
    loader = unittest.TestLoader()
    my_suite = unittest.TestSuite()
    my_suite.addTest(loader.loadTestsFromTestCase(TestWritersModule))
    my_suite.addTest(loader.loadTestsFromTestCase(TestEnhancer))
    my_suite.addTest(loader.loadTestsFromTestCase(TestEnhancerUserConfigs))
    my_suite.addTest(loader.loadTestsFromTestCase(TestYAMLFiles))
    my_suite.addTest(loader.loadTestsFromTestCase(TestComputeWriterResults))
    my_suite.addTest(loader.loadTestsFromTestCase(TestBaseWriter))
    my_suite.addTest(loader.loadTestsFromTestCase(TestOverlays))
    return my_suite
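# Standalone sketch of the optional-dependency mocking used in TestOverlays;
# ``fake_pkg`` is a placeholder name.  Patching ``sys.modules`` makes any
# subsequent ``import fake_pkg`` resolve to a MagicMock for the duration of
# the context, so code under test can import a package that isn't installed.
def _example_mock_optional_import():
    """Sketch: make an uninstalled package importable inside a test."""
    from unittest import mock
    modules = {'fake_pkg': mock.MagicMock()}
    with mock.patch.dict('sys.modules', modules):
        import fake_pkg  # resolves to the MagicMock above
        return fake_pkg.some_function()  # any attribute access works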
"""Testing the yaml_reader module.""" import os import random import unittest from datetime import datetime from tempfile import mkdtemp from unittest.mock import MagicMock, patch import satpy.readers.yaml_reader as yr from satpy.readers.file_handlers import BaseFileHandler from satpy.dataset import DatasetID class FakeFH(BaseFileHandler): """Fake file handler class.""" def __init__(self, start_time, end_time): """Initialize fake file handler.""" super(FakeFH, self).__init__("", {}, {}) self._start_time = start_time self._end_time = end_time self.get_bounding_box = MagicMock() fake_ds = MagicMock() fake_ds.return_value.dims = ['x', 'y'] self.get_dataset = fake_ds self.combine_info = MagicMock() @property def start_time(self): """Return start time.""" return self._start_time @property def end_time(self): """Return end time.""" return self._end_time class TestUtils(unittest.TestCase): """Test the utility functions.""" def test_get_filebase(self): """Check the get_filebase function.""" base_dir = os.path.join(os.path.expanduser('~'), 'data', 'satellite', 'Sentinel-3') base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') base_dir = os.path.join(base_dir, base_data) pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' '2s}_{collection:3s}.SEN3/geo_coordinates.nc') pattern = os.path.join(*pattern.split('/')) filename = os.path.join(base_dir, 'Oa05_radiance.nc') expected = os.path.join(base_data, 'Oa05_radiance.nc') self.assertEqual(yr.get_filebase(filename, pattern), expected) def test_match_filenames(self): """Check that matching filenames works.""" # just a fake path for testing that doesn't have to exist base_dir = os.path.join(os.path.expanduser('~'), 'data', 'satellite', 'Sentinel-3') base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') base_dir = os.path.join(base_dir, base_data) pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' '2s}_{collection:3s}.SEN3/geo_coordinates.nc') pattern = os.path.join(*pattern.split('/')) filenames = [os.path.join(base_dir, 'Oa05_radiance.nc'), os.path.join(base_dir, 'geo_coordinates.nc')] expected = os.path.join(base_dir, 'geo_coordinates.nc') self.assertEqual(yr.match_filenames(filenames, pattern), [expected]) def test_match_filenames_windows_forward_slash(self): """Check that matching filenames works on Windows with forward slashes. This is common from Qt5 which internally uses forward slashes everywhere. 
""" # just a fake path for testing that doesn't have to exist base_dir = os.path.join(os.path.expanduser('~'), 'data', 'satellite', 'Sentinel-3') base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') base_dir = os.path.join(base_dir, base_data) pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' '2s}_{collection:3s}.SEN3/geo_coordinates.nc') pattern = os.path.join(*pattern.split('/')) filenames = [os.path.join(base_dir, 'Oa05_radiance.nc').replace(os.sep, '/'), os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/')] expected = os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/') self.assertEqual(yr.match_filenames(filenames, pattern), [expected]) def test_listify_string(self): """Check listify_string.""" self.assertEqual(yr.listify_string(None), []) self.assertEqual(yr.listify_string('some string'), ['some string']) self.assertEqual(yr.listify_string(['some', 'string']), ['some', 'string']) class DummyReader(BaseFileHandler): """Dummy reader instance.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the dummy reader.""" super(DummyReader, self).__init__( filename, filename_info, filetype_info) self._start_time = datetime(2000, 1, 1, 12, 1) self._end_time = datetime(2000, 1, 1, 12, 2) self.metadata = {} @property def start_time(self): """Return start time.""" return self._start_time @property def end_time(self): """Return end time.""" return self._end_time class TestFileFileYAMLReaderMultiplePatterns(unittest.TestCase): """Test units from FileYAMLReader with multiple readers.""" @patch('satpy.readers.yaml_reader.recursive_dict_update') @patch('satpy.readers.yaml_reader.yaml', spec=yr.yaml) def setUp(self, _, rec_up): # pylint: disable=arguments-differ """Prepare a reader instance with a fake config.""" patterns = ['a{something:3s}.bla', 'a0{something:2s}.bla'] res_dict = {'reader': {'name': 'fake', 'sensors': ['canon']}, 'file_types': {'ftype1': {'name': 'ft1', 'file_patterns': patterns, 'file_reader': DummyReader}}, 'datasets': {'ch1': {'name': 'ch01', 'wavelength': [0.5, 0.6, 0.7], 'calibration': 'reflectance', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'ch2': {'name': 'ch02', 'wavelength': [0.7, 0.75, 0.8], 'calibration': 'counts', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'lons': {'name': 'lons', 'file_type': 'ftype2'}, 'lats': {'name': 'lats', 'file_type': 'ftype2'}}} rec_up.return_value = res_dict self.config = res_dict self.reader = yr.FileYAMLReader([__file__], filter_parameters={ 'start_time': datetime(2000, 1, 1), 'end_time': datetime(2000, 1, 2)}) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] res = self.reader.select_files_from_pathnames(filelist) for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: self.assertIn(expected, res) self.assertEqual(len(res), 3) def test_fn_items_for_ft(self): """Check filename_items_for_filetype.""" filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] ft_info = self.config['file_types']['ftype1'] fiter = self.reader.filename_items_for_filetype(filelist, ft_info) filenames = dict(fname for fname in fiter) self.assertEqual(len(filenames.keys()), 3) def test_create_filehandlers(self): 
"""Check create_filehandlers.""" filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] self.reader.create_filehandlers(filelist) self.assertEqual(len(self.reader.file_handlers['ftype1']), 3) class TestFileFileYAMLReader(unittest.TestCase): """Test units from FileYAMLReader.""" @patch('satpy.readers.yaml_reader.recursive_dict_update') @patch('satpy.readers.yaml_reader.yaml', spec=yr.yaml) def setUp(self, _, rec_up): # pylint: disable=arguments-differ """Prepare a reader instance with a fake config.""" patterns = ['a{something:3s}.bla'] res_dict = {'reader': {'name': 'fake', 'sensors': ['canon']}, 'file_types': {'ftype1': {'name': 'ft1', 'file_reader': BaseFileHandler, 'file_patterns': patterns}}, 'datasets': {'ch1': {'name': 'ch01', 'wavelength': [0.5, 0.6, 0.7], 'calibration': 'reflectance', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'ch2': {'name': 'ch02', 'wavelength': [0.7, 0.75, 0.8], 'calibration': 'counts', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'lons': {'name': 'lons', 'file_type': 'ftype2'}, 'lats': {'name': 'lats', 'file_type': 'ftype2'}}} rec_up.return_value = res_dict self.config = res_dict self.reader = yr.FileYAMLReader([__file__], filter_parameters={ 'start_time': datetime(2000, 1, 1), 'end_time': datetime(2000, 1, 2), }) def test_all_dataset_ids(self): """Check that all datasets ids are returned.""" self.assertSetEqual(set(self.reader.all_dataset_ids), {DatasetID(name='ch02', wavelength=(0.7, 0.75, 0.8), resolution=None, polarization=None, calibration='counts', modifiers=()), DatasetID(name='ch01', wavelength=(0.5, 0.6, 0.7), resolution=None, polarization=None, calibration='reflectance', modifiers=()), DatasetID(name='lons', wavelength=None, resolution=None, polarization=None, calibration=None, modifiers=()), DatasetID(name='lats', wavelength=None, resolution=None, polarization=None, calibration=None, modifiers=())}) def test_all_dataset_names(self): """Get all dataset names.""" self.assertSetEqual(self.reader.all_dataset_names, set(['ch01', 'ch02', 'lons', 'lats'])) def test_available_dataset_ids(self): """Get ids of the available datasets.""" loadables = self.reader.select_files_from_pathnames(['a001.bla']) self.reader.create_filehandlers(loadables) self.assertSetEqual(set(self.reader.available_dataset_ids), {DatasetID(name='ch02', wavelength=(0.7, 0.75, 0.8), resolution=None, polarization=None, calibration='counts', modifiers=()), DatasetID(name='ch01', wavelength=(0.5, 0.6, 0.7), resolution=None, polarization=None, calibration='reflectance', modifiers=())}) def test_available_dataset_names(self): """Get ids of the available datasets.""" loadables = self.reader.select_files_from_pathnames(['a001.bla']) self.reader.create_filehandlers(loadables) self.assertSetEqual(set(self.reader.available_dataset_names), set(["ch01", "ch02"])) def test_filter_fh_by_time(self): """Check filtering filehandlers by time.""" fh0 = FakeFH(datetime(1999, 12, 30), datetime(1999, 12, 31)) fh1 = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 1, 12, 30)) fh2 = FakeFH(datetime(2000, 1, 1, 10, 0), datetime(2000, 1, 1, 12, 30)) fh3 = FakeFH(datetime(2000, 1, 1, 12, 30), datetime(2000, 1, 2, 12, 30)) fh4 = FakeFH(datetime(2000, 1, 2, 12, 30), datetime(2000, 1, 3, 12, 30)) fh5 = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 3, 12, 30)) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, fh.end_time) # only the first one should be false self.assertEqual(res, idx 
not in [0, 4]) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, None) self.assertEqual(res, idx not in [0, 1, 4, 5]) @patch('satpy.readers.yaml_reader.get_area_def') @patch('satpy.readers.yaml_reader.AreaDefBoundary') @patch('satpy.readers.yaml_reader.Boundary') def test_file_covers_area(self, bnd, adb, gad): """Test that area coverage is checked properly.""" file_handler = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 3, 12, 30)) self.reader.filter_parameters['area'] = True bnd.return_value.contour_poly.intersection.return_value = True adb.return_value.contour_poly.intersection.return_value = True res = self.reader.check_file_covers_area(file_handler, True) self.assertTrue(res) bnd.return_value.contour_poly.intersection.return_value = False adb.return_value.contour_poly.intersection.return_value = False res = self.reader.check_file_covers_area(file_handler, True) self.assertFalse(res) file_handler.get_bounding_box.side_effect = NotImplementedError() self.reader.filter_parameters['area'] = True res = self.reader.check_file_covers_area(file_handler, True) self.assertTrue(res) def test_start_end_time(self): """Check start and end time behaviours.""" self.reader.file_handlers = {} def get_start_time(): return self.reader.start_time self.assertRaises(RuntimeError, get_start_time) def get_end_time(): return self.reader.end_time self.assertRaises(RuntimeError, get_end_time) fh0 = FakeFH(datetime(1999, 12, 30, 0, 0), datetime(1999, 12, 31, 0, 0)) fh1 = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 1, 12, 30)) fh2 = FakeFH(datetime(2000, 1, 1, 10, 0), datetime(2000, 1, 1, 12, 30)) fh3 = FakeFH(datetime(2000, 1, 1, 12, 30), datetime(2000, 1, 2, 12, 30)) fh4 = FakeFH(datetime(2000, 1, 2, 12, 30), datetime(2000, 1, 3, 12, 30)) fh5 = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 3, 12, 30)) self.reader.file_handlers = { '0': [fh1, fh2, fh3, fh4, fh5], '1': [fh0, fh1, fh2, fh3, fh4, fh5], '2': [fh2, fh3], } self.assertEqual(self.reader.start_time, datetime(1999, 12, 30, 0, 0)) self.assertEqual(self.reader.end_time, datetime(2000, 1, 3, 12, 30)) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] res = self.reader.select_files_from_pathnames(filelist) for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: self.assertIn(expected, res) self.assertEqual(0, len(self.reader.select_files_from_pathnames([]))) def test_select_from_directory(self): """Check select_files_from_directory.""" filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] dpath = mkdtemp() for fname in filelist: with open(os.path.join(dpath, fname), 'w'): pass res = self.reader.select_files_from_directory(dpath) for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: self.assertIn(os.path.join(dpath, expected), res) for fname in filelist: os.remove(os.path.join(dpath, fname)) self.assertEqual(0, len(self.reader.select_files_from_directory(dpath))) os.rmdir(dpath) def test_supports_sensor(self): """Check supports_sensor.""" self.assertTrue(self.reader.supports_sensor('canon')) self.assertFalse(self.reader.supports_sensor('nikon')) @patch('satpy.readers.yaml_reader.StackedAreaDefinition') def test_load_area_def(self, sad): """Test loading the area def for the reader.""" dsid = MagicMock() file_handlers = [] items = random.randrange(2, 10) for _i in range(items): file_handlers.append(MagicMock()) final_area = self.reader._load_area_def(dsid, 
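        # (StackedAreaDefinition is mocked here: _load_area_def should
        #  hand it one area per file handler and return the squeezed
        #  stacked result, which the assertions below verify)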
file_handlers) self.assertEqual(final_area, sad.return_value.squeeze.return_value) args, kwargs = sad.call_args self.assertEqual(len(args), items) def test_preferred_filetype(self): """Test finding the preferred filetype.""" self.reader.file_handlers = {'a': 'a', 'b': 'b', 'c': 'c'} self.assertEqual(self.reader._preferred_filetype(['c', 'a']), 'c') self.assertEqual(self.reader._preferred_filetype(['a', 'c']), 'a') self.assertEqual(self.reader._preferred_filetype(['d', 'e']), None) def test_get_coordinates_for_dataset_key(self): """Test getting coordinates for a key.""" ds_id = DatasetID(name='ch01', wavelength=(0.5, 0.6, 0.7), resolution=None, polarization=None, calibration='reflectance', modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) self.assertListEqual(res, [DatasetID(name='lons', wavelength=None, resolution=None, polarization=None, calibration=None, modifiers=()), DatasetID(name='lats', wavelength=None, resolution=None, polarization=None, calibration=None, modifiers=())]) def test_get_coordinates_for_dataset_key_without(self): """Test getting coordinates for a key without coordinates.""" ds_id = DatasetID(name='lons', wavelength=None, resolution=None, polarization=None, calibration=None, modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) self.assertListEqual(res, []) def test_get_coordinates_for_dataset_keys(self): """Test getting coordinates for keys.""" ds_id1 = DatasetID(name='ch01', wavelength=(0.5, 0.6, 0.7), resolution=None, polarization=None, calibration='reflectance', modifiers=()) ds_id2 = DatasetID(name='ch02', wavelength=(0.7, 0.75, 0.8), resolution=None, polarization=None, calibration='counts', modifiers=()) lons = DatasetID(name='lons', wavelength=None, resolution=None, polarization=None, calibration=None, modifiers=()) lats = DatasetID(name='lats', wavelength=None, resolution=None, polarization=None, calibration=None, modifiers=()) res = self.reader._get_coordinates_for_dataset_keys([ds_id1, ds_id2, lons]) expected = {ds_id1: [lons, lats], ds_id2: [lons, lats], lons: []} self.assertDictEqual(res, expected) def test_get_file_handlers(self): """Test getting filehandler to load a dataset.""" ds_id1 = DatasetID(name='ch01', wavelength=(0.5, 0.6, 0.7), resolution=None, polarization=None, calibration='reflectance', modifiers=()) self.reader.file_handlers = {'ftype1': 'bla'} self.assertEqual(self.reader._get_file_handlers(ds_id1), 'bla') lons = DatasetID(name='lons', wavelength=None, resolution=None, polarization=None, calibration=None, modifiers=()) self.assertEqual(self.reader._get_file_handlers(lons), None) @patch('satpy.readers.yaml_reader.xr') def test_load_entire_dataset(self, xarray): """Check loading an entire dataset.""" file_handlers = [FakeFH(None, None), FakeFH(None, None), FakeFH(None, None), FakeFH(None, None)] proj = self.reader._load_dataset(None, {}, file_handlers) self.assertIs(proj, xarray.concat.return_value) class TestFileFileYAMLReaderMultipleFileTypes(unittest.TestCase): """Test units from FileYAMLReader with multiple file types.""" @patch('satpy.readers.yaml_reader.recursive_dict_update') @patch('satpy.readers.yaml_reader.yaml', spec=yr.yaml) def setUp(self, _, rec_up): # pylint: disable=arguments-differ """Prepare a reader instance with a fake config.""" # Example: GOES netCDF data # a) From NOAA CLASS: ftype1, including coordinates # b) From EUMETSAT: ftype2, coordinates in extra file (ftype3) # # For test completeness add one channel (ch3) which is only available # in ftype1. 
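        # As a point of orientation, the fake config dict below mimics
        # what yaml.load would return for a reader YAML roughly of this
        # (hypothetical, abbreviated) shape:
        #
        #     reader:
        #       name: fake
        #       sensors: [canon]
        #     file_types:
        #       ftype1:
        #         file_patterns: ['a.nc']
        #       ftype2:
        #         file_patterns: ['b.nc']
        #       ftype3:
        #         file_patterns: ['geo.nc']
        #     datasets:
        #       ch1:
        #         name: ch01
        #         file_type: [ftype1, ftype2]
        #         coordinates: [lons, lats]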
patterns1 = ['a.nc'] patterns2 = ['b.nc'] patterns3 = ['geo.nc'] res_dict = {'reader': {'name': 'fake', 'sensors': ['canon']}, 'file_types': {'ftype1': {'name': 'ft1', 'file_patterns': patterns1}, 'ftype2': {'name': 'ft2', 'file_patterns': patterns2}, 'ftype3': {'name': 'ft3', 'file_patterns': patterns3}}, 'datasets': {'ch1': {'name': 'ch01', 'wavelength': [0.5, 0.6, 0.7], 'calibration': 'reflectance', 'file_type': ['ftype1', 'ftype2'], 'coordinates': ['lons', 'lats']}, 'ch2': {'name': 'ch02', 'wavelength': [0.7, 0.75, 0.8], 'calibration': 'counts', 'file_type': ['ftype1', 'ftype2'], 'coordinates': ['lons', 'lats']}, 'ch3': {'name': 'ch03', 'wavelength': [0.8, 0.85, 0.9], 'calibration': 'counts', 'file_type': 'ftype1', 'coordinates': ['lons', 'lats']}, 'lons': {'name': 'lons', 'file_type': ['ftype1', 'ftype3']}, 'lats': {'name': 'lats', 'file_type': ['ftype1', 'ftype3']}}} rec_up.return_value = res_dict self.config = res_dict self.reader = yr.FileYAMLReader([__file__]) def test_update_ds_ids_from_file_handlers(self): """Test updating existing dataset IDs with information from the file.""" from functools import partial orig_ids = self.reader.all_ids def available_datasets(self, configured_datasets=None): res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: new_info = ds_info.copy() new_info['resolution'] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info def file_type_matches(self, ds_ftype): if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']: return True elif self.filetype_info['file_type'] in ds_ftype: return True return None for ftype, resol in zip(('ftype1', 'ftype2'), (1, 2)): # need to copy this because the dataset infos will be modified _orig_ids = {key: val.copy() for key, val in orig_ids.items()} with patch.dict(self.reader.all_ids, _orig_ids, clear=True), \ patch.dict(self.reader.available_ids, {}, clear=True): # Add a file handler with resolution property fh = MagicMock(filetype_info={'file_type': ftype}, resolution=resol) fh.available_datasets = partial(available_datasets, fh) fh.file_type_matches = partial(file_type_matches, fh) self.reader.file_handlers = { ftype: [fh]} # Update existing dataset IDs with resolution property from # the file handler self.reader.update_ds_ids_from_file_handlers() # Make sure the resolution property has been transferred # correctly from the file handler to the dataset ID for ds_id, ds_info in self.reader.all_ids.items(): file_types = ds_info['file_type'] if not isinstance(file_types, list): file_types = [file_types] expected = resol if ftype in file_types else None self.assertEqual(expected, ds_id.resolution) class TestGEOSegmentYAMLReader(unittest.TestCase): """Test GEOSegmentYAMLReader.""" def setUp(self): """Add setup for GEOSegmentYAMLReader.""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader GEOSegmentYAMLReader.__bases__ = (MagicMock, ) self.reader = GEOSegmentYAMLReader() def test_get_expected_segments(self): """Test that expected segments can come from the filename.""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader cfh = MagicMock() # Hacky: This is setting an attribute on the MagicMock *class* # not on a MagicMock instance GEOSegmentYAMLReader.__bases__[0].create_filehandlers = cfh fake_fh = MagicMock() fake_fh.filename_info = {} fake_fh.filetype_info = {} 
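        # The cases below pin down the precedence rules for the number
        # of expected segments: filetype_info['expected_segments'] (from
        # the reader YAML) overrides filename_info['total_segments']
        # (parsed from the file name), which in turn overrides the
        # default of 1.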
cfh.return_value = {'ft1': [fake_fh]} reader = GEOSegmentYAMLReader() # default (1) created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filetype_info['expected_segments'] self.assertEqual(es, 1) # YAML defined for each file type fake_fh.filetype_info['expected_segments'] = 2 created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filetype_info['expected_segments'] self.assertEqual(es, 2) # defined both in the filename and the YAML metadata # YAML has priority fake_fh.filename_info = {'total_segments': 3} fake_fh.filetype_info = {'expected_segments': 2} created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filetype_info['expected_segments'] self.assertEqual(es, 2) # defined in the filename fake_fh.filename_info = {'total_segments': 3} fake_fh.filetype_info = {} created_fhs = reader.create_filehandlers(['fake.nc']) es = created_fhs['ft1'][0].filetype_info['expected_segments'] self.assertEqual(es, 3) # undo the hacky-ness del GEOSegmentYAMLReader.__bases__[0].create_filehandlers @patch('satpy.readers.yaml_reader.FileYAMLReader._load_dataset') @patch('satpy.readers.yaml_reader.xr') @patch('satpy.readers.yaml_reader._find_missing_segments') def test_load_dataset(self, mss, xr, parent_load_dataset): """Test _load_dataset().""" # Projectable is None mss.return_value = [0, 0, 0, False, None] with self.assertRaises(KeyError): res = self.reader._load_dataset(None, None, None) # Failure is True mss.return_value = [0, 0, 0, True, 0] with self.assertRaises(KeyError): res = self.reader._load_dataset(None, None, None) # Setup input, and output of mocked functions counter = 9 expected_segments = 8 seg = MagicMock(dims=['y', 'x']) slice_list = expected_segments * [seg, ] failure = False projectable = MagicMock() mss.return_value = (counter, expected_segments, slice_list, failure, projectable) empty_segment = MagicMock() xr.full_like.return_value = empty_segment concat_slices = MagicMock() xr.concat.return_value = concat_slices dsid = MagicMock() ds_info = MagicMock() file_handlers = MagicMock() # No missing segments res = self.reader._load_dataset(dsid, ds_info, file_handlers) self.assertTrue(res.attrs is file_handlers[0].combine_info.return_value) self.assertTrue(empty_segment not in slice_list) # One missing segment in the middle slice_list[4] = None counter = 8 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = self.reader._load_dataset(dsid, ds_info, file_handlers) self.assertTrue(slice_list[4] is empty_segment) # The last segment is missing slice_list = expected_segments * [seg, ] slice_list[-1] = None counter = 8 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = self.reader._load_dataset(dsid, ds_info, file_handlers) self.assertTrue(slice_list[-1] is empty_segment) # The last two segments are missing slice_list = expected_segments * [seg, ] slice_list[-1] = None counter = 7 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = self.reader._load_dataset(dsid, ds_info, file_handlers) self.assertTrue(slice_list[-1] is empty_segment) self.assertTrue(slice_list[-2] is empty_segment) # The first segment is missing slice_list = expected_segments * [seg, ] slice_list[0] = None counter = 9 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = self.reader._load_dataset(dsid, ds_info, file_handlers) self.assertTrue(slice_list[0] is empty_segment) # The first two segments are missing 
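        # (as in the earlier cases, every missing position in slice_list
        #  should be replaced by the xr.full_like()-based empty segment)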
slice_list = expected_segments * [seg, ] slice_list[0] = None slice_list[1] = None counter = 9 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = self.reader._load_dataset(dsid, ds_info, file_handlers) self.assertTrue(slice_list[0] is empty_segment) self.assertTrue(slice_list[1] is empty_segment) # Disable padding res = self.reader._load_dataset(dsid, ds_info, file_handlers, pad_data=False) parent_load_dataset.assert_called_once_with(dsid, ds_info, file_handlers) @patch('satpy.readers.yaml_reader._load_area_def') @patch('satpy.readers.yaml_reader._stack_area_defs') @patch('satpy.readers.yaml_reader._pad_earlier_segments_area') @patch('satpy.readers.yaml_reader._pad_later_segments_area') def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): """Test _load_area_def().""" dsid = MagicMock() file_handlers = MagicMock() self.reader._load_area_def(dsid, file_handlers) pesa.assert_called_once() plsa.assert_called_once() sad.assert_called_once() parent_load_area_def.assert_not_called() # Disable padding self.reader._load_area_def(dsid, file_handlers, pad_data=False) parent_load_area_def.assert_called_once_with(dsid, file_handlers) @patch('satpy.readers.yaml_reader.AreaDefinition') def test_pad_later_segments_area(self, AreaDefinition): """Test _pad_later_segments_area().""" from satpy.readers.yaml_reader import _pad_later_segments_area as plsa seg1_area = MagicMock() seg1_area.proj_dict = 'proj_dict' seg1_area.area_extent = [0, 1000, 200, 500] seg1_area.shape = [200, 500] get_area_def = MagicMock() get_area_def.return_value = seg1_area fh_1 = MagicMock() filetype_info = {'expected_segments': 2} filename_info = {'segment': 1} fh_1.filetype_info = filetype_info fh_1.filename_info = filename_info fh_1.get_area_def = get_area_def file_handlers = [fh_1] dsid = 'dsid' res = plsa(file_handlers, dsid) self.assertEqual(len(res), 2) seg2_extent = (0, 1500, 200, 1000) expected_call = ('fill', 'fill', 'fill', 'proj_dict', 500, 200, seg2_extent) AreaDefinition.assert_called_once_with(*expected_call) @patch('satpy.readers.yaml_reader.AreaDefinition') def test_pad_earlier_segments_area(self, AreaDefinition): """Test _pad_earlier_segments_area().""" from satpy.readers.yaml_reader import _pad_earlier_segments_area as pesa seg2_area = MagicMock() seg2_area.proj_dict = 'proj_dict' seg2_area.area_extent = [0, 1000, 200, 500] seg2_area.shape = [200, 500] get_area_def = MagicMock() get_area_def.return_value = seg2_area fh_2 = MagicMock() filetype_info = {'expected_segments': 2} filename_info = {'segment': 2} fh_2.filetype_info = filetype_info fh_2.filename_info = filename_info fh_2.get_area_def = get_area_def file_handlers = [fh_2] dsid = 'dsid' area_defs = {2: seg2_area} res = pesa(file_handlers, dsid, area_defs) self.assertEqual(len(res), 2) seg1_extent = (0, 500, 200, 0) expected_call = ('fill', 'fill', 'fill', 'proj_dict', 500, 200, seg1_extent) AreaDefinition.assert_called_once_with(*expected_call) def test_find_missing_segments(self): """Test _find_missing_segments().""" from satpy.readers.yaml_reader import _find_missing_segments as fms # Dataset with only one segment filename_info = {'segment': 1} fh_seg1 = MagicMock(filename_info=filename_info) projectable = 'projectable' get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg1.get_dataset = get_dataset file_handlers = [fh_seg1] ds_info = {'file_type': []} dsid = 'dsid' res = fms(file_handlers, ds_info, dsid) counter, expected_segments, slice_list, failure, proj = res self.assertEqual(counter, 
2) self.assertEqual(expected_segments, 1) self.assertTrue(projectable in slice_list) self.assertFalse(failure) self.assertTrue(proj is projectable) # Three expected segments, first and last missing filename_info = {'segment': 2} filetype_info = {'expected_segments': 3, 'file_type': 'foo'} fh_seg2 = MagicMock(filename_info=filename_info, filetype_info=filetype_info) projectable = 'projectable' get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg2.get_dataset = get_dataset file_handlers = [fh_seg2] ds_info = {'file_type': ['foo']} dsid = 'dsid' res = fms(file_handlers, ds_info, dsid) counter, expected_segments, slice_list, failure, proj = res self.assertEqual(counter, 3) self.assertEqual(expected_segments, 3) self.assertEqual(slice_list, [None, projectable, None]) self.assertFalse(failure) self.assertTrue(proj is projectable) def suite(): """Create test suite for the yaml reader module.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestUtils)) mysuite.addTest(loader.loadTestsFromTestCase(TestFileFileYAMLReader)) mysuite.addTest(loader.loadTestsFromTestCase( TestFileFileYAMLReaderMultiplePatterns)) mysuite.addTest(loader.loadTestsFromTestCase( TestFileFileYAMLReaderMultipleFileTypes)) mysuite.addTest(loader.loadTestsFromTestCase(TestGEOSegmentYAMLReader)) return mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/tests/utils.py000066400000000000000000000354701362525524100167630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Utilities for various satpy tests.""" from datetime import datetime from satpy.readers.yaml_reader import FileYAMLReader try: from unittest import mock except ImportError: import mock def spy_decorator(method_to_decorate): """Fancy decorate to wrap an object while still calling it. See https://stackoverflow.com/a/41599695/433202 """ tmp_mock = mock.MagicMock() def wrapper(self, *args, **kwargs): tmp_mock(*args, **kwargs) return method_to_decorate(self, *args, **kwargs) wrapper.mock = tmp_mock return wrapper def convert_file_content_to_data_array(file_content, attrs=tuple(), dims=('z', 'y', 'x')): """Help old reader tests that still use numpy arrays. A lot of old reader tests still use numpy arrays and depend on the "var_name/attr/attr_name" convention established before Satpy used xarray and dask. While these conventions are still used and should be supported, readers need to use xarray DataArrays instead. If possible, new tests should be based on pure DataArray objects instead of the "var_name/attr/attr_name" style syntax provided by the utility file handlers. Args: file_content (dict): Dictionary of string file keys to fake file data. attrs (iterable): Series of attributes to copy to DataArray object from file content dictionary. Defaults to no attributes. 
dims (iterable): Dimension names to use for resulting DataArrays. The second to last dimension is used for 1D arrays, so for dims of ``('z', 'y', 'x')`` this would use ``'y'``. Otherwise, the dimensions are used starting with the last, so 2D arrays are ``('y', 'x')`` Dimensions are used in reverse order so the last dimension specified is used as the only dimension for 1D arrays and the last dimension for other arrays. """ from xarray import DataArray import dask.array as da import numpy as np for key, val in file_content.items(): da_attrs = {} for a in attrs: if key + '/attr/' + a in file_content: da_attrs[a] = file_content[key + '/attr/' + a] if isinstance(val, np.ndarray): val = da.from_array(val, chunks=4096) if val.ndim == 1: da_dims = dims[-2] elif val.ndim > 1: da_dims = tuple(dims[-val.ndim:]) else: da_dims = None file_content[key] = DataArray(val, dims=da_dims, attrs=da_attrs) def test_datasets(): """Get list of various test datasets.""" from satpy import DatasetID d = [ DatasetID(name='ds1'), DatasetID(name='ds2'), DatasetID(name='ds3'), DatasetID(name='ds4', calibration='reflectance'), DatasetID(name='ds4', calibration='radiance'), DatasetID(name='ds5', resolution=250), DatasetID(name='ds5', resolution=500), DatasetID(name='ds5', resolution=1000), DatasetID(name='ds6', wavelength=(0.1, 0.2, 0.3)), DatasetID(name='ds7', wavelength=(0.4, 0.5, 0.6)), DatasetID(name='ds8', wavelength=(0.7, 0.8, 0.9)), DatasetID(name='ds9_fail_load', wavelength=(1.0, 1.1, 1.2)), DatasetID(name='ds10', wavelength=(0.75, 0.85, 0.95)), DatasetID(name='ds11', resolution=500), DatasetID(name='ds11', resolution=1000), DatasetID(name='ds12', resolution=500), DatasetID(name='ds12', resolution=1000), ] return d def _create_fake_compositor(ds_id, prereqs, opt_prereqs): import numpy as np from xarray import DataArray c = mock.MagicMock() c.attrs = { 'prerequisites': tuple(prereqs), 'optional_prerequisites': tuple(opt_prereqs), } # special case c.attrs.update(ds_id.to_dict()) c.id = ds_id se = mock.MagicMock() def _se(datasets, optional_datasets=None, ds_id=ds_id, **kwargs): if ds_id.name == 'comp14': # used as a test when composites update the dataset id with # information from prereqs ds_id = ds_id._replace(resolution=555) if len(datasets) != len(prereqs): raise ValueError("Not enough prerequisite datasets passed") return DataArray(data=np.arange(75).reshape(5, 5, 3), attrs=ds_id.to_dict(), dims=['y', 'x', 'bands'], coords={'bands': ['R', 'G', 'B']}) se.side_effect = _se c.side_effect = se return c def _create_fake_modifiers(name, prereqs, opt_prereqs): import numpy as np from xarray import DataArray from satpy.composites import CompositeBase, IncompatibleAreas from satpy import DatasetID attrs = { 'name': name, 'prerequisites': tuple(prereqs), 'optional_prerequisites': tuple(opt_prereqs) } def _mod_loader(*args, **kwargs): class FakeMod(CompositeBase): def __init__(self, *args, **kwargs): super(FakeMod, self).__init__(*args, **kwargs) def __call__(self, datasets, optional_datasets, **info): if self.attrs['optional_prerequisites']: for opt_dep in self.attrs['optional_prerequisites']: if 'NOPE' in opt_dep or 'fail' in opt_dep: continue assert optional_datasets is not None and \ len(optional_datasets) resolution = DatasetID.from_dict(datasets[0].attrs).resolution if name == 'res_change' and resolution is not None: i = datasets[0].attrs.copy() i['resolution'] *= 5 elif 'incomp_areas' in name: raise IncompatibleAreas( "Test modifier 'incomp_areas' always raises IncompatibleAreas") else: i = datasets[0].attrs info = 
datasets[0].attrs.copy() self.apply_modifier_info(i, info) return DataArray(np.ma.MaskedArray(datasets[0]), attrs=info) m = FakeMod(*args, **kwargs) # m.attrs = attrs m._call_mock = mock.patch.object( FakeMod, '__call__', wraps=m.__call__).start() return m return _mod_loader, attrs def test_composites(sensor_name): """Create some test composites.""" from satpy import DatasetID, DatasetDict # Composite ID -> (prereqs, optional_prereqs) comps = { DatasetID(name='comp1'): (['ds1'], []), DatasetID(name='comp2'): (['ds1', 'ds2'], []), DatasetID(name='comp3'): (['ds1', 'ds2', 'ds3'], []), DatasetID(name='comp4'): (['comp2', 'ds3'], []), DatasetID(name='comp5'): (['ds1', 'ds2'], ['ds3']), DatasetID(name='comp6'): (['ds1', 'ds2'], ['comp2']), DatasetID(name='comp7'): (['ds1', 'comp2'], ['ds2']), DatasetID(name='comp8'): (['ds_NOPE', 'comp2'], []), DatasetID(name='comp9'): (['ds1', 'comp2'], ['ds_NOPE']), DatasetID(name='comp10'): ([DatasetID('ds1', modifiers=('mod1',)), 'comp2'], []), DatasetID(name='comp11'): ([0.22, 0.48, 0.85], []), DatasetID(name='comp12'): ([DatasetID(wavelength=0.22, modifiers=('mod1',)), DatasetID(wavelength=0.48, modifiers=('mod1',)), DatasetID(wavelength=0.85, modifiers=('mod1',))], []), DatasetID(name='comp13'): ([DatasetID(name='ds5', modifiers=('res_change',))], []), DatasetID(name='comp14'): (['ds1'], []), DatasetID(name='comp15'): (['ds1', 'ds9_fail_load'], []), DatasetID(name='comp16'): (['ds1'], ['ds9_fail_load']), DatasetID(name='comp17'): (['ds1', 'comp15'], []), DatasetID(name='comp18'): (['ds3', DatasetID(name='ds4', modifiers=('mod1', 'mod3',)), DatasetID(name='ds5', modifiers=('mod1', 'incomp_areas'))], []), DatasetID(name='comp18_2'): (['ds3', DatasetID(name='ds4', modifiers=('mod1', 'mod3',)), DatasetID(name='ds5', modifiers=('mod1', 'incomp_areas_opt'))], []), DatasetID(name='comp19'): ([DatasetID('ds5', modifiers=('res_change',)), 'comp13', 'ds2'], []), DatasetID(name='comp20'): ([DatasetID(name='ds5', modifiers=('mod_opt_prereq',))], []), DatasetID(name='comp21'): ([DatasetID(name='ds5', modifiers=('mod_bad_opt',))], []), DatasetID(name='comp22'): ([DatasetID(name='ds5', modifiers=('mod_opt_only',))], []), DatasetID(name='comp23'): ([0.8], []), DatasetID(name='static_image'): ([], []), DatasetID(name='comp24', resolution=500): ([DatasetID(name='ds11', resolution=500), DatasetID(name='ds12', resolution=500)], []), DatasetID(name='comp24', resolution=1000): ([DatasetID(name='ds11', resolution=1000), DatasetID(name='ds12', resolution=1000)], []), DatasetID(name='comp25', resolution=500): ([DatasetID(name='comp24', resolution=500), DatasetID(name='ds5', resolution=500)], []), DatasetID(name='comp25', resolution=1000): ([DatasetID(name='comp24', resolution=1000), DatasetID(name='ds5', resolution=1000)], []), } # Modifier name -> (prereqs (not including to-be-modified), opt_prereqs) mods = { 'mod1': (['ds2'], []), 'mod2': (['comp3'], []), 'mod3': (['ds2'], []), 'res_change': ([], []), 'incomp_areas': (['ds1'], []), 'incomp_areas_opt': ([DatasetID(name='ds1', modifiers=('incomp_areas',))], ['ds2']), 'mod_opt_prereq': (['ds1'], ['ds2']), 'mod_bad_opt': (['ds1'], ['ds9_fail_load']), 'mod_opt_only': ([], ['ds2']), 'mod_wl': ([DatasetID(wavelength=0.2, modifiers=('mod1',))], []), } comps = {sensor_name: DatasetDict((k, _create_fake_compositor(k, *v)) for k, v in comps.items())} mods = {sensor_name: dict((k, _create_fake_modifiers(k, *v)) for k, v in mods.items())} return comps, mods def _filter_datasets(all_ds, names_or_ids): """Help filtering DatasetIDs by name or 
DatasetID.""" # DatasetID will match a str to the name # need to separate them out str_filter = [ds_name for ds_name in names_or_ids if isinstance(ds_name, str)] id_filter = [ds_id for ds_id in names_or_ids if not isinstance(ds_id, str)] for ds_id in all_ds: if ds_id in id_filter or ds_id.name in str_filter: yield ds_id class FakeReader(FileYAMLReader): """Fake reader to make testing basic Scene/reader functionality easier.""" def __init__(self, name, sensor_name='fake_sensor', datasets=None, available_datasets=None, start_time=None, end_time=None, filter_datasets=True): """Initialize reader and mock necessary properties and methods. By default any 'datasets' provided will be filtered by what datasets are configured at the top of this module in 'test_datasets'. This can be disabled by specifying `filter_datasets=False`. """ with mock.patch('satpy.readers.yaml_reader.recursive_dict_update') as rdu, \ mock.patch('satpy.readers.yaml_reader.open'), \ mock.patch('satpy.readers.yaml_reader.yaml.load'): rdu.return_value = {'reader': {'name': name}, 'file_types': {}} super(FakeReader, self).__init__(['fake.yaml']) if start_time is None: start_time = datetime.utcnow() self._start_time = start_time if end_time is None: end_time = start_time self._end_time = end_time self._sensor_name = set([sensor_name]) all_ds = test_datasets() if datasets is not None and filter_datasets: all_ds = list(_filter_datasets(all_ds, datasets)) elif datasets: all_ds = datasets if available_datasets is not None: available_datasets = list(_filter_datasets(all_ds, available_datasets)) else: available_datasets = all_ds self.all_ids = {ds_id: {} for ds_id in all_ds} self.available_ids = {ds_id: {} for ds_id in available_datasets} # Wrap load method in mock object so we can record call information self.load = mock.patch.object(self, 'load', wraps=self.load).start() @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time @property def sensor_names(self): """Get the sensor names.""" return self._sensor_name def load(self, dataset_keys): """Load some data.""" from satpy import DatasetDict from xarray import DataArray import numpy as np dataset_ids = self.all_ids.keys() loaded_datasets = DatasetDict() for k in dataset_keys: if k == 'ds9_fail_load': continue for ds in dataset_ids: if ds == k: loaded_datasets[ds] = DataArray(data=np.arange(25).reshape(5, 5), attrs=ds.to_dict(), dims=['y', 'x']) return loaded_datasets class CustomScheduler(object): """Scheduler raising an exception if data are computed too many times.""" def __init__(self, max_computes=1): """Set starting and maximum compute counts.""" self.max_computes = max_computes self.total_computes = 0 def __call__(self, dsk, keys, **kwargs): """Compute dask task and keep track of number of times we do so.""" import dask self.total_computes += 1 if self.total_computes > self.max_computes: raise RuntimeError("Too many dask computations were scheduled: " "{}".format(self.total_computes)) return dask.get(dsk, keys, **kwargs) satpy-0.20.0/satpy/tests/writer_tests/000077500000000000000000000000001362525524100177765ustar00rootroot00000000000000satpy-0.20.0/satpy/tests/writer_tests/__init__.py000066400000000000000000000032161362525524100221110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The writer tests package.""" import sys from satpy.tests.writer_tests import (test_cf, test_geotiff, test_simple_image, test_scmi, test_mitiff, test_utils, test_ninjotiff, ) if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest def suite(): """Test suite for all writer tests.""" mysuite = unittest.TestSuite() mysuite.addTests(test_cf.suite()) mysuite.addTests(test_geotiff.suite()) mysuite.addTests(test_ninjotiff.suite()) mysuite.addTests(test_simple_image.suite()) mysuite.addTests(test_scmi.suite()) mysuite.addTests(test_mitiff.suite()) mysuite.addTests(test_utils.suite()) return mysuite satpy-0.20.0/satpy/tests/writer_tests/test_cf.py000066400000000000000000001255401362525524100220060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
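# The cases below exercise CFWriter end to end: saving arrays and
# coordinates, compression/encoding keyword arguments, netCDF groups,
# time bounds, header attributes, and the attribute encoding helpers.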
"""Tests for the CF writer.""" from collections import OrderedDict import os import sys from datetime import datetime import tempfile from satpy import DatasetID import numpy as np if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock try: from pyproj import CRS except ImportError: CRS = None class TempFile(object): """A temporary filename class.""" def __init__(self): """Initialize.""" self.filename = None def __enter__(self): """Enter.""" self.handle, self.filename = tempfile.mkstemp() os.close(self.handle) return self.filename def __exit__(self, *args): """Exit.""" os.remove(self.filename) class TestCFWriter(unittest.TestCase): """Test case for CF writer.""" def test_init(self): """Test initializing the CFWriter class.""" from satpy.writers.cf_writer import CFWriter import satpy.config CFWriter(config_files=[os.path.join(satpy.config.CONFIG_PATH, 'writers', 'cf.yaml')]) def test_save_array(self): """Test saving an array to netcdf/cf.""" from satpy import Scene import xarray as xr scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) scn['test-array'] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[DatasetID('hej')])) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: self.assertTrue(np.all(f['test-array'][:] == [1, 2, 3])) expected_prereq = ("DatasetID(name='hej', wavelength=None, " "resolution=None, polarization=None, " "calibration=None, level=None, modifiers=())") self.assertEqual(f['test-array'].attrs['prerequisites'], expected_prereq) def test_save_with_compression(self): """Test saving an array with compression.""" from satpy import Scene import xarray as xr scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) with mock.patch('satpy.writers.cf_writer.xr.Dataset') as xrdataset,\ mock.patch('satpy.writers.cf_writer.make_time_bounds'): scn['test-array'] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[DatasetID('hej')])) comp = {'zlib': True, 'complevel': 9} scn.save_datasets(filename='bla', writer='cf', compression=comp) ars, kws = xrdataset.call_args_list[1] self.assertDictEqual(ars[0]['test-array'].encoding, comp) def test_save_array_coords(self): """Test saving array with coordinates.""" from satpy import Scene import xarray as xr import numpy as np scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) coords = { 'x': np.arange(3), 'y': np.arange(1), } if CRS is not None: proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 ' '+a=6378137.0 +b=6356752.31414 +sweep=x ' '+units=m +no_defs') coords['crs'] = CRS.from_string(proj_str) scn['test-array'] = xr.DataArray([[1, 2, 3]], dims=('y', 'x'), coords=coords, attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[DatasetID('hej')])) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: self.assertTrue(np.all(f['test-array'][:] == [1, 2, 3])) self.assertTrue(np.all(f['x'][:] == [0, 1, 2])) self.assertTrue(np.all(f['y'][:] == [0])) self.assertNotIn('crs', f) self.assertNotIn('_FillValue', f['x'].attrs) self.assertNotIn('_FillValue', f['y'].attrs) expected_prereq = ("DatasetID(name='hej', wavelength=None, " "resolution=None, polarization=None, " "calibration=None, level=None, modifiers=())") 
self.assertEqual(f['test-array'].attrs['prerequisites'], expected_prereq) def test_groups(self): """Test creating a file with groups.""" import xarray as xr from satpy import Scene tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) data_visir = [[1, 2], [3, 4]] y_visir = [1, 2] x_visir = [1, 2] time_vis006 = [1, 2] time_ir_108 = [3, 4] data_hrv = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] y_hrv = [1, 2, 3] x_hrv = [1, 2, 3] time_hrv = [1, 2, 3] scn = Scene() scn['VIS006'] = xr.DataArray(data_visir, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, attrs={'name': 'VIS006', 'start_time': tstart, 'end_time': tend}) scn['IR_108'] = xr.DataArray(data_visir, dims=('y', 'x'), coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_ir_108)}, attrs={'name': 'IR_108', 'start_time': tstart, 'end_time': tend}) scn['HRV'] = xr.DataArray(data_hrv, dims=('y', 'x'), coords={'y': y_hrv, 'x': x_hrv, 'acq_time': ('y', time_hrv)}, attrs={'name': 'HRV', 'start_time': tstart, 'end_time': tend}) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', groups={'visir': ['IR_108', 'VIS006'], 'hrv': ['HRV']}, pretty=True) nc_root = xr.open_dataset(filename) self.assertIn('history', nc_root.attrs) self.assertSetEqual(set(nc_root.variables.keys()), set()) nc_visir = xr.open_dataset(filename, group='visir') nc_hrv = xr.open_dataset(filename, group='hrv') self.assertSetEqual(set(nc_visir.variables.keys()), {'VIS006', 'IR_108', 'y', 'x', 'VIS006_acq_time', 'IR_108_acq_time'}) self.assertSetEqual(set(nc_hrv.variables.keys()), {'HRV', 'y', 'x', 'acq_time'}) for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'], nc_hrv['HRV']], [scn['VIS006'], scn['IR_108'], scn['HRV']]): self.assertTrue(np.all(tst.data == ref.data)) nc_root.close() nc_visir.close() nc_hrv.close() # Different projection coordinates in one group are not supported with TempFile() as filename: self.assertRaises(ValueError, scn.save_datasets, datasets=['VIS006', 'HRV'], filename=filename, writer='cf') def test_single_time_value(self): """Test setting a single time value.""" from satpy import Scene import xarray as xr scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn['test-array'] = xr.DataArray(test_array, dims=['x', 'y'], coords={'time': np.datetime64('2018-05-30T10:05:00')}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f['time'], scn['test-array']['time']) bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) def test_bounds(self): """Test setting time bounds.""" from satpy import Scene import xarray as xr scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) scn['test-array'] = xr.DataArray(test_array, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') # Check decoded time coordinates & bounds with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) 
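                # per the CF conventions, the time coordinate advertises
                # its bounds variable through a 'bounds' attribute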
self.assertEqual(f['time'].attrs['bounds'], 'time_bnds') # Check raw time coordinates & bounds with xr.open_dataset(filename, decode_cf=False) as f: np.testing.assert_almost_equal(f['time_bnds'], [[-0.0034722, 0.0069444]]) # User-specified time encoding should have preference with TempFile() as filename: time_units = 'seconds since 2018-01-01' scn.save_datasets(filename=filename, encoding={'time': {'units': time_units}}, writer='cf') with xr.open_dataset(filename, decode_cf=False) as f: np.testing.assert_array_equal(f['time_bnds'], [[12909600, 12910500]]) def test_bounds_minimum(self): """Test minimum bounds.""" from satpy import Scene import xarray as xr scn = Scene() start_timeA = datetime(2018, 5, 30, 10, 0) # expected to be used end_timeA = datetime(2018, 5, 30, 10, 20) start_timeB = datetime(2018, 5, 30, 10, 3) end_timeB = datetime(2018, 5, 30, 10, 15) # expected to be used test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn['test-arrayA'] = xr.DataArray(test_arrayA, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn['test-arrayB'] = xr.DataArray(test_arrayB, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeB]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) def test_bounds_missing_time_info(self): """Test time bounds generation in case of missing time.""" from satpy import Scene import xarray as xr scn = Scene() start_timeA = datetime(2018, 5, 30, 10, 0) end_timeA = datetime(2018, 5, 30, 10, 15) test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn['test-arrayA'] = xr.DataArray(test_arrayA, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn['test-arrayB'] = xr.DataArray(test_arrayB, dims=['x', 'y', 'time'], coords={'time': [np.datetime64('2018-05-30T10:05:00')]}) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeA]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) def test_encoding_kwarg(self): """Test 'encoding' keyword argument.""" from satpy import Scene import xarray as xr scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) scn['test-array'] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: encoding = {'test-array': {'dtype': 'int8', 'scale_factor': 0.1, 'add_offset': 0.0, '_FillValue': 3}} scn.save_datasets(filename=filename, encoding=encoding, writer='cf') with xr.open_dataset(filename, mask_and_scale=False) as f: self.assertTrue(np.all(f['test-array'][:] == [10, 20, 30])) self.assertTrue(f['test-array'].attrs['scale_factor'] == 0.1) self.assertTrue(f['test-array'].attrs['_FillValue'] == 3) # check that dtype behave as int8 self.assertTrue(np.iinfo(f['test-array'][:].dtype).max == 127) def test_unlimited_dims_kwarg(self): """Test specification of unlimited dimensions.""" from satpy import Scene import xarray as xr 
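        # An unlimited (record) netCDF dimension can grow when data are
        # appended; xarray reports which dimensions were written as
        # unlimited via f.encoding['unlimited_dims'], which is what the
        # assertion at the end of this test inspects.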
scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn['test-array'] = xr.DataArray(test_array, dims=['x', 'y'], coords={'time': np.datetime64('2018-05-30T10:05:00')}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', unlimited_dims=['time']) with xr.open_dataset(filename) as f: self.assertSetEqual(f.encoding['unlimited_dims'], {'time'}) def test_header_attrs(self): """Check master attributes are set.""" from satpy import Scene import xarray as xr scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) scn['test-array'] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: header_attrs = {'sensor': 'SEVIRI', 'orbit': 99999, 'none': None, 'list': [1, 2, 3], 'set': {1, 2, 3}, 'dict': {'a': 1, 'b': 2}, 'nested': {'outer': {'inner1': 1, 'inner2': 2}}, 'bool': True, 'bool_': np.bool_(True)} scn.save_datasets(filename=filename, header_attrs=header_attrs, flatten_attrs=True, writer='cf') with xr.open_dataset(filename) as f: self.assertIn('history', f.attrs) self.assertEqual(f.attrs['sensor'], 'SEVIRI') self.assertEqual(f.attrs['orbit'], 99999) np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3]) if sys.version_info.major == 3: self.assertEqual(f.attrs['set'], '{1, 2, 3}') else: # json module seems to encode sets differently in # Python 2 and 3 self.assertEqual(f.attrs['set'], u'set([1, 2, 3])') self.assertEqual(f.attrs['dict_a'], 1) self.assertEqual(f.attrs['dict_b'], 2) self.assertEqual(f.attrs['nested_outer_inner1'], 1) self.assertEqual(f.attrs['nested_outer_inner2'], 2) self.assertEqual(f.attrs['bool'], 'true') self.assertEqual(f.attrs['bool_'], 'true') self.assertTrue('none' not in f.attrs.keys()) def get_test_attrs(self): """Create some dataset attributes for testing purpose. 
Returns: Attributes, encoded attributes, encoded and flattened attributes """ attrs = {'name': 'IR_108', 'start_time': datetime(2018, 1, 1, 0), 'end_time': datetime(2018, 1, 1, 0, 15), 'int': 1, 'float': 1.0, 'none': None, # should be dropped 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': np.bool(True), 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), 'numpy_string': np.string_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ["1", ["2", [3]]], 'bool': True, 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': np.array([True, False, True]), 'array_2d': np.array([[1, 2], [3, 4]]), 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), 'dict': {'a': 1, 'b': 2}, 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, 'raw_metadata': OrderedDict([ ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), ('flag', np.bool_(True)), ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) ])} encoded = {'name': 'IR_108', 'start_time': '2018-01-01 00:00:00', 'end_time': '2018-01-01 00:15:00', 'int': 1, 'float': 1.0, 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': 'true', 'numpy_void': '[]', 'numpy_bytes': 'test', 'numpy_string': 'test', 'list': [1, 2, np.float64(3)], 'nested_list': '["1", ["2", [3]]]', 'bool': 'true', 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': ['true', 'false', 'true'], 'array_2d': '[[1, 2], [3, 4]]', 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', 'dict': '{"a": 1, "b": 2}', 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} encoded_flat = {'name': 'IR_108', 'start_time': '2018-01-01 00:00:00', 'end_time': '2018-01-01 00:15:00', 'int': 1, 'float': 1.0, 'numpy_int': np.uint8(1), 'numpy_float': np.float32(1), 'numpy_bool': 'true', 'numpy_void': '[]', 'numpy_bytes': 'test', 'numpy_string': 'test', 'list': [1, 2, np.float64(3)], 'nested_list': '["1", ["2", [3]]]', 'bool': 'true', 'array': np.array([1, 2, 3], dtype='uint8'), 'array_bool': ['true', 'false', 'true'], 'array_2d': '[[1, 2], [3, 4]]', 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', 'dict_a': 1, 'dict_b': 2, 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', 'raw_metadata_flag': 'true', 'raw_metadata_dict_a': 1, 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} return attrs, encoded, encoded_flat def assertDictWithArraysEqual(self, d1, d2): """Check that dicts containing arrays are equal.""" self.assertSetEqual(set(d1.keys()), set(d2.keys())) for key, val1 in d1.items(): val2 = d2[key] if isinstance(val1, np.ndarray): self.assertTrue(np.all(val1 == val2)) self.assertEqual(val1.dtype, val2.dtype) else: self.assertEqual(val1, val2) if isinstance(val1, (np.floating, np.integer, np.bool_)): self.assertTrue(isinstance(val2, np.generic)) self.assertEqual(val1.dtype, val2.dtype) def test_encode_attrs_nc(self): """Test attributes encoding.""" from satpy.writers.cf_writer import encode_attrs_nc import json attrs, expected, _ = self.get_test_attrs() # Test encoding encoded = encode_attrs_nc(attrs) self.assertDictWithArraysEqual(expected, encoded) # Test decoding of json-encoded attributes raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], 'flag': 'true', 'dict': {'a': 1, 'b': [1, 2, 3]}} self.assertDictEqual(json.loads(encoded['raw_metadata']), raw_md_roundtrip) 
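        # the remaining json round-trips verify that nested arrays,
        # dicts and lists decode back to equivalent plain Python
        # structures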
self.assertListEqual(json.loads(encoded['array_3d']), [[[1, 2], [3, 4]], [[1, 2], [3, 4]]]) self.assertDictEqual(json.loads(encoded['nested_dict']), {"l1": {"l2": {"l3": [1, 2, 3]}}}) self.assertListEqual(json.loads(encoded['nested_list']), ["1", ["2", [3]]]) def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" from satpy.writers.cf_writer import CFWriter import xarray as xr # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() attrs['area'] = 'some_area' attrs['prerequisites'] = [DatasetID('hej')] # Adjust expected attributes expected_prereq = ("DatasetID(name='hej', wavelength=None, resolution=None, polarization=None, " "calibration=None, level=None, modifiers=())") update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']} attrs_expected.update(update) attrs_expected_flat.update(update) attrs_expected.pop('name') attrs_expected_flat.pop('name') # Create test data array arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) # Test conversion to something cf-compliant res = CFWriter.da2cf(arr) self.assertTrue(np.all(res['x'] == arr['x'])) self.assertTrue(np.all(res['y'] == arr['y'])) self.assertTrue(np.all(res['acq_time'] == arr['acq_time'])) self.assertDictEqual(res['x'].attrs, {'units': 'm', 'standard_name': 'projection_x_coordinate'}) self.assertDictEqual(res['y'].attrs, {'units': 'm', 'standard_name': 'projection_y_coordinate'}) self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=['int']) attrs_expected_flat.pop('int') self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None) @mock.patch('satpy.writers.cf_writer.area2cf') @mock.patch('satpy.writers.cf_writer.CFWriter.da2cf') @mock.patch('satpy.writers.cf_writer.make_alt_coords_unique') @mock.patch('satpy.writers.cf_writer.assert_xy_unique') @mock.patch('satpy.writers.cf_writer.link_coords') def test_collect_datasets(self, link_coords, assert_xy_unique, make_alt_coords_unique, da2cf, area2cf, *mocks): """Test collecting CF datasets from a DataArray objects.""" from satpy.writers.cf_writer import CFWriter import xarray as xr # Patch methods def identity(arg, **kwargs): return arg def raise_key_error(arg, **kwargs): raise KeyError da2cf.side_effect = identity area2cf.side_effect = raise_key_error make_alt_coords_unique.return_value = 'unique_coords' # Define test datasets data = [[1, 2], [3, 4]] y = [1, 2] x = [1, 2] time = [1, 2] tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) datasets = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend}), xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, attrs={'name': 'var2'})] expected = {'var1': datasets[0], 'var2': datasets[1]} # Collect datasets writer = CFWriter() datas, start_times, end_times = writer._collect_datasets(datasets, include_lonlats=True) # Test results self.assertEqual(datas, 'unique_coords') self.assertListEqual(start_times, [tstart, None]) self.assertListEqual(end_times, [tend, None]) # Test method calls self.assertEqual(len(area2cf.call_args_list), 2) for call_args, ds in zip(area2cf.call_args_list, datasets): self.assertEqual(call_args, mock.call(ds, 
strict=True)) for func in (assert_xy_unique, link_coords, make_alt_coords_unique): func.assert_called() call_arg = func.call_args[0][0] self.assertIsInstance(call_arg, dict) self.assertSetEqual(set(call_arg.keys()), {'var1', 'var2'}) for key, ds in expected.items(): self.assertTrue(call_arg[key].identical(ds)) def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" import xarray as xr from satpy.writers.cf_writer import assert_xy_unique dummy = [[1, 2], [3, 4]] datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} assert_xy_unique(datas) datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) self.assertRaises(ValueError, assert_xy_unique, datas) def test_link_coords(self): """Check that coordinate links are established correctly.""" import xarray as xr from satpy.writers.cf_writer import link_coords import numpy as np data = [[1, 2], [3, 4]] lon = np.zeros((2, 2)) lat = np.ones((2, 2)) datasets = { 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), 'var2': xr.DataArray(data=data, dims=('y', 'x')), 'lon': xr.DataArray(data=lon, dims=('y', 'x')), 'lat': xr.DataArray(data=lat, dims=('y', 'x')) } link_coords(datasets) # Check that the link has been established correctly and the 'coordinates' attribute has been dropped self.assertIn('lon', datasets['var1'].coords) self.assertIn('lat', datasets['var1'].coords) self.assertTrue(np.all(datasets['var1']['lon'].data == lon)) self.assertTrue(np.all(datasets['var1']['lat'].data == lat)) self.assertNotIn('coordinates', datasets['var1'].attrs) # There should be no link if there was no 'coordinates' attribute self.assertNotIn('lon', datasets['var2'].coords) self.assertNotIn('lat', datasets['var2'].coords) def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" import xarray as xr from satpy.writers.cf_writer import make_alt_coords_unique data = [[1, 2], [3, 4]] y = [1, 2] x = [1, 2] time1 = [1, 2] time2 = [3, 4] datasets = {'var1': xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), 'var2': xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} # Test that dataset names are prepended to alternative coordinates res = make_alt_coords_unique(datasets) self.assertTrue(np.all(res['var1']['var1_acq_time'] == time1)) self.assertTrue(np.all(res['var2']['var2_acq_time'] == time2)) self.assertNotIn('acq_time', res['var1'].coords) self.assertNotIn('acq_time', res['var2'].coords) # Make sure nothing else is modified self.assertTrue(np.all(res['var1']['x'] == x)) self.assertTrue(np.all(res['var1']['y'] == y)) self.assertTrue(np.all(res['var2']['x'] == x)) self.assertTrue(np.all(res['var2']['y'] == y)) # Coords not unique -> Dataset names must be prepended, even if pretty=True with mock.patch('satpy.writers.cf_writer.warnings.warn') as warn: res = make_alt_coords_unique(datasets, pretty=True) warn.assert_called() self.assertTrue(np.all(res['var1']['var1_acq_time'] == time1)) self.assertTrue(np.all(res['var2']['var2_acq_time'] == time2)) self.assertNotIn('acq_time', res['var1'].coords) self.assertNotIn('acq_time', res['var2'].coords) # Coords unique and pretty=True -> Don't modify coordinate names datasets['var2']['acq_time'] = ('y', time1) res =
make_alt_coords_unique(datasets, pretty=True) self.assertTrue(np.all(res['var1']['acq_time'] == time1)) self.assertTrue(np.all(res['var2']['acq_time'] == time1)) self.assertNotIn('var1_acq_time', res['var1'].coords) self.assertNotIn('var2_acq_time', res['var2'].coords) @mock.patch('satpy.writers.cf_writer.area2lonlat') @mock.patch('satpy.writers.cf_writer.area2gridmapping') def test_area2cf(self, area2gridmapping, area2lonlat): """Test the conversion of an area to CF standards.""" import xarray as xr import pyresample.geometry from satpy.writers.cf_writer import area2cf area2gridmapping.side_effect = lambda x: [1, 2, 3] area2lonlat.side_effect = lambda x: [4, 5, 6] ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, attrs={'name': 'var1'}) # a) Area Definition and strict=False geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) ds = ds_base.copy(deep=True) ds.attrs['area'] = geos res = area2cf(ds) self.assertEqual(len(res), 4) self.assertListEqual(res[0:3], [1, 2, 3]) self.assertTrue(ds.identical(res[3])) # b) Area Definition and strict=True area2cf(ds, strict=True) area2lonlat.assert_called() # c) Swath Definition swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) ds = ds_base.copy(deep=True) ds.attrs['area'] = swath res = area2cf(ds) self.assertEqual(len(res), 4) self.assertListEqual(res[0:3], [4, 5, 6]) self.assertTrue(ds.identical(res[3])) def test_area2gridmapping(self): """Test the conversion from a pyresample area object to a CF grid mapping.""" import xarray as xr import pyresample.geometry from satpy.writers.cf_writer import area2gridmapping def _gm_matches(gmapping, expected): """Assert that all keys in ``expected`` match the values in ``gmapping``.""" for attr_key, attr_val in expected.attrs.items(): test_val = gmapping.attrs[attr_key] if attr_val is None or isinstance(attr_val, str): self.assertEqual(test_val, attr_val) else: np.testing.assert_almost_equal(test_val, attr_val, decimal=3) ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, attrs={'name': 'var1'}) # a) Projection has a corresponding CF representation (e.g. geos) a = 6378169. b = 6356583.8 h = 35785831.
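# Note on the constants above: a and b are the semi-major/semi-minor axes of
# the ellipsoid and h the satellite height, all in metres, for a nominal
# geostationary view. The expected CF grid mapping carries them through
# unchanged as semi_major_axis, semi_minor_axis and perspective_point_height.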
geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': h, 'a': a, 'b': b}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, attrs={'perspective_point_height': h, 'latitude_of_projection_origin': 0, 'longitude_of_projection_origin': 0, 'grid_mapping_name': 'geostationary', 'semi_major_axis': a, 'semi_minor_axis': b, 'sweep_axis': None, 'name': 'geos'}) ds = ds_base.copy() ds.attrs['area'] = geos res, grid_mapping = area2gridmapping(ds) self.assertEqual(res.attrs['grid_mapping'], 'geos') _gm_matches(grid_mapping, geos_expected) # b) Projection does not have a corresponding CF representation (COSMO) cosmo7 = pyresample.geometry.AreaDefinition( area_id='cosmo7', description='cosmo7', proj_id='cosmo7', projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, width=597, height=510, area_extent=[-1812933, -1003565, 814056, 1243448] ) ds = ds_base.copy() ds.attrs['area'] = cosmo7 with mock.patch('satpy.writers.cf_writer.warnings.warn') as warn: res, grid_mapping = area2gridmapping(ds) warn.assert_called() proj_dict = pyresample.geometry.proj4_str_to_dict(res.attrs['grid_proj4']) self.assertEqual(proj_dict['lon_0'], 4.535) self.assertEqual(proj_dict['lat_0'], 46.0) self.assertEqual(proj_dict['o_lon_p'], -5.465) self.assertEqual(proj_dict['o_lat_p'], 90.0) self.assertEqual(proj_dict['proj'], 'ob_tran') self.assertEqual(proj_dict['o_proj'], 'stere') self.assertEqual(proj_dict['ellps'], 'WGS84') self.assertEqual(grid_mapping.attrs['name'], 'proj4') # c) Projection Transverse Mercator lat_0 = 36.5 lon_0 = 15.0 tmerc = pyresample.geometry.AreaDefinition( area_id='tmerc', description='tmerc', proj_id='tmerc', projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) tmerc_expected = xr.DataArray(data=0, attrs={'latitude_of_projection_origin': lat_0, 'longitude_of_central_meridian': lon_0, 'grid_mapping_name': 'transverse_mercator', 'reference_ellipsoid_name': 'WGS84', 'false_easting': 0., 'false_northing': 0., 'name': 'tmerc'}) ds = ds_base.copy() ds.attrs['area'] = tmerc res, grid_mapping = area2gridmapping(ds) self.assertEqual(res.attrs['grid_mapping'], 'tmerc') _gm_matches(grid_mapping, tmerc_expected) # d) Projection that has a representation but no explicit a/b h = 35785831. 
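# Here only datum/ellps are given, so the semi-axes in the expected grid
# mapping below must be derived from the datum (WGS84: a=6378137.0,
# b~=6356752.314) rather than read from explicit a/b parameters.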
geos = pyresample.geometry.AreaDefinition( area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80'}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, attrs={'perspective_point_height': h, 'latitude_of_projection_origin': 0, 'longitude_of_projection_origin': 0, 'grid_mapping_name': 'geostationary', 'semi_major_axis': 6378137.0, 'semi_minor_axis': 6356752.314, 'sweep_axis': None, 'name': 'geos'}) ds = ds_base.copy() ds.attrs['area'] = geos res, grid_mapping = area2gridmapping(ds) self.assertEqual(res.attrs['grid_mapping'], 'geos') _gm_matches(grid_mapping, geos_expected) # e) oblique Mercator area = pyresample.geometry.AreaDefinition( area_id='omerc_otf', description='On-the-fly omerc area', proj_id='omerc', projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', 'proj': 'omerc', 'units': 'm'}, width=2837, height=5940, area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] ) omerc_dict = {'name': 'omerc', 'azimuth_of_central_line': 9.02638777018478, 'false_easting': 0., 'false_northing': 0., 'gamma': 0, 'geographic_crs_name': "unknown", 'grid_mapping_name': "oblique_mercator", 'horizontal_datum_name': "unknown", 'latitude_of_projection_origin': -0.256794486098476, 'longitude_of_projection_origin': 13.7888658224205, 'prime_meridian_name': "Greenwich", 'reference_ellipsoid_name': "WGS84"} omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) ds = ds_base.copy() ds.attrs['area'] = area res, grid_mapping = area2gridmapping(ds) self.assertEqual(res.attrs['grid_mapping'], 'omerc') _gm_matches(grid_mapping, omerc_expected) def test_area2lonlat(self): """Test the conversion from areas to lon/lat.""" import pyresample.geometry import xarray as xr from satpy.writers.cf_writer import area2lonlat area = pyresample.geometry.AreaDefinition( 'seviri', 'Native SEVIRI grid', 'geos', "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 2, 2, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) res = area2lonlat(dataarray) self.assertEqual(len(res), 1) self.assertEqual(set(res[0].coords), {'longitude', 'latitude'}) lat = res[0]['latitude'] lon = res[0]['longitude'] self.assertTrue(np.all(lat.data == lats_ref)) self.assertTrue(np.all(lon.data == lons_ref)) self.assertDictContainsSubset({'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}, lat.attrs) self.assertDictContainsSubset({'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}, lon.attrs) def suite(): """Test suite for this writer's tests.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestCFWriter)) return mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/tests/writer_tests/test_geotiff.py000066400000000000000000000137241362525524100230410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the geotiff writer.""" import sys import numpy as np if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest try: from unittest import mock except ImportError: import mock class TestGeoTIFFWriter(unittest.TestCase): """Test the GeoTIFF Writer class.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def _get_test_datasets(self): """Create a single test dataset.""" import xarray as xr import dask.array as da from datetime import datetime ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow()} ) return [ds1] def test_init(self): """Test creating the writer with no arguments.""" from satpy.writers.geotiff import GeoTIFFWriter GeoTIFFWriter() def test_simple_write(self): """Test basic writer operation.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) w.save_datasets(datasets) def test_simple_delayed_write(self): """Test writing can be delayed.""" import dask.array as da from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) # when we switch to rio_save on XRImage then this will be sources # and targets res = w.save_datasets(datasets, compute=False) # this will fail if rasterio isn't installed self.assertIsInstance(res, tuple) # two lists, sources and destinations self.assertEqual(len(res), 2) self.assertIsInstance(res[0], list) self.assertIsInstance(res[1], list) self.assertIsInstance(res[0][0], da.Array) da.store(res[0], res[1]) for target in res[1]: if hasattr(target, 'close'): target.close() def test_colormap_write(self): """Test writing an image with a colormap.""" from satpy.writers.geotiff import GeoTIFFWriter from trollimage.xrimage import XRImage from trollimage.colormap import spectral datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) # we'd have to customize enhancements to test this through # save_datasets. We'll use `save_image` as a workaround. img = XRImage(datasets[0]) img.palettize(spectral) w.save_image(img, keep_palette=True) def test_float_write(self): """Test that geotiffs can be written as floats. NOTE: Does not actually check that the output is floats. 
""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir, enhancement_config=False, dtype=np.float32) w.save_datasets(datasets) def test_fill_value_from_config(self): """Test fill_value coming from the writer config.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(base_dir=self.base_dir) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) self.assertEqual(save_method.call_args[1]['fill_value'], 128) def test_tags(self): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(tags={'test1': 1}, base_dir=self.base_dir) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, tags={'test2': 2}, compute=False) called_tags = save_method.call_args[1]['tags'] self.assertDictEqual(called_tags, {'test1': 1, 'test2': 2}) def test_scale_offset(self): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = self._get_test_datasets() w = GeoTIFFWriter(tags={'test1': 1}, base_dir=self.base_dir) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, tags={'test2': 2}, compute=False, include_scale_offset=True) called_include = save_method.call_args[1]['include_scale_offset_tags'] self.assertTrue(called_include) def suite(): """Test suite for this writer's tests.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestGeoTIFFWriter)) return mysuite satpy-0.20.0/satpy/tests/writer_tests/test_mitiff.py000066400000000000000000001352741362525524100227010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the mitiff writer. 
Based on the tests for the geotiff writer. """ import sys if sys.version_info < (2, 7): import unittest2 as unittest else: import unittest class TestMITIFFWriter(unittest.TestCase): """Test the MITIFF Writer class.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def _get_test_datasets(self): """Helper function to create a datasets list.""" import xarray as xr import dask.array as da from datetime import datetime from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': '1', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': ['1'], 'calibration': 'reflectance', 'metadata_requirements': { 'order': ['1'], 'config': { '1': {'alias': '1-VIS0.63', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'}, }, 'translate': {'1': '1', }, 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': '4', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': ['4'], 'calibration': 'brightness_temperature', 'metadata_requirements': { 'order': ['4'], 'config': { '4': {'alias': '4-IR10.8', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, }, 'translate': {'4': '4', }, 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} } ) return [ds1, ds2] def _get_test_dataset(self, bands=3): """Helper function to create a single test dataset.""" import xarray as xr import dask.array as da from datetime import datetime from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), dims=('bands', 'y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': ['1', '2', '3']} ) return ds1 def _get_test_one_dataset(self): """Helper function to create a single test dataset.""" import xarray as xr import dask.array as da from datetime import datetime from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +h=36000.
+units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'avhrr', 'area': area_def, 'prerequisites': [10.8]} ) return ds1 def _get_test_dataset_with_bad_values(self, bands=3): """Helper function to create a single test dataset.""" import xarray as xr import numpy as np from datetime import datetime from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 data /= 5.605 data[0, 0] = np.nan # need a nan value data[0, 1] = 0. # Need a 0 value rgb_data = np.stack([data, data, data]) ds1 = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': ['1', '2', '3']}) return ds1 def _get_test_dataset_calibration(self, bands=6): """Helper function to create a single test dataset.""" import xarray as xr import dask.array as da from datetime import datetime from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict from satpy import DatasetID from satpy.scene import Scene area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) d = [ DatasetID(name='1', calibration='reflectance'), DatasetID(name='2', calibration='reflectance'), DatasetID(name='3', calibration='brightness_temperature'), DatasetID(name='4', calibration='brightness_temperature'), DatasetID(name='5', calibration='brightness_temperature'), DatasetID(name='6', calibration='reflectance') ] scene = Scene() scene["1"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'reflectance'}) scene["2"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'reflectance'}) scene["3"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'brightness_temperature'}) scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'brightness_temperature'}) scene["5"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'brightness_temperature'}) scene["6"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'reflectance'}) data = xr.concat(scene, 'bands', coords='minimal') bands = [] calibration = [] for p in scene: calibration.append(p.attrs['calibration']) bands.append(p.attrs['name']) data['bands'] = list(bands) new_attrs = {'name': 'datasets', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'test-sensor', 'area': area_def, 'prerequisites': d, 'metadata_requirements': { 'order': ['1', '2', '3', '4', '5', '6'], 'config': { '1': {'alias': '1-VIS0.63', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'}, '2': {'alias': '2-VIS0.86', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'}, '3': {'alias': '3(3B)-IR3.7', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, '4': {'alias': '4-IR10.8', 
'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, '5': {'alias': '5-IR11.5', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, '6': {'alias': '6(3A)-VIS1.6', 'calibration': 'reflectance', 'min-val': '0', 'max-val': '100'} }, 'translate': {'1': '1', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6' }, 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) return ds1 def _get_test_dataset_calibration_one_dataset(self, bands=1): """Helper function to create a single test dataset.""" import xarray as xr import dask.array as da from datetime import datetime from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict from satpy import DatasetID from satpy.scene import Scene area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) d = [DatasetID(name='4', calibration='brightness_temperature')] scene = Scene() scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=('y', 'x'), attrs={'calibration': 'brightness_temperature'}) data = scene['4'] calibration = [] for p in scene: calibration.append(p.attrs['calibration']) new_attrs = {'name': 'datasets', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'test-sensor', 'area': area_def, 'prerequisites': d, 'metadata_requirements': { 'order': ['4'], 'config': { '4': {'alias': 'BT', 'calibration': 'brightness_temperature', 'min-val': '-150', 'max-val': '50'}, }, 'translate': {'4': '4', }, 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) return ds1 def _get_test_dataset_three_bands_two_prereq(self, bands=3): """Helper function to create a single test dataset.""" import xarray as xr import dask.array as da from datetime import datetime from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict from satpy import DatasetID area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' '+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km'), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], dims=('bands', 'y', 'x'), attrs={'name': 'test', 'start_time': datetime.utcnow(), 'platform_name': "TEST_PLATFORM_NAME", 'sensor': 'TEST_SENSOR_NAME', 'area': area_def, 'prerequisites': [DatasetID(name='1', calibration='reflectance'), DatasetID(name='2', calibration='reflectance')]} ) return ds1 def test_init(self): """Test creating the writer with no arguments.""" from satpy.writers.mitiff import MITIFFWriter MITIFFWriter() def test_simple_write(self): """Test basic writer operation.""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_dataset() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) def test_save_datasets(self): """Test basic writer operation save_datasets.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.full((100, 200), 0) dataset = self._get_test_datasets() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( start_time=dataset[0].attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_save_one_dataset(self): """Test basic writer operation with one dataset ie. no bands.""" import os from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_one_dataset() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) tif = TIFF.open(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') for key in imgdesc: if 'In this file' in key: self.assertEqual(key, ' Channels: 1 In this file: 1') def test_save_dataset_with_calibration(self): """Test writer operation with calibration.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected_ir = np.full((100, 200), 255) expected_vis = np.full((100, 200), 0) expected = np.stack([expected_vis, expected_vis, expected_ir, expected_ir, expected_ir, expected_vis]) expected_key_channel = ['Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 ' '69.41 69.80 70.20 70.59 70.98 71.37 71.76 
72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', 'Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 ' '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', u'Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' '45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 ' '34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 ' '23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 ' '12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 ' '-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 ' '-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 ' '-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 ' '-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 ' '-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 ' '-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 ' '-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 ' '-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 ' '-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 ' '-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 ' '-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 ' '-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 
' '-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 ' '-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 ' '-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 ' '-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 ' '-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', u'Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' '45.29 ' '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', u'Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' '45.29 ' '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' '-87.25 -88.04 -88.82 -89.61 
-90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', 'Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 ' '1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 ' '8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 ' '14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 ' '19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 ' '25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 ' '30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 ' '36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 ' '41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 ' '47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 ' '52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 ' '58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 ' '63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 ' '69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 ' '74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 ' '80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 ' '85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 ' '90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 ' '96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]'] dataset = self._get_test_dataset_calibration() w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) w.save_dataset(dataset) filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( start_time=dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: if 'Table_calibration' in key: found_table_calibration = True if '1-VIS0.63' in key: self.assertEqual(key, expected_key_channel[0]) number_of_calibrations += 1 elif '2-VIS0.86' in key: self.assertEqual(key, expected_key_channel[1]) number_of_calibrations += 1 elif '3(3B)-IR3.7' in key: self.assertEqual(key, expected_key_channel[2]) number_of_calibrations += 1 elif '4-IR10.8' in key: self.assertEqual(key, expected_key_channel[3]) number_of_calibrations += 1 elif '5-IR11.5' in key: self.assertEqual(key, expected_key_channel[4]) number_of_calibrations += 1 elif '6(3A)-VIS1.6' in key: self.assertEqual(key, expected_key_channel[5]) number_of_calibrations += 1 else: self.fail("Not a valid channel description i the given key.") self.assertTrue(found_table_calibration, 
"Table_calibration is not found in the imagedescription.") self.assertEqual(number_of_calibrations, 6) for i, image in enumerate(tif.iter_images()): np.testing.assert_allclose(image, expected[i], atol=1.e-6, rtol=0) def test_save_dataset_with_calibration_one_dataset(self): """Test saving if mitiff as dataset with only one channel.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.full((100, 200), 255) expected_key_channel = [u'Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 ' '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', ] dataset = self._get_test_dataset_calibration_one_dataset() w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) w.save_dataset(dataset) filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( start_time=dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: if 'Table_calibration' in key: found_table_calibration = True if 'BT' in key: self.assertEqual(key, expected_key_channel[0]) number_of_calibrations += 1 self.assertTrue(found_table_calibration, "Expected table_calibration is not found in the imagedescription.") self.assertEqual(number_of_calibrations, 1) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_save_dataset_with_bad_value(self): """Test writer operation with bad values.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.array([[0, 4, 1, 37, 73], [110, 146, 183, 
219, 255]]) dataset = self._get_test_dataset_with_bad_values() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_convert_proj4_string(self): import xarray as xr import dask.array as da from satpy.writers.mitiff import MITIFFWriter from pyresample.geometry import AreaDefinition checks = [{'epsg': '+init=EPSG:32631', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}, {'epsg': '+init=EPSG:32632', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}, {'epsg': '+init=EPSG:32633', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}, {'epsg': '+init=EPSG:32634', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}, {'epsg': '+init=EPSG:32635', 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 ' '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' '+y_0=1515.000000\n')}] for check in checks: area_def = AreaDefinition( 'test', 'test', 'test', check['epsg'], 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((10, 20), chunks=20), dims=('y', 'x'), attrs={'area': area_def} ) w = MITIFFWriter(filename='dummy.tif', base_dir=self.base_dir) proj4_string = w._add_proj4_string(ds1, ds1) self.assertEqual(proj4_string, check['proj4']) def test_save_dataset_palette(self): """Test writer operation as palette.""" import os import numpy as np from libtiff import TIFF from satpy.writers.mitiff import MITIFFWriter expected = np.full((100, 200), 0) exp_c = ([0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) color_map = [[0, 3], [1, 4], [2, 5]] pal_desc = ['test', 'test2'] unit = "Test" dataset = self._get_test_one_dataset() palette = {'palette': True, 'palette_color_map': color_map, 'palette_description': pal_desc, 'palette_unit': unit, 'palette_channel_name': dataset.attrs['name']} w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset, **palette) filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], dataset.attrs['start_time']) tif = TIFF.open(os.path.join(self.base_dir, filename)) # Need to check PHOTOMETRIC is 3, ie palette self.assertEqual(tif.GetField('PHOTOMETRIC'), 3) colormap = tif.GetField('COLORMAP') # Check the colormap of the palette image self.assertEqual(colormap, exp_c) IMAGEDESCRIPTION = 270 imgdesc = (tif.GetField(IMAGEDESCRIPTION)).decode('utf-8').split('\n') found_color_info = False unit_name_found = False name_length_found = False name_length = 0 names = [] unit_name = None for key in imgdesc: if name_length_found and name_length > len(names): names.append(key) continue elif unit_name_found: name_length = int(key) name_length_found = True unit_name_found = False elif found_color_info: unit_name = key unit_name_found = True found_color_info = False elif 'COLOR INFO:' in key: found_color_info = True # Check the name of the palette description self.assertEqual(name_length, 2) # Check the name and unit name of the palette self.assertEqual(unit_name, ' Test') # Check the palette description of the palette self.assertEqual(names, [' test', ' test2']) for image in tif.iter_images(): np.testing.assert_allclose(image, expected, atol=1.e-6, rtol=0) def test_simple_write_two_bands(self): """Test basic writer operation with 3 bands from 2 prerequisites""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_dataset_three_bands_two_prereq() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) def suite(): """The test suite for this writer's tests. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestMITIFFWriter)) return mysuite if __name__ == '__main__': unittest.main() satpy-0.20.0/satpy/tests/writer_tests/test_ninjotiff.py000066400000000000000000000061151362525524100234000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the NinJoTIFF writer.""" import sys import unittest from unittest import mock import xarray as xr class FakeImage: """Fake image.""" def __init__(self, data, mode): """Init fake image.""" self.data = data self.mode = mode def get_scaling_from_history(self): """Return dummy scale and offset.""" return xr.DataArray(1), xr.DataArray(0) modules = {'pyninjotiff': mock.Mock(), 'pyninjotiff.ninjotiff': mock.Mock()} @mock.patch.dict(sys.modules, modules) class TestNinjoTIFFWriter(unittest.TestCase): """The ninjo tiff writer tests.""" def test_init(self): """Test the init.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ninjo_tags = {40000: 'NINJO'} ntw = NinjoTIFFWriter(tags=ninjo_tags) self.assertDictEqual(ntw.tags, ninjo_tags) @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') @mock.patch('satpy.writers.ninjotiff.convert_units') def test_dataset(self, uconv, iwsd): """Test saving a dataset.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) ntw.save_dataset(dataset, physic_unit='CELSIUS') uconv.assert_called_once_with(dataset, 'K', 'CELSIUS') self.assertEqual(iwsd.call_count, 1) @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') def test_image(self, iwsi, save_dataset): """Test saving an image.""" import pyninjotiff.ninjotiff as nt from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) img = FakeImage(dataset, 'L') ret = ntw.save_image(img, filename='bla.tif', compute=False) nt.save.assert_called() assert(nt.save.mock_calls[0][2]['compute'] is False) assert(nt.save.mock_calls[0][2]['ch_min_measurement_unit'] < nt.save.mock_calls[0][2]['ch_max_measurement_unit']) assert(ret == nt.save.return_value) def suite(): """Test suite for this writer's tests.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestNinjoTIFFWriter)) return mysuite satpy-0.20.0/satpy/tests/writer_tests/test_scmi.py000066400000000000000000000310411362525524100223410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
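# Illustrative usage sketch (not executed by these tests; ``data_arr`` is a
# hypothetical DataArray carrying an ``area`` attribute and the usual
# metadata):
#
#     from satpy.writers.scmi import SCMIWriter
#     writer = SCMIWriter(base_dir='/tmp/scmi_out', compress=True)
#     writer.save_datasets([data_arr], sector_id='TEST', source_name='TESTS')
#
# ``sector_id`` and ``source_name`` mirror the required keyword arguments
# used throughout the tests below; everything else here is an assumption.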
"""Tests for the SCMI writer.""" import os from glob import glob from datetime import datetime, timedelta import numpy as np import dask.array as da import unittest class TestSCMIWriter(unittest.TestCase): """Test basic functionality of SCMI writer.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_init(self): """Test basic init method of writer.""" from satpy.writers.scmi import SCMIWriter SCMIWriter(base_dir=self.base_dir) def test_basic_numbered_1_tile(self): """Test creating a single numbered tile.""" from satpy.writers.scmi import SCMIWriter from xarray import DataArray from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict w = SCMIWriter(base_dir=self.base_dir, compress=True) area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' '+lat_0=25 +lat_1=25 +units=m +no_defs'), 100, 200, (-1000., -1500., 1000., 1500.), ) now = datetime(2018, 1, 1, 12, 0, 0) ds = DataArray( da.from_array(np.linspace(0., 1., 20000, dtype=np.float32).reshape((200, 100)), chunks=50), attrs=dict( name='test_ds', platform_name='PLAT', sensor='SENSOR', units='1', area=area_def, start_time=now, end_time=now + timedelta(minutes=20)) ) w.save_datasets([ds], sector_id='TEST', source_name='TESTS') all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) self.assertEqual(len(all_files), 1) self.assertEqual(os.path.basename(all_files[0]), 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc') def test_basic_numbered_tiles(self): """Test creating a multiple numbered tiles.""" from satpy.writers.scmi import SCMIWriter from xarray import DataArray from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict w = SCMIWriter(base_dir=self.base_dir, compress=True) area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' '+lat_0=25 +lat_1=25 +units=m +no_defs'), 100, 200, (-1000., -1500., 1000., 1500.), ) now = datetime(2018, 1, 1, 12, 0, 0) ds = DataArray( da.from_array(np.linspace(0., 1., 20000, dtype=np.float32).reshape((200, 100)), chunks=50), attrs=dict( name='test_ds', platform_name='PLAT', sensor='SENSOR', units='1', area=area_def, start_time=now, end_time=now + timedelta(minutes=20)) ) w.save_datasets([ds], sector_id='TEST', source_name="TESTS", tile_count=(3, 3)) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) self.assertEqual(len(all_files), 9) def test_basic_lettered_tiles(self): """Test creating a lettered grid.""" import xarray as xr from satpy.writers.scmi import SCMIWriter from xarray import DataArray from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict w = SCMIWriter(base_dir=self.base_dir, compress=True) area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. 
' '+lat_0=25 +lat_1=25 +units=m +no_defs'), 1000, 2000, (-1000000., -1500000., 1000000., 1500000.), ) now = datetime(2018, 1, 1, 12, 0, 0) ds = DataArray( da.from_array(np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)), chunks=500), attrs=dict( name='test_ds', platform_name='PLAT', sensor='SENSOR', units='1', area=area_def, start_time=now, end_time=now + timedelta(minutes=20)) ) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) self.assertEqual(len(all_files), 16) for fn in all_files: nc = xr.open_dataset(fn, mask_and_scale=False) # geolocation coordinates should be monotonically increasing by 1 np.testing.assert_equal(np.diff(nc['x']), 1) np.testing.assert_equal(np.diff(nc['y']), 1) assert nc.attrs['start_date_time'] == now.strftime('%Y-%m-%dT%H:%M:%S') def test_lettered_tiles_sector_ref(self): """Test creating a lettered grid using the sector as reference.""" import xarray as xr from satpy.writers.scmi import SCMIWriter from xarray import DataArray from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict w = SCMIWriter(base_dir=self.base_dir, compress=True) area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' '+lat_0=25 +lat_1=25 +units=m +no_defs'), 1000, 2000, (-1000000., -1500000., 1000000., 1500000.), ) now = datetime(2018, 1, 1, 12, 0, 0) ds = DataArray( da.from_array(np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)), chunks=500), attrs=dict( name='test_ds', platform_name='PLAT', sensor='SENSOR', units='1', area=area_def, start_time=now, end_time=now + timedelta(minutes=20)) ) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", lettered_grid=True, use_sector_reference=True, use_end_time=True) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) self.assertEqual(len(all_files), 16) for fn in all_files: nc = xr.open_dataset(fn, mask_and_scale=False) # geolocation coordinates should be monotonically increasing by 1 np.testing.assert_equal(np.diff(nc['x']), 1) np.testing.assert_equal(np.diff(nc['y']), 1) assert nc.attrs['start_date_time'] == (now + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S') def test_lettered_tiles_no_fit(self): """Test creating a lettered grid with no data.""" from satpy.writers.scmi import SCMIWriter from xarray import DataArray from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict w = SCMIWriter(base_dir=self.base_dir, compress=True) area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. 
' '+lat_0=25 +lat_1=25 +units=m +no_defs'), 1000, 2000, (4000000., 5000000., 5000000., 6000000.), ) now = datetime(2018, 1, 1, 12, 0, 0) ds = DataArray( da.from_array(np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)), chunks=500), attrs=dict( name='test_ds', platform_name='PLAT', sensor='SENSOR', units='1', area=area_def, start_time=now, end_time=now + timedelta(minutes=20)) ) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) self.assertEqual(len(all_files), 0) def test_lettered_tiles_bad_filename(self): """Test creating a lettered grid with a bad filename.""" from satpy.writers.scmi import SCMIWriter from xarray import DataArray from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict w = SCMIWriter(base_dir=self.base_dir, compress=True, filename="{Bad Key}.nc") area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' '+lat_0=25 +lat_1=25 +units=m +no_defs'), 1000, 2000, (-1000000., -1500000., 1000000., 1500000.), ) now = datetime(2018, 1, 1, 12, 0, 0) ds = DataArray( da.from_array(np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)), chunks=500), attrs=dict( name='test_ds', platform_name='PLAT', sensor='SENSOR', units='1', area=area_def, start_time=now, end_time=now + timedelta(minutes=20)) ) self.assertRaises(KeyError, w.save_datasets, [ds], sector_id='LCC', source_name='TESTS', tile_count=(3, 3), lettered_grid=True) def test_basic_numbered_tiles_rgb(self): """Test creating a multiple numbered tiles with RGB.""" from satpy.writers.scmi import SCMIWriter from xarray import DataArray from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict w = SCMIWriter(base_dir=self.base_dir, compress=True) area_def = AreaDefinition( 'test', 'test', 'test', proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' '+lat_0=25 +lat_1=25 +units=m +no_defs'), 100, 200, (-1000., -1500., 1000., 1500.), ) now = datetime(2018, 1, 1, 12, 0, 0) ds = DataArray( da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50), dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}, attrs=dict( name='test_ds', platform_name='PLAT', sensor='SENSOR', units='1', area=area_def, start_time=now, end_time=now + timedelta(minutes=20)) ) w.save_datasets([ds], sector_id='TEST', source_name="TESTS", tile_count=(3, 3)) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_R*.nc')) self.assertEqual(len(all_files), 9) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_G*.nc')) self.assertEqual(len(all_files), 9) all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_B*.nc')) self.assertEqual(len(all_files), 9) def suite(): """Create test suite for this writer's tests.""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestSCMIWriter)) return mysuite satpy-0.20.0/satpy/tests/writer_tests/test_simple_image.py000066400000000000000000000055261362525524100240520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. 
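# --- Illustrative sketch (editor's addition, not part of satpy) ---------------
# The SCMI tests above rebuild nearly the same DataArray fixture in every test
# method. A shared helper along these lines (name and signature hypothetical,
# assuming the imports used in those tests) would remove the duplication:
#
#     def _create_test_dataset(area_def, shape=(200, 100), chunks=50):
#         """Create a dataset mimicking the fixtures in the SCMI tests above."""
#         now = datetime(2018, 1, 1, 12, 0, 0)
#         data = np.linspace(0., 1., shape[0] * shape[1],
#                            dtype=np.float32).reshape(shape)
#         return DataArray(
#             da.from_array(data, chunks=chunks),
#             attrs=dict(name='test_ds', platform_name='PLAT', sensor='SENSOR',
#                        units='1', area=area_def, start_time=now,
#                        end_time=now + timedelta(minutes=20)))
# -------------------------------------------------------------------------------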
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the simple image writer."""

import sys

if sys.version_info < (2, 7):
    import unittest2 as unittest
else:
    import unittest


class TestPillowWriter(unittest.TestCase):
    """Test the Pillow-based simple image writer."""

    def setUp(self):
        """Create temporary directory to save files to."""
        import tempfile
        self.base_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Remove the temporary directory created for a test."""
        try:
            import shutil
            shutil.rmtree(self.base_dir, ignore_errors=True)
        except OSError:
            pass

    @staticmethod
    def _get_test_datasets():
        """Create DataArray for testing."""
        import xarray as xr
        import dask.array as da
        from datetime import datetime
        ds1 = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': 'test',
                   'start_time': datetime.utcnow()}
        )
        return [ds1]

    def test_init(self):
        """Test creating the default writer."""
        from satpy.writers.simple_image import PillowWriter
        PillowWriter()

    def test_simple_write(self):
        """Test writing datasets with default behavior."""
        from satpy.writers.simple_image import PillowWriter
        datasets = self._get_test_datasets()
        w = PillowWriter(base_dir=self.base_dir)
        w.save_datasets(datasets)

    def test_simple_delayed_write(self):
        """Test writing datasets with delayed computation."""
        from dask.delayed import Delayed
        from satpy.writers.simple_image import PillowWriter
        from satpy.writers import compute_writer_results
        datasets = self._get_test_datasets()
        w = PillowWriter(base_dir=self.base_dir)
        res = w.save_datasets(datasets, compute=False)
        for r__ in res:
            self.assertIsInstance(r__, Delayed)
            r__.compute()
        compute_writer_results(res)


def suite():
    """The test suite for this writer's tests."""
    loader = unittest.TestLoader()
    mysuite = unittest.TestSuite()
    mysuite.addTest(loader.loadTestsFromTestCase(TestPillowWriter))
    return mysuite
satpy-0.20.0/satpy/tests/writer_tests/test_utils.py000066400000000000000000000026251362525524100225540ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for writer utilities."""

import unittest
import satpy.writers.utils as wutils


class WriterUtilsTest(unittest.TestCase):
    """Test writer utility functions."""

    def test_flatten_dict(self):
        """Test flattening a nested dictionary into underscore-joined keys."""
        d = {'a': 1, 'b': {'c': 1, 'd': {'e': 1, 'f': {'g': [1, 2]}}}}
        expected = {'a': 1,
                    'b_c': 1,
                    'b_d_e': 1,
                    'b_d_f_g': [1, 2]}
        self.assertDictEqual(wutils.flatten_dict(d), expected)


def suite():
    """The test suite for writer utilities.
""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(WriterUtilsTest)) return mysuite if __name__ == "__main__": unittest.main() satpy-0.20.0/satpy/utils.py000066400000000000000000000250031362525524100156100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """Module defining various utilities.""" import logging import os import sys import re import warnings import numpy as np try: import configparser except ImportError: from six.moves import configparser _is_logging_on = False TRACE_LEVEL = 5 class OrderedConfigParser(object): """Intercepts read and stores ordered section names. Cannot use inheritance and super as ConfigParser use old style classes. """ def __init__(self, *args, **kwargs): """Initialize the instance.""" self.config_parser = configparser.ConfigParser(*args, **kwargs) def __getattr__(self, name): """Get the attribute.""" return getattr(self.config_parser, name) def read(self, filename): """Read config file.""" try: conf_file = open(filename, 'r') config = conf_file.read() config_keys = re.findall(r'\[.*\]', config) self.section_keys = [key[1:-1] for key in config_keys] except IOError as e: # Pass if file not found if e.errno != 2: raise return self.config_parser.read(filename) def sections(self): """Get sections from config file.""" try: return self.section_keys except: # noqa: E722 return self.config_parser.sections() def ensure_dir(filename): """Check if the dir of f exists, otherwise create it.""" directory = os.path.dirname(filename) if directory and not os.path.isdir(directory): os.makedirs(directory) def debug_on(): """Turn debugging logging on.""" logging_on(logging.DEBUG) def trace_on(): """Turn trace logging on.""" logging_on(TRACE_LEVEL) def logging_on(level=logging.WARNING): """Turn logging on.""" global _is_logging_on if not _is_logging_on: console = logging.StreamHandler() console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :" " %(name)s] %(message)s", '%Y-%m-%d %H:%M:%S')) console.setLevel(level) logging.getLogger('').addHandler(console) _is_logging_on = True log = logging.getLogger('') log.setLevel(level) for h in log.handlers: h.setLevel(level) def logging_off(): """Turn logging off.""" logging.getLogger('').handlers = [logging.NullHandler()] def get_logger(name): """Return logger with null handler added if needed.""" if not hasattr(logging.Logger, 'trace'): logging.addLevelName(TRACE_LEVEL, 'TRACE') def trace(self, message, *args, **kwargs): if self.isEnabledFor(TRACE_LEVEL): # Yes, logger takes its '*args' as 'args'. 
self._log(TRACE_LEVEL, message, args, **kwargs) logging.Logger.trace = trace log = logging.getLogger(name) if not log.handlers and sys.version_info[0] < 3: log.addHandler(logging.NullHandler()) return log def in_ipynb(): """Check if we are in a jupyter notebook.""" try: return 'ZMQ' in get_ipython().__class__.__name__ except NameError: return False # Spherical conversions def lonlat2xyz(lon, lat): """Convert lon lat to cartesian.""" lat = np.deg2rad(lat) lon = np.deg2rad(lon) x = np.cos(lat) * np.cos(lon) y = np.cos(lat) * np.sin(lon) z = np.sin(lat) return x, y, z def xyz2lonlat(x, y, z, asin=False): """Convert cartesian to lon lat.""" lon = np.rad2deg(np.arctan2(y, x)) if asin: lat = np.rad2deg(np.arcsin(z)) else: lat = np.rad2deg(np.arctan2(z, np.sqrt(x ** 2 + y ** 2))) return lon, lat def angle2xyz(azi, zen): """Convert azimuth and zenith to cartesian.""" azi = np.deg2rad(azi) zen = np.deg2rad(zen) x = np.sin(zen) * np.sin(azi) y = np.sin(zen) * np.cos(azi) z = np.cos(zen) return x, y, z def xyz2angle(x, y, z, acos=False): """Convert cartesian to azimuth and zenith.""" azi = np.rad2deg(np.arctan2(x, y)) if acos: zen = np.rad2deg(np.arccos(z)) else: zen = 90 - np.rad2deg(np.arctan2(z, np.sqrt(x ** 2 + y ** 2))) return azi, zen def proj_units_to_meters(proj_str): """Convert projection units from kilometers to meters.""" proj_parts = proj_str.split() new_parts = [] for itm in proj_parts: key, val = itm.split('=') key = key.strip('+') if key in ['a', 'b', 'h']: val = float(val) if val < 6e6: val *= 1000. val = '%.3f' % val if key == 'units' and val == 'km': continue new_parts.append('+%s=%s' % (key, val)) return ' '.join(new_parts) def _get_sunz_corr_li_and_shibata(cos_zen): return 24.35 / (2. * cos_zen + np.sqrt(498.5225 * cos_zen**2 + 1)) def sunzen_corr_cos(data, cos_zen, limit=88., max_sza=95.): """Perform Sun zenith angle correction. The correction is based on the provided cosine of the zenith angle (``cos_zen``). The correction is limited to ``limit`` degrees (default: 88.0 degrees). For larger zenith angles, the correction is the same as at the ``limit`` if ``max_sza`` is `None`. The default behavior is to gradually reduce the correction past ``limit`` degrees up to ``max_sza`` where the correction becomes 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape. """ # Convert the zenith angle limit to cosine of zenith angle limit_rad = np.deg2rad(limit) limit_cos = np.cos(limit_rad) max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza # Cosine correction corr = 1. / cos_zen if max_sza is not None: # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) # invert the factor so maximum correction is done at `limit` and falls off later grad_factor = 1. - np.log(grad_factor + 1) / np.log(2) # make sure we don't make anything negative grad_factor = grad_factor.clip(0.) else: # Use constant value (the limit) for larger zenith angles grad_factor = 1. corr = corr.where(cos_zen > limit_cos, grad_factor / limit_cos) # Force "night" pixels to 0 (where SZA is invalid) corr = corr.where(cos_zen.notnull(), 0) return data * corr def atmospheric_path_length_correction(data, cos_zen, limit=88., max_sza=95.): """Perform Sun zenith angle correction. This function uses the correction method proposed by Li and Shibata (2006): https://doi.org/10.1175/JAS3682.1 The correction is limited to ``limit`` degrees (default: 88.0 degrees). 
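    The applied correction factor is

        24.35 / (2 * cos_zen + sqrt(498.5225 * cos_zen**2 + 1))

    which evaluates to exactly 1 for overhead Sun (``cos_zen = 1``) and grows
    as the Sun approaches the horizon.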
For larger zenith angles, the correction is the same as at the ``limit`` if ``max_sza`` is `None`. The default behavior is to gradually reduce the correction past ``limit`` degrees up to ``max_sza`` where the correction becomes 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape. """ # Convert the zenith angle limit to cosine of zenith angle limit_rad = np.deg2rad(limit) limit_cos = np.cos(limit_rad) max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza # Cosine correction corr = _get_sunz_corr_li_and_shibata(cos_zen) # Use constant value (the limit) for larger zenith angles corr_lim = _get_sunz_corr_li_and_shibata(limit_cos) if max_sza is not None: # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) # invert the factor so maximum correction is done at `limit` and falls off later grad_factor = 1. - np.log(grad_factor + 1) / np.log(2) # make sure we don't make anything negative grad_factor = grad_factor.clip(0.) else: # Use constant value (the limit) for larger zenith angles grad_factor = 1. corr = corr.where(cos_zen > limit_cos, grad_factor * corr_lim) # Force "night" pixels to 0 (where SZA is invalid) corr = corr.where(cos_zen.notnull(), 0) return data * corr def get_satpos(dataset): """Get satellite position from dataset attributes. Preferences are: * Longitude & Latitude: Nadir, actual, nominal, projection * Altitude: Actual, nominal, projection A warning is issued when projection values have to be used because nothing else is available. Returns: Geodetic longitude, latitude, altitude """ try: orb_params = dataset.attrs['orbital_parameters'] # Altitude try: alt = orb_params['satellite_actual_altitude'] except KeyError: try: alt = orb_params['satellite_nominal_altitude'] except KeyError: alt = orb_params['projection_altitude'] warnings.warn('Actual satellite altitude not available, using projection altitude instead.') # Longitude & Latitude try: lon = orb_params['nadir_longitude'] lat = orb_params['nadir_latitude'] except KeyError: try: lon = orb_params['satellite_actual_longitude'] lat = orb_params['satellite_actual_latitude'] except KeyError: try: lon = orb_params['satellite_nominal_longitude'] lat = orb_params['satellite_nominal_latitude'] except KeyError: lon = orb_params['projection_longitude'] lat = orb_params['projection_latitude'] warnings.warn('Actual satellite lon/lat not available, using projection centre instead.') except KeyError: # Legacy lon = dataset.attrs['satellite_longitude'] lat = dataset.attrs['satellite_latitude'] alt = dataset.attrs['satellite_altitude'] return lon, lat, alt satpy-0.20.0/satpy/writers/000077500000000000000000000000001362525524100155755ustar00rootroot00000000000000satpy-0.20.0/satpy/writers/__init__.py000066400000000000000000001251351362525524100177150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
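# --- Illustrative sketch (editor's addition) -----------------------------------
# ``satpy.utils.get_satpos`` above resolves the satellite position from dataset
# attributes, preferring actual over nominal over projection values. A minimal
# call, assuming a DataArray ``data_arr`` with ``orbital_parameters`` in its
# ``.attrs`` (the attribute values below are made up for the example):
#
#     from satpy.utils import get_satpos
#     data_arr.attrs['orbital_parameters'] = {
#         'satellite_nominal_longitude': 0.0,
#         'satellite_nominal_latitude': 0.0,
#         'satellite_nominal_altitude': 35785831.0,
#     }
#     lon, lat, alt = get_satpos(data_arr)  # falls back to the nominal values
# --------------------------------------------------------------------------------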
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Shared objects of the various writer classes. For now, this includes enhancement configuration utilities. """ import logging import os import warnings import dask.array as da import numpy as np import xarray as xr import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader from satpy.config import (config_search_paths, glob_config, get_environ_config_dir, recursive_dict_update) from satpy import CHUNK_SIZE from satpy.plugin_base import Plugin from satpy.resample import get_area_def from trollsift import parser from trollimage.xrimage import XRImage LOG = logging.getLogger(__name__) def read_writer_config(config_files, loader=UnsafeLoader): """Read the writer `config_files` and return the info extracted.""" conf = {} LOG.debug('Reading %s', str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: writer_info = conf['writer'] except KeyError: raise KeyError( "Malformed config file {}: missing writer 'writer'".format( config_files)) writer_info['config_files'] = config_files return writer_info def load_writer_configs(writer_configs, ppp_config_dir, **writer_kwargs): """Load the writer from the provided `writer_configs`.""" try: writer_info = read_writer_config(writer_configs) writer_class = writer_info['writer'] except (ValueError, KeyError, yaml.YAMLError): raise ValueError("Invalid writer configs: " "'{}'".format(writer_configs)) init_kwargs, kwargs = writer_class.separate_init_kwargs(writer_kwargs) writer = writer_class(ppp_config_dir=ppp_config_dir, config_files=writer_configs, **init_kwargs) return writer, kwargs def load_writer(writer, ppp_config_dir=None, **writer_kwargs): """Find and load writer `writer` in the available configuration files.""" if ppp_config_dir is None: ppp_config_dir = get_environ_config_dir() config_fn = writer + ".yaml" if "." not in writer else writer config_files = config_search_paths( os.path.join("writers", config_fn), ppp_config_dir) writer_kwargs.setdefault("config_files", config_files) if not writer_kwargs['config_files']: raise ValueError("Unknown writer '{}'".format(writer)) try: return load_writer_configs(writer_kwargs['config_files'], ppp_config_dir=ppp_config_dir, **writer_kwargs) except ValueError: raise ValueError("Writer '{}' does not exist or could not be " "loaded".format(writer)) def configs_for_writer(writer=None, ppp_config_dir=None): """Generate writer configuration files for one or more writers. Args: writer (Optional[str]): Yield configs only for this writer ppp_config_dir (Optional[str]): Additional configuration directory to search for writer configuration files. 
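    For example, ``configs_for_writer('geotiff')`` yields a single list with
    the resolved path(s) of ``writers/geotiff.yaml`` (writer name chosen for
    illustration).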
Returns: Generator of lists of configuration files """ search_paths = (ppp_config_dir,) if ppp_config_dir else tuple() if writer is not None: if not isinstance(writer, (list, tuple)): writer = [writer] # given a config filename or writer name config_files = [w if w.endswith('.yaml') else w + '.yaml' for w in writer] else: writer_configs = glob_config(os.path.join('writers', '*.yaml'), *search_paths) config_files = set(writer_configs) for config_file in config_files: config_basename = os.path.basename(config_file) writer_configs = config_search_paths( os.path.join("writers", config_basename), *search_paths) if not writer_configs: LOG.warning("No writer configs found for '%s'", writer) continue yield writer_configs def available_writers(as_dict=False): """Available writers based on current configuration. Args: as_dict (bool): Optionally return writer information as a dictionary. Default: False Returns: List of available writer names. If `as_dict` is `True` then a list of dictionaries including additionally writer information is returned. """ writers = [] for writer_configs in configs_for_writer(): try: writer_info = read_writer_config(writer_configs) except (KeyError, IOError, yaml.YAMLError): LOG.warning("Could not import writer config from: %s", writer_configs) LOG.debug("Error loading YAML", exc_info=True) continue writers.append(writer_info if as_dict else writer_info['name']) return writers def _determine_mode(dataset): if "mode" in dataset.attrs: return dataset.attrs["mode"] if dataset.ndim == 2: return "L" elif dataset.shape[0] == 2: return "LA" elif dataset.shape[0] == 3: return "RGB" elif dataset.shape[0] == 4: return "RGBA" else: raise RuntimeError("Can't determine 'mode' of dataset: %s" % str(dataset)) def _burn_overlay(img, image_metadata, area, cw_, overlays): """Burn the overlay in the image array.""" del image_metadata cw_.add_overlay_from_dict(overlays, area, background=img) return img def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=None, level_coast=None, level_borders=None, fill_value=None, grid=None, overlays=None): """Add coastline, political borders and grid(graticules) to image. Uses ``color`` for feature colors where ``color`` is a 3-element tuple of integers between 0 and 255 representing (R, G, B). .. warning:: This function currently loses the data mask (alpha band). ``resolution`` is chosen automatically if None (default), otherwise it should be one of: +-----+-------------------------+---------+ | 'f' | Full resolution | 0.04 km | +-----+-------------------------+---------+ | 'h' | High resolution | 0.2 km | +-----+-------------------------+---------+ | 'i' | Intermediate resolution | 1.0 km | +-----+-------------------------+---------+ | 'l' | Low resolution | 5.0 km | +-----+-------------------------+---------+ | 'c' | Crude resolution | 25 km | +-----+-------------------------+---------+ ``grid`` is a dictionary with key values as documented in detail in pycoast eg. overlay={'grid': {'major_lonlat': (10, 10), 'write_text': False, 'outline': (224, 224, 224), 'width': 0.5}} Here major_lonlat is plotted every 10 deg for both longitude and latitude, no labels for the grid lines are plotted, the color used for the grid lines is light gray, and the width of the gratucules is 0.5 pixels. 
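    A complete ``overlays`` dictionary combining the supported sections could
    look like this (the colors, widths and levels are illustrative only)::

        overlays = {'coasts': {'outline': (255, 255, 0), 'width': 1.5, 'level': 1},
                    'borders': {'outline': (255, 0, 0), 'level': 1},
                    'grid': {'major_lonlat': (10, 10), 'write_text': False,
                             'outline': (224, 224, 224), 'width': 0.5}}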
For grid if aggdraw is used, font option is mandatory, if not ``write_text`` is set to False:: font = aggdraw.Font('black', '/usr/share/fonts/truetype/msttcorefonts/Arial.ttf', opacity=127, size=16) """ if area is None: raise ValueError("Area of image is None, can't add overlay.") from pycoast import ContourWriterAGG if isinstance(area, str): area = get_area_def(area) LOG.info("Add coastlines and political borders to image.") old_args = [color, width, resolution, grid, level_coast, level_borders] if any(arg is not None for arg in old_args): warnings.warn("'color', 'width', 'resolution', 'grid', 'level_coast', 'level_borders'" " arguments will be deprecated soon. Please use 'overlays' instead.", DeprecationWarning) if hasattr(orig_img, 'convert'): # image must be in RGB space to work with pycoast/pydecorate res_mode = ('RGBA' if orig_img.final_mode(fill_value).endswith('A') else 'RGB') orig_img = orig_img.convert(res_mode) elif not orig_img.mode.startswith('RGB'): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") if overlays is None: overlays = dict() # fill with sensible defaults general_params = {'outline': color or (0, 0, 0), 'width': width or 0.5} for key, val in general_params.items(): if val is not None: overlays.setdefault('coasts', {}).setdefault(key, val) overlays.setdefault('borders', {}).setdefault(key, val) if level_coast is None: level_coast = 1 overlays.setdefault('coasts', {}).setdefault('level', level_coast) if level_borders is None: level_borders = 1 overlays.setdefault('borders', {}).setdefault('level', level_borders) if grid is not None: if 'major_lonlat' in grid and grid['major_lonlat']: major_lonlat = grid.pop('major_lonlat') minor_lonlat = grid.pop('minor_lonlat', major_lonlat) grid.update({'Dlonlat': major_lonlat, 'dlonlat': minor_lonlat}) for key, val in grid.items(): overlays.setdefault('grid', {}).setdefault(key, val) cw_ = ContourWriterAGG(coast_dir) new_image = orig_img.apply_pil(_burn_overlay, res_mode, None, {'fill_value': fill_value}, (area, cw_, overlays), None) return new_image def add_text(orig, dc, img, text): """Add text to an image using the pydecorate package. All the features of pydecorate's ``add_text`` are available. See documentation of :doc:`pydecorate:index` for more info. """ LOG.info("Add text to image.") dc.add_text(**text) arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], coords={'y': orig.data.coords['y'], 'x': orig.data.coords['x'], 'bands': list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) def add_logo(orig, dc, img, logo): """Add logos or other images to an image using the pydecorate package. All the features of pydecorate's ``add_logo`` are available. See documentation of :doc:`pydecorate:index` for more info. """ LOG.info("Add logo to image.") dc.add_logo(**logo) arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], coords={'y': orig.data.coords['y'], 'x': orig.data.coords['x'], 'bands': list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) def add_scale(orig, dc, img, scale): """Add scale to an image using the pydecorate package. All the features of pydecorate's ``add_scale`` are available. See documentation of :doc:`pydecorate:index` for more info. 
""" LOG.info("Add scale to image.") dc.add_scale(**scale) arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], coords={'y': orig.data.coords['y'], 'x': orig.data.coords['x'], 'bands': list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) def add_decorate(orig, fill_value=None, **decorate): """Decorate an image with text and/or logos/images. This call adds text/logos in order as given in the input to keep the alignment features available in pydecorate. An example of the decorate config:: decorate = { 'decorate': [ {'logo': {'logo_path': , 'height': 143, 'bg': 'white', 'bg_opacity': 255}}, {'text': {'txt': start_time_txt, 'align': {'top_bottom': 'bottom', 'left_right': 'right'}, 'font': , 'font_size': 22, 'height': 30, 'bg': 'black', 'bg_opacity': 255, 'line': 'white'}} ] } Any numbers of text/logo in any order can be added to the decorate list, but the order of the list is kept as described above. Note that a feature given in one element, eg. bg (which is the background color) will also apply on the next elements unless a new value is given. align is a special keyword telling where in the image to start adding features, top_bottom is either top or bottom and left_right is either left or right. """ LOG.info("Decorate image.") # Need to create this here to possible keep the alignment # when adding text and/or logo with pydecorate if hasattr(orig, 'convert'): # image must be in RGB space to work with pycoast/pydecorate orig = orig.convert('RGBA' if orig.mode.endswith('A') else 'RGB') elif not orig.mode.startswith('RGB'): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") img_orig = orig.pil_image(fill_value=fill_value) from pydecorate import DecoratorAGG dc = DecoratorAGG(img_orig) # decorate need to be a list to maintain the alignment # as ordered in the list img = orig if 'decorate' in decorate: for dec in decorate['decorate']: if 'logo' in dec: img = add_logo(img, dc, img_orig, logo=dec['logo']) elif 'text' in dec: img = add_text(img, dc, img_orig, text=dec['text']) elif 'scale' in dec: img = add_scale(img, dc, img_orig, scale=dec['scale']) return img def get_enhanced_image(dataset, ppp_config_dir=None, enhance=None, enhancement_config_file=None, overlay=None, decorate=None, fill_value=None): """Get an enhanced version of `dataset` as an :class:`~trollimage.xrimage.XRImage` instance. Args: dataset (xarray.DataArray): Data to be enhanced and converted to an image. ppp_config_dir (str): Root configuration directory. enhance (bool or Enhancer): Whether to automatically enhance data to be more visually useful and to fit inside the file format being saved to. By default this will default to using the enhancement configuration files found using the default :class:`~satpy.writers.Enhancer` class. This can be set to `False` so that no enhancments are performed. This can also be an instance of the :class:`~satpy.writers.Enhancer` class if further custom enhancement is needed. enhancement_config_file (str): Deprecated. overlay (dict): Options for image overlays. See :func:`add_overlay` for available options. decorate (dict): Options for decorating the image. See :func:`add_decorate` for available options. fill_value (int or float): Value to use when pixels are masked or invalid. Default of `None` means to create an alpha channel. See :meth:`~trollimage.xrimage.XRImage.finalize` for more details. Only used when adding overlays or decorations. 
Otherwise it is up to the caller to "finalize" the image before using it except if calling ``img.show()`` or providing the image to a writer as these will finalize the image. .. versionchanged:: 0.10 Deprecated `enhancement_config_file` and 'enhancer' in favor of `enhance`. Pass an instance of the `Enhancer` class to `enhance` instead. """ if ppp_config_dir is None: ppp_config_dir = get_environ_config_dir() if enhancement_config_file is not None: warnings.warn("'enhancement_config_file' has been deprecated. Pass an instance of the " "'Enhancer' class to the 'enhance' keyword argument instead.", DeprecationWarning) if enhance is False: # no enhancement enhancer = None elif enhance is None or enhance is True: # default enhancement enhancer = Enhancer(ppp_config_dir, enhancement_config_file) else: # custom enhancer enhancer = enhance # Create an image for enhancement img = to_image(dataset) if enhancer is None or enhancer.enhancement_tree is None: LOG.debug("No enhancement being applied to dataset") else: if dataset.attrs.get("sensor", None): enhancer.add_sensor_enhancements(dataset.attrs["sensor"]) enhancer.apply(img, **dataset.attrs) if overlay is not None: img = add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay) if decorate is not None: img = add_decorate(img, fill_value=fill_value, **decorate) return img def show(dataset, **kwargs): """Display the dataset as an image.""" img = get_enhanced_image(dataset.squeeze(), **kwargs) img.show() return img def to_image(dataset): """Convert ``dataset`` into a :class:`~trollimage.xrimage.XRImage` instance. Convert the ``dataset`` into an instance of the :class:`~trollimage.xrimage.XRImage` class. This function makes no other changes. To get an enhanced image, possibly with overlays and decoration, see :func:`~get_enhanced_image`. Args: dataset (xarray.DataArray): Data to be converted to an image. Returns: Instance of :class:`~trollimage.xrimage.XRImage`. """ dataset = dataset.squeeze() if dataset.ndim < 2: raise ValueError("Need at least a 2D array to make an image.") else: return XRImage(dataset) def split_results(results): """Split results. Get sources, targets and delayed objects to separate lists from a list of results collected from (multiple) writer(s). """ from dask.delayed import Delayed def flatten(results): out = [] if isinstance(results, (list, tuple)): for itm in results: out.extend(flatten(itm)) return out return [results] sources = [] targets = [] delayeds = [] for res in flatten(results): if isinstance(res, da.Array): sources.append(res) elif isinstance(res, Delayed): delayeds.append(res) else: targets.append(res) return sources, targets, delayeds def compute_writer_results(results): """Compute all the given dask graphs `results` so that the files are saved. Args: results (iterable): Iterable of dask graphs resulting from calls to `scn.save_datasets(..., compute=False)` """ if not results: return sources, targets, delayeds = split_results(results) # one or more writers have targets that we need to close in the future if targets: delayeds.append(da.store(sources, targets, compute=False)) if delayeds: da.compute(delayeds) if targets: for target in targets: if hasattr(target, 'close'): target.close() class Writer(Plugin): """Base Writer class for all other writers. A minimal writer subclass should implement the `save_dataset` method. """ def __init__(self, name=None, filename=None, base_dir=None, **kwargs): """Initialize the writer object. Args: name (str): A name for this writer for log and error messages. 
If this writer is configured in a YAML file its name should match the name of the YAML file. Writer names may also appear in output file attributes. filename (str): Filename to save data to. This filename can and should specify certain python string formatting fields to differentiate between data written to the files. Any attributes provided by the ``.attrs`` of a DataArray object may be included. Format and conversion specifiers provided by the :class:`trollsift ` package may also be used. Any directories in the provided pattern will be created if they do not exist. Example:: {platform_name}_{sensor}_{name}_{start_time:%Y%m%d_%H%M%S}.tif base_dir (str): Base destination directories for all created files. kwargs (dict): Additional keyword arguments to pass to the :class:`~satpy.plugin_base.Plugin` class. """ # Load the config Plugin.__init__(self, **kwargs) self.info = self.config.get('writer', {}) if 'file_pattern' in self.info: warnings.warn("Writer YAML config is using 'file_pattern' which " "has been deprecated, use 'filename' instead.") self.info['filename'] = self.info.pop('file_pattern') if 'file_pattern' in kwargs: warnings.warn("'file_pattern' has been deprecated, use 'filename' instead.", DeprecationWarning) filename = kwargs.pop('file_pattern') # Use options from the config file if they weren't passed as arguments self.name = self.info.get("name", None) if name is None else name self.file_pattern = self.info.get("filename", None) if filename is None else filename if self.name is None: raise ValueError("Writer 'name' not provided") self.filename_parser = self.create_filename_parser(base_dir) @classmethod def separate_init_kwargs(cls, kwargs): """Help separating arguments between init and save methods. Currently the :class:`~satpy.scene.Scene` is passed one set of arguments to represent the Writer creation and saving steps. This is not preferred for Writer structure, but provides a simpler interface to users. This method splits the provided keyword arguments between those needed for initialization and those needed for the ``save_dataset`` and ``save_datasets`` method calls. Writer subclasses should try to prefer keyword arguments only for the save methods only and leave the init keyword arguments to the base classes when possible. """ # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs = {} kwargs = kwargs.copy() for kw in ['base_dir', 'filename', 'file_pattern']: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def create_filename_parser(self, base_dir): """Create a :class:`trollsift.parser.Parser` object for later use.""" # just in case a writer needs more complex file patterns # Set a way to create filenames if we were given a pattern if base_dir and self.file_pattern: file_pattern = os.path.join(base_dir, self.file_pattern) else: file_pattern = self.file_pattern return parser.Parser(file_pattern) if file_pattern else None def get_filename(self, **kwargs): """Create a filename where output data will be saved. Args: kwargs (dict): Attributes and other metadata to use for formatting the previously provided `filename`. 
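        Example, assuming the writer was created with
        ``filename='{name}_{start_time:%Y%m%d_%H%M%S}.tif'``::

            >>> writer.get_filename(name='overview',
            ...                     start_time=datetime(2018, 1, 1, 12, 0))
            'overview_20180101_120000.tif'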
""" if self.filename_parser is None: raise RuntimeError("No filename pattern or specific filename provided") output_filename = self.filename_parser.compose(kwargs) dirname = os.path.dirname(output_filename) if dirname and not os.path.isdir(dirname): LOG.info("Creating output directory: {}".format(dirname)) os.makedirs(dirname) return output_filename def save_datasets(self, datasets, compute=True, **kwargs): """Save all datasets to one or more files. Subclasses can use this method to save all datasets to one single file or optimize the writing of individual datasets. By default this simply calls `save_dataset` for each dataset provided. Args: datasets (iterable): Iterable of `xarray.DataArray` objects to save using this writer. compute (bool): If `True` (default), compute all of the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a :func:`dask.array.store` call. See return values below for more details. **kwargs: Keyword arguments to pass to `save_dataset`. See that documentation for more details. Returns: Value returned depends on `compute` keyword argument. If `compute` is `True` the value is the result of a either a :func:`dask.array.store` operation or a :doc:`dask:delayed` compute, typically this is `None`. If `compute` is `False` then the result is either a :doc:`dask:delayed` object that can be computed with `delayed.compute()` or a two element tuple of sources and targets to be passed to :func:`dask.array.store`. If `targets` is provided then it is the caller's responsibility to close any objects that have a "close" method. """ results = [] for ds in datasets: results.append(self.save_dataset(ds, compute=False, **kwargs)) if compute: LOG.info("Computing and writing results...") return compute_writer_results([results]) targets, sources, delayeds = split_results([results]) if delayeds: # This writer had only delayed writes return delayeds else: return targets, sources def save_dataset(self, dataset, filename=None, fill_value=None, compute=True, **kwargs): """Save the ``dataset`` to a given ``filename``. This method must be overloaded by the subclass. Args: dataset (xarray.DataArray): Dataset to save using this writer. filename (str): Optionally specify the filename to save this dataset to. If not provided then `filename` which can be provided to the init method will be used and formatted by dataset attributes. fill_value (int or float): Replace invalid values in the dataset with this fill value if applicable to this writer. compute (bool): If `True` (default), compute and save the dataset. If `False` return either a :doc:`dask:delayed` object or tuple of (source, target). See the return values below for more information. **kwargs: Other keyword arguments for this particular writer. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a :doc:`dask:delayed` object or running :func:`dask.array.store`. If `compute` is `False` then the returned value is either a :doc:`dask:delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to :func:`dask.array.store`. If target is provided the the caller is responsible for calling `target.close()` if the target has this method. 
""" raise NotImplementedError( "Writer '%s' has not implemented dataset saving" % (self.name, )) class ImageWriter(Writer): """Base writer for image file formats.""" def __init__(self, name=None, filename=None, base_dir=None, enhance=None, enhancement_config=None, **kwargs): """Initialize image writer object. Args: name (str): A name for this writer for log and error messages. If this writer is configured in a YAML file its name should match the name of the YAML file. Writer names may also appear in output file attributes. filename (str): Filename to save data to. This filename can and should specify certain python string formatting fields to differentiate between data written to the files. Any attributes provided by the ``.attrs`` of a DataArray object may be included. Format and conversion specifiers provided by the :class:`trollsift ` package may also be used. Any directories in the provided pattern will be created if they do not exist. Example:: {platform_name}_{sensor}_{name}_{start_time:%Y%m%d_%H%M%S}.tif base_dir (str): Base destination directories for all created files. enhance (bool or Enhancer): Whether to automatically enhance data to be more visually useful and to fit inside the file format being saved to. By default this will default to using the enhancement configuration files found using the default :class:`~satpy.writers.Enhancer` class. This can be set to `False` so that no enhancments are performed. This can also be an instance of the :class:`~satpy.writers.Enhancer` class if further custom enhancement is needed. enhancement_config (str): Deprecated. kwargs (dict): Additional keyword arguments to pass to the :class:`~satpy.writer.Writer` base class. .. versionchanged:: 0.10 Deprecated `enhancement_config_file` and 'enhancer' in favor of `enhance`. Pass an instance of the `Enhancer` class to `enhance` instead. """ super(ImageWriter, self).__init__(name, filename, base_dir, **kwargs) if enhancement_config is not None: warnings.warn("'enhancement_config' has been deprecated. Pass an instance of the " "'Enhancer' class to the 'enhance' keyword argument instead.", DeprecationWarning) else: enhancement_config = self.info.get("enhancement_config", None) if enhance is False: # No enhancement self.enhancer = False elif enhance is None or enhance is True: # default enhancement self.enhancer = Enhancer(ppp_config_dir=self.ppp_config_dir, enhancement_config_file=enhancement_config) else: # custom enhancer self.enhancer = enhance @classmethod def separate_init_kwargs(cls, kwargs): """Separate the init kwargs.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(ImageWriter, cls).separate_init_kwargs(kwargs) for kw in ['enhancement_config', 'enhance']: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def save_dataset(self, dataset, filename=None, fill_value=None, overlay=None, decorate=None, compute=True, **kwargs): """Save the ``dataset`` to a given ``filename``. This method creates an enhanced image using :func:`get_enhanced_image`. The image is then passed to :meth:`save_image`. See both of these functions for more details on the arguments passed to this method. """ img = get_enhanced_image(dataset.squeeze(), enhance=self.enhancer, overlay=overlay, decorate=decorate, fill_value=fill_value) return self.save_image(img, filename=filename, compute=compute, fill_value=fill_value, **kwargs) def save_image(self, img, filename=None, compute=True, **kwargs): """Save Image object to a given ``filename``. 
Args: img (trollimage.xrimage.XRImage): Image object to save to disk. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. compute (bool): If `True` (default), compute and save the dataset. If `False` return either a :doc:`dask:delayed` object or tuple of (source, target). See the return values below for more information. **kwargs: Other keyword arguments to pass to this writer. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a :doc:`dask:delayed` object or running :func:`dask.array.store`. If `compute` is `False` then the returned value is either a :doc:`dask:delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to :func:`dask.array.store`. If target is provided the the caller is responsible for calling `target.close()` if the target has this method. """ raise NotImplementedError("Writer '%s' has not implemented image saving" % (self.name,)) class DecisionTree(object): """The decision tree.""" any_key = None def __init__(self, decision_dicts, attrs, **kwargs): """Init the decision tree.""" self.attrs = attrs self.tree = {} if not isinstance(decision_dicts, (list, tuple)): decision_dicts = [decision_dicts] self.add_config_to_tree(*decision_dicts) def add_config_to_tree(self, *decision_dicts): """Add a configuration to the tree.""" conf = {} for decision_dict in decision_dicts: conf = recursive_dict_update(conf, decision_dict) self._build_tree(conf) def _build_tree(self, conf): """Build the tree.""" for _section_name, attrs in conf.items(): # Set a path in the tree for each section in the configuration # files curr_level = self.tree for attr in self.attrs: # or None is necessary if they have empty strings this_attr = attrs.get(attr, self.any_key) or None if attr == self.attrs[-1]: # if we are at the last attribute, then assign the value # set the dictionary of attributes because the config is # not persistent curr_level[this_attr] = attrs elif this_attr not in curr_level: curr_level[this_attr] = {} curr_level = curr_level[this_attr] def _find_match(self, curr_level, attrs, kwargs): """Find a match.""" if len(attrs) == 0: # we're at the bottom level, we must have found something return curr_level match = None try: if attrs[0] in kwargs and kwargs[attrs[0]] in curr_level: # we know what we're searching for, try to find a pattern # that uses this attribute match = self._find_match(curr_level[kwargs[attrs[0]]], attrs[1:], kwargs) except TypeError: # we don't handle multiple values (for example sensor) atm. 
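            # A TypeError lands here when the attribute value is unhashable,
            # e.g. a list of sensor names: such values cannot be looked up as
            # dictionary keys in ``curr_level``, so matching falls through to
            # the ``any_key`` branch below.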
LOG.debug("Strange stuff happening in decision tree for %s: %s", attrs[0], kwargs[attrs[0]]) if match is None and self.any_key in curr_level: # if we couldn't find it using the attribute then continue with # the other attributes down the 'any' path match = self._find_match(curr_level[self.any_key], attrs[1:], kwargs) return match def find_match(self, **kwargs): """Find a match.""" try: match = self._find_match(self.tree, self.attrs, kwargs) except (KeyError, IndexError, ValueError): LOG.debug("Match exception:", exc_info=True) LOG.error("Error when finding matching decision section") if match is None: # only possible if no default section was provided raise KeyError("No decision section found for %s" % (kwargs.get("uid", None), )) return match class EnhancementDecisionTree(DecisionTree): """The enhancement decision tree.""" def __init__(self, *decision_dicts, **kwargs): """Init the decision tree.""" attrs = kwargs.pop("attrs", ("name", "platform_name", "sensor", "standard_name", "units",)) self.prefix = kwargs.pop("config_section", "enhancements") super(EnhancementDecisionTree, self).__init__( decision_dicts, attrs, **kwargs) def add_config_to_tree(self, *decision_dict): """Add configuration to tree.""" conf = {} for config_file in decision_dict: if os.path.isfile(config_file): with open(config_file) as fd: enhancement_config = yaml.load(fd, Loader=UnsafeLoader) if enhancement_config is None: # empty file continue enhancement_section = enhancement_config.get( self.prefix, {}) if not enhancement_section: LOG.debug("Config '{}' has no '{}' section or it is empty".format(config_file, self.prefix)) continue conf = recursive_dict_update(conf, enhancement_section) elif isinstance(config_file, dict): conf = recursive_dict_update(conf, config_file) else: LOG.debug("Loading enhancement config string") d = yaml.load(config_file, Loader=UnsafeLoader) if not isinstance(d, dict): raise ValueError( "YAML file doesn't exist or string is not YAML dict: {}".format(config_file)) conf = recursive_dict_update(conf, d) self._build_tree(conf) def find_match(self, **kwargs): """Find a match.""" try: return super(EnhancementDecisionTree, self).find_match(**kwargs) except KeyError: # give a more understandable error message raise KeyError("No enhancement configuration found for %s" % (kwargs.get("uid", None), )) class Enhancer(object): """Helper class to get enhancement information for images.""" def __init__(self, ppp_config_dir=None, enhancement_config_file=None): """Initialize an Enhancer instance. Args: ppp_config_dir: Points to the base configuration directory enhancement_config_file: The enhancement configuration to apply, False to leave as is. 
""" self.ppp_config_dir = ppp_config_dir or get_environ_config_dir() self.enhancement_config_file = enhancement_config_file # Set enhancement_config_file to False for no enhancements if self.enhancement_config_file is None: # it wasn't specified in the config or in the kwargs, we should # provide a default config_fn = os.path.join("enhancements", "generic.yaml") self.enhancement_config_file = config_search_paths(config_fn, self.ppp_config_dir) if not self.enhancement_config_file: # They don't want any automatic enhancements self.enhancement_tree = None else: if not isinstance(self.enhancement_config_file, (list, tuple)): self.enhancement_config_file = [self.enhancement_config_file] self.enhancement_tree = EnhancementDecisionTree(*self.enhancement_config_file) self.sensor_enhancement_configs = [] def get_sensor_enhancement_config(self, sensor): """Get the sensor-specific config.""" if isinstance(sensor, str): # one single sensor sensor = [sensor] for sensor_name in sensor: config_fn = os.path.join("enhancements", sensor_name + ".yaml") config_files = config_search_paths(config_fn, self.ppp_config_dir) # Note: Enhancement configuration files can't overwrite individual # options, only entire sections are overwritten for config_file in config_files: yield config_file def add_sensor_enhancements(self, sensor): """Add sensor-specific enhancements.""" # XXX: Should we just load all enhancements from the base directory? new_configs = [] for config_file in self.get_sensor_enhancement_config(sensor): if config_file not in self.sensor_enhancement_configs: self.sensor_enhancement_configs.append(config_file) new_configs.append(config_file) if new_configs: self.enhancement_tree.add_config_to_tree(*new_configs) def apply(self, img, **info): """Apply the enhancements.""" enh_kwargs = self.enhancement_tree.find_match(**info) LOG.debug("Enhancement configuration options: %s" % (str(enh_kwargs['operations']), )) for operation in enh_kwargs['operations']: fun = operation['method'] args = operation.get('args', []) kwargs = operation.get('kwargs', {}) fun(img, *args, **kwargs) # img.enhance(**enh_kwargs) satpy-0.20.0/satpy/writers/cf_writer.py000066400000000000000000000721131362525524100201370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer for netCDF4/CF. Example usage ------------- The CF writer saves datasets in a Scene as `CF-compliant`_ netCDF file. Here is an example with MSG SEVIRI data in HRIT format: >>> from satpy import Scene >>> import glob >>> filenames = glob.glob('data/H*201903011200*') >>> scn = Scene(filenames=filenames, reader='seviri_l1b_hrit') >>> scn.load(['VIS006', 'IR_108']) >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc', exclude_attrs=['raw_metadata']) * You can select the netCDF backend using the ``engine`` keyword argument. Default is ``h5netcdf``. 
* For datasets with area definition you can exclude lat/lon coordinates by setting
  ``include_lonlats=False``.
* By default the dataset name is prepended to non-dimensional coordinates such as scanline timestamps.
  This ensures maximum consistency, i.e. the netCDF variable names are independent of the number/set
  of datasets to be written. If a non-dimensional coordinate is identical for all datasets, the
  prefix can be removed by setting ``pretty=True``.

Grouping
~~~~~~~~

All datasets to be saved must have the same projection coordinates ``x`` and ``y``. If a scene holds
datasets with different grids, the CF compliant workaround is to save the datasets to separate files.
Alternatively, you can save datasets with common grids in separate netCDF groups as follows:

    >>> scn.load(['VIS006', 'IR_108', 'HRV'])
    >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108', 'HRV'],
    ...                   filename='seviri_test.nc', exclude_attrs=['raw_metadata'],
    ...                   groups={'visir': ['VIS006', 'IR_108'], 'hrv': ['HRV']})

Note that the resulting file will not be fully CF compliant.

Attribute Encoding
~~~~~~~~~~~~~~~~~~

In the above examples, raw metadata from the HRIT files has been excluded. If you want all attributes
to be included, just remove the ``exclude_attrs`` keyword argument. By default, dict-type dataset
attributes, such as the raw metadata, are encoded as a string using json. Thus, you can use json to
decode them afterwards:

    >>> import xarray as xr
    >>> import json
    >>> # Save scene to nc-file
    >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc')
    >>> # Now read data from the nc-file
    >>> ds = xr.open_dataset('seviri_test.nc')
    >>> raw_mda = json.loads(ds['IR_108'].attrs['raw_metadata'])
    >>> print(raw_mda['RadiometricProcessing']['Level15ImageCalibration']['CalSlope'])
    [0.020865 0.0278287 0.0232411 0.00365867 0.00831811 0.03862197
     0.12674432 0.10396091 0.20503568 0.22231115 0.1576069 0.0352385]

Alternatively it is possible to flatten dict-type attributes by setting ``flatten_attrs=True``. This
is more human readable as it will create a separate nc-attribute for each item in every dictionary.
Keys are concatenated with underscore separators. The `CalSlope` attribute can then be accessed as
follows:

    >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc',
    ...                   flatten_attrs=True)
    >>> ds = xr.open_dataset('seviri_test.nc')
    >>> print(ds['IR_108'].attrs['raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalSlope'])
    [0.020865 0.0278287 0.0232411 0.00365867 0.00831811 0.03862197
     0.12674432 0.10396091 0.20503568 0.22231115 0.1576069 0.0352385]

This is what the corresponding ``ncdump`` output would look like in this case:

.. code-block:: none

    $ ncdump -h test_seviri.nc
    ...
    IR_108:raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalOffset = -1.064, ...;
    IR_108:raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalSlope = 0.021, ...;
    IR_108:raw_metadata_RadiometricProcessing_MPEFCalFeedback_AbsCalCoeff = 0.021, ...;
    ...

.. _CF-compliant: http://cfconventions.org/
"""

from collections import OrderedDict, defaultdict
import logging
from datetime import datetime
import json
import warnings

from dask.base import tokenize
import xarray as xr
from xarray.coding.times import CFDatetimeCoder
import numpy as np

from pyresample.geometry import AreaDefinition, SwathDefinition
from satpy.writers import Writer
from satpy.writers.utils import flatten_dict

logger = logging.getLogger(__name__)

EPOCH = u"seconds since 1970-01-01 00:00:00"

# Numpy datatypes compatible with all netCDF4 backends.
``np.unicode_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), np.dtype('int16'), np.dtype('uint16'), np.dtype('int32'), np.dtype('uint32'), np.dtype('int64'), np.dtype('uint64'), np.dtype('float32'), np.dtype('float64'), np.string_] # Unsigned and int64 isn't CF 1.7 compatible CF_DTYPES = [np.dtype('int8'), np.dtype('int16'), np.dtype('int32'), np.dtype('float32'), np.dtype('float64'), np.string_] CF_VERSION = 'CF-1.7' def tmerc2cf(area): """Return the cf grid mapping for the tmerc projection.""" proj_dict = area.proj_dict args = dict(latitude_of_projection_origin=proj_dict.get('lat_0'), longitude_of_central_meridian=proj_dict.get('lon_0'), grid_mapping_name='transverse_mercator', reference_ellipsoid_name=proj_dict.get('ellps', 'WGS84'), false_easting=0., false_northing=0. ) if "no_rot" in proj_dict: args['no_rotation'] = 1 if "gamma" in proj_dict: args['gamma'] = proj_dict['gamma'] return args def omerc2cf(area): """Return the cf grid mapping for the omerc projection.""" proj_dict = area.proj_dict args = dict(azimuth_of_central_line=proj_dict.get('alpha'), latitude_of_projection_origin=proj_dict.get('lat_0'), longitude_of_projection_origin=proj_dict.get('lonc'), grid_mapping_name='oblique_mercator', reference_ellipsoid_name=proj_dict.get('ellps', 'WGS84'), prime_meridian_name=proj_dict.get('pm', 'Greenwich'), horizontal_datum_name=proj_dict.get('datum', 'unknown'), geographic_crs_name='unknown', false_easting=0., false_northing=0. ) if "no_rot" in proj_dict: args['no_rotation'] = 1 if "gamma" in proj_dict: args['gamma'] = proj_dict['gamma'] return args def geos2cf(area): """Return the cf grid mapping for the geos projection.""" from pyresample.utils import proj4_radius_parameters proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) args = dict(perspective_point_height=proj_dict.get('h'), latitude_of_projection_origin=proj_dict.get('lat_0', 0), longitude_of_projection_origin=proj_dict.get('lon_0', 0), grid_mapping_name='geostationary', semi_major_axis=a, semi_minor_axis=b, # semi_major_axis=proj_dict.get('a'), # semi_minor_axis=proj_dict.get('b'), sweep_axis=proj_dict.get('sweep'), ) return args def laea2cf(area): """Return the cf grid mapping for the laea projection.""" proj_dict = area.proj_dict args = dict(latitude_of_projection_origin=proj_dict.get('lat_0'), longitude_of_projection_origin=proj_dict.get('lon_0'), grid_mapping_name='lambert_azimuthal_equal_area', ) return args mappings = {'omerc': omerc2cf, 'laea': laea2cf, 'geos': geos2cf, 'tmerc': tmerc2cf} def create_grid_mapping(area): """Create the grid mapping instance for `area`.""" try: grid_mapping = mappings[area.proj_dict['proj']](area) grid_mapping['name'] = area.proj_dict['proj'] except KeyError: warnings.warn('The projection "{}" is either not CF compliant or not implemented yet. 
' 'Using the proj4 string instead.'.format(area.proj_str)) grid_mapping = {'name': 'proj4', 'proj4': area.proj_str} return grid_mapping def get_extra_ds(dataset): """Get the extra datasets associated to *dataset*.""" ds_collection = {} for ds in dataset.attrs.get('ancillary_variables', []): ds_collection.update(get_extra_ds(ds)) ds_collection[dataset.attrs['name']] = dataset return ds_collection def area2lonlat(dataarray): """Convert an area to longitudes and latitudes.""" area = dataarray.attrs['area'] lons, lats = area.get_lonlats_dask() lons = xr.DataArray(lons, dims=['y', 'x'], attrs={'name': "longitude", 'standard_name': "longitude", 'units': 'degrees_east'}, name='longitude') lats = xr.DataArray(lats, dims=['y', 'x'], attrs={'name': "latitude", 'standard_name': "latitude", 'units': 'degrees_north'}, name='latitude') dataarray['longitude'] = lons dataarray['latitude'] = lats return [dataarray] def area2gridmapping(dataarray): """Convert an area to at CF grid mapping.""" area = dataarray.attrs['area'] attrs = create_grid_mapping(area) if attrs is not None and 'name' in attrs.keys() and attrs['name'] != "proj4": dataarray.attrs['grid_mapping'] = attrs['name'] name = attrs['name'] else: # Handle the case when the projection cannot be converted to a standard CF representation or this has not # been implemented yet. dataarray.attrs['grid_proj4'] = area.proj4_string name = "proj4" return [dataarray, xr.DataArray(0, attrs=attrs, name=name)] def area2cf(dataarray, strict=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] dataarray = dataarray.copy(deep=True) if isinstance(dataarray.attrs['area'], SwathDefinition) or strict: res = area2lonlat(dataarray) if isinstance(dataarray.attrs['area'], AreaDefinition): res.extend(area2gridmapping(dataarray)) res.append(dataarray) return res def make_time_bounds(start_times, end_times): """Create time bounds for the current *dataarray*.""" start_time = min(start_time for start_time in start_times if start_time is not None) end_time = min(end_time for end_time in end_times if end_time is not None) data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]], dims=['time', 'bnds_1d']) return data def assert_xy_unique(datas): """Check that all datasets share the same projection coordinates x/y.""" unique_x = set() unique_y = set() for dataset in datas.values(): if 'y' in dataset.dims: token_y = tokenize(dataset['y'].data) unique_y.add(token_y) if 'x' in dataset.dims: token_x = tokenize(dataset['x'].data) unique_x.add(token_x) if len(unique_x) > 1 or len(unique_y) > 1: raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' 'Please group them by area or save them in separate files.') def link_coords(datas): """Link datasets and coordinates. If the `coordinates` attribute of a data array links to other datasets in the scene, for example `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set automatically. 
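
    A minimal sketch of the behaviour (hypothetical dataset names, assuming ``lon``/``lat``
    are present in ``datas``)::

        datas = {'ir': ir_array, 'lon': lon_array, 'lat': lat_array}
        datas['ir'].attrs['coordinates'] = 'lon lat'
        link_coords(datas)
        # datas['ir'] now carries 'lon' and 'lat' as coordinates and its
        # 'coordinates' attribute has been dropped.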
""" for ds_name, dataset in datas.items(): coords = dataset.attrs.get('coordinates', []) if isinstance(coords, str): coords = coords.split(' ') for coord in coords: if coord not in dataset.coords: try: dataset[coord] = datas[coord] except KeyError: warnings.warn('Coordinate "{}" referenced by dataset {} does not exist, dropping reference.'.format( coord, ds_name)) continue # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() dataset.attrs.pop('coordinates', None) def make_alt_coords_unique(datas, pretty=False): """Make non-dimensional coordinates unique among all datasets. Non-dimensional (or alternative) coordinates, such as scanline timestamps, may occur in multiple datasets with the same name and dimension but different values. In order to avoid conflicts, prepend the dataset name to the coordinate name. If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, its name will not be modified. Since all datasets must have the same projection coordinates, this is not applied to latitude and longitude. Args: datas (dict): Dictionary of (dataset name, dataset) pretty (bool): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. Returns: Dictionary holding the updated datasets """ # Determine which non-dimensional coordinates are unique tokens = defaultdict(set) for dataset in datas.values(): for coord_name in dataset.coords: if coord_name.lower() not in ('latitude', 'longitude', 'lat', 'lon') and coord_name not in dataset.dims: tokens[coord_name].add(tokenize(dataset[coord_name].data)) coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) # Prepend dataset name, if not unique or no pretty-format desired new_datas = datas.copy() for coord_name, unique in coords_unique.items(): if not pretty or not unique: if pretty: warnings.warn('Cannot pretty-format "{}" coordinates because they are not unique among the ' 'given datasets'.format(coord_name)) for ds_name, dataset in datas.items(): if coord_name in dataset.coords: rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} new_datas[ds_name] = new_datas[ds_name].rename(rename) return new_datas class AttributeEncoder(json.JSONEncoder): """JSON encoder for dataset attributes.""" def default(self, obj): """Return a json-serializable object for *obj*. In order to facilitate decoding, elements in dictionaries, lists/tuples and multi-dimensional arrays are encoded recursively. """ if isinstance(obj, dict): serialized = {} for key, val in obj.items(): serialized[key] = self.default(val) return serialized elif isinstance(obj, (list, tuple, np.ndarray)): return [self.default(item) for item in obj] return self._encode(obj) def _encode(self, obj): """Encode the given object as a json-serializable datatype.""" if isinstance(obj, (bool, np.bool_)): # Bool has to be checked first, because it is a subclass of int return str(obj).lower() elif isinstance(obj, (int, float, str)): return obj elif isinstance(obj, np.integer): return int(obj) elif isinstance(obj, np.floating): return float(obj) elif isinstance(obj, np.void): return tuple(obj) elif isinstance(obj, np.ndarray): return obj.tolist() return str(obj) def _encode_nc(obj): """Try to encode `obj` as a netcdf compatible datatype which most closely resembles the object's nature. 
Raises: ValueError if no such datatype could be found """ if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): return obj elif isinstance(obj, (float, str, np.integer, np.floating)): return obj elif isinstance(obj, np.ndarray): # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 if is_plain_1d: if obj.dtype in NC4_DTYPES: return obj elif obj.dtype == np.bool_: # Boolean arrays are not supported, convert to array of strings. return [s.lower() for s in obj.astype(str)] return obj.tolist() raise ValueError('Unable to encode') def encode_nc(obj): """Encode the given object as a netcdf compatible datatype. Try to find the datatype which most closely resembles the object's nature. If that fails, encode as a string. Plain lists are encoded recursively. """ if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): return [encode_nc(item) for item in obj] try: return _encode_nc(obj) except ValueError: try: # Decode byte-strings decoded = obj.decode() except AttributeError: decoded = obj return json.dumps(decoded, cls=AttributeEncoder).strip('"') def encode_attrs_nc(attrs): """Encode dataset attributes in a netcdf compatible datatype. Args: attrs (dict): Attributes to be encoded Returns: dict: Encoded (and sorted) attributes """ encoded_attrs = [] for key, val in sorted(attrs.items()): if val is not None: encoded_attrs.append((key, encode_nc(val))) return OrderedDict(encoded_attrs) class CFWriter(Writer): """Writer producing NetCDF/CF compatible datasets.""" @staticmethod def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, compression=None): """Convert the dataarray to something cf-compatible. 
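
        A rough usage sketch (``arr`` stands for any DataArray loaded through Satpy; note that
        ``da2cf`` is a static method):

        >>> cf_arr = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=['raw_metadata'])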
Args: dataarray (xr.DataArray): The data array to be converted epoch (str): Reference time for encoding of time coordinates flatten_attrs (bool): If True, flatten dict-type attributes exclude_attrs (list): List of dataset attributes to be excluded """ if exclude_attrs is None: exclude_attrs = [] new_data = dataarray.copy() # Remove area as well as user-defined attributes for key in ['area'] + exclude_attrs: new_data.attrs.pop(key, None) anc = [ds.attrs['name'] for ds in new_data.attrs.get('ancillary_variables', [])] if anc: new_data.attrs['ancillary_variables'] = ' '.join(anc) # TODO: make this a grid mapping or lon/lats # new_data.attrs['area'] = str(new_data.attrs.get('area')) for key, val in new_data.attrs.copy().items(): if val is None: new_data.attrs.pop(key) if key == 'ancillary_variables' and val == []: new_data.attrs.pop(key) new_data.attrs.pop('_last_resampler', None) if compression is not None: new_data.encoding.update(compression) if 'time' in new_data.coords: new_data['time'].encoding['units'] = epoch new_data['time'].attrs['standard_name'] = 'time' new_data['time'].attrs.pop('bounds', None) if 'time' not in new_data.dims: new_data = new_data.expand_dims('time') if 'x' in new_data.coords: new_data['x'].attrs['standard_name'] = 'projection_x_coordinate' new_data['x'].attrs['units'] = 'm' if 'y' in new_data.coords: new_data['y'].attrs['standard_name'] = 'projection_y_coordinate' new_data['y'].attrs['units'] = 'm' if 'crs' in new_data.coords: new_data = new_data.drop('crs') new_data.attrs.setdefault('long_name', new_data.attrs.pop('name')) if 'prerequisites' in new_data.attrs: new_data.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in new_data.attrs['prerequisites']] # Flatten dict-type attributes, if desired if flatten_attrs: new_data.attrs = flatten_dict(new_data.attrs) # Encode attributes to netcdf-compatible datatype new_data.attrs = encode_attrs_nc(new_data.attrs) return new_data def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, compression=None): """Collect and prepare datasets to be written.""" ds_collection = {} for ds in datasets: ds_collection.update(get_extra_ds(ds)) datas = {} start_times = [] end_times = [] for ds_name, ds in sorted(ds_collection.items()): if ds.dtype not in CF_DTYPES: warnings.warn('Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION)) try: new_datasets = area2cf(ds, strict=include_lonlats) except KeyError: new_datasets = [ds.copy(deep=True)] for new_ds in new_datasets: start_times.append(new_ds.attrs.get("start_time", None)) end_times.append(new_ds.attrs.get("end_time", None)) datas[new_ds.attrs['name']] = self.da2cf(new_ds, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, compression=compression) # Check and prepare coordinates assert_xy_unique(datas) link_coords(datas) datas = make_alt_coords_unique(datas, pretty=pretty) return datas, start_times, end_times def update_encoding(self, dataset, to_netcdf_kwargs): """Update encoding. Avoid _FillValue attribute being added to coordinate variables (https://github.com/pydata/xarray/issues/1865). 
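
        A minimal sketch (hypothetical variable name ``my_var``)::

            encoding, other_kwargs = writer.update_encoding(
                dataset, {'encoding': {'my_var': {'zlib': True}}})
            # 'encoding' now also maps each coordinate variable to
            # {'_FillValue': None} and, if a 'time' coordinate is present,
            # holds consistent units/calendar for 'time' and 'time_bnds'.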
""" other_to_netcdf_kwargs = to_netcdf_kwargs.copy() encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() coord_vars = [] for name, data_array in dataset.items(): coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) for coord_var in coord_vars: encoding.setdefault(coord_var, {}) encoding[coord_var].update({'_FillValue': None}) # Make sure time coordinates and bounds have the same units. Default is xarray's CF datetime # encoding, which can be overridden by user-defined encoding. if 'time' in dataset: try: dtnp64 = dataset['time'].data[0] except IndexError: dtnp64 = dataset['time'].data default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']} time_enc.update(encoding.get('time', {})) bounds_enc = {'units': time_enc['units'], 'calendar': time_enc['calendar'], '_FillValue': None} encoding['time'] = time_enc encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ return encoding, other_to_netcdf_kwargs def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, compression=None, **to_netcdf_kwargs): """Save the given datasets in one netCDF file. Note that all datasets (if grouping: in one group) must have the same projection coordinates. Args: datasets (list): Datasets to be saved filename (str): Output file groups (dict): Group datasets according to the given assignment: `{'group_name': ['dataset1', 'dataset2', ...]}`. Group name `None` corresponds to the root of the file, i.e. no group will be created. Warning: The results will not be fully CF compliant! header_attrs: Global attributes to be included engine (str): Module to be used for writing netCDF files. Follows xarray's :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. epoch (str): Reference time for encoding of time coordinates flatten_attrs (bool): If True, flatten dict-type attributes exclude_attrs (list): List of dataset attributes to be excluded include_lonlats (bool): Always include latitude and longitude coordinates, even for datasets with area definition pretty (bool): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. compression (dict): Compression to use on the datasets before saving, for example {'zlib': True, 'complevel': 9}. This is in turn passed the xarray's `to_netcdf` method: http://xarray.pydata.org/en/stable/generated/xarray.Dataset.to_netcdf.html for more possibilities. """ logger.info('Saving datasets to NetCDF4/CF.') if groups is None: # Write all datasets to the file root without creating a group groups_ = {None: datasets} else: # User specified a group assignment using dataset names. Collect the corresponding datasets. 
groups_ = defaultdict(list) for dataset in datasets: for group_name, group_members in groups.items(): if dataset.attrs['name'] in group_members: groups_[group_name].append(dataset) break if compression is None: compression = {'zlib': True} # Write global attributes to file root (creates the file) filename = filename or self.get_filename(**datasets[0].attrs) root = xr.Dataset({}, attrs={}) if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) root.attrs = encode_attrs_nc(header_attrs) root.attrs['history'] = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) if groups is None: # Groups are not CF-1.7 compliant root.attrs['Conventions'] = CF_VERSION # Remove satpy-specific kwargs satpy_kwargs = ['overlay', 'decorate', 'config_files'] for kwarg in satpy_kwargs: to_netcdf_kwargs.pop(kwarg, None) init_nc_kwargs = to_netcdf_kwargs.copy() init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point init_nc_kwargs.pop('unlimited_dims', None) written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] # Write datasets to groups (appending to the file; group=None means no group) for group_name, group_datasets in groups_.items(): # XXX: Should we combine the info of all datasets? datas, start_times, end_times = self._collect_datasets( group_datasets, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, include_lonlats=include_lonlats, pretty=pretty, compression=compression) dataset = xr.Dataset(datas) if 'time' in dataset: dataset['time_bnds'] = make_time_bounds(start_times, end_times) dataset['time'].attrs['bounds'] = "time_bnds" dataset['time'].attrs['standard_name'] = "time" else: grp_str = ' of group {}'.format(group_name) if group_name is not None else '' logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str)) encoding, other_to_netcdf_kwargs = self.update_encoding(dataset, to_netcdf_kwargs) res = dataset.to_netcdf(filename, engine=engine, group=group_name, mode='a', encoding=encoding, **other_to_netcdf_kwargs) written.append(res) return written satpy-0.20.0/satpy/writers/geotiff.py000066400000000000000000000207441362525524100176010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GeoTIFF writer objects for creating GeoTIFF files from `DataArray` objects.""" import logging import numpy as np from satpy.writers import ImageWriter # make sure we have rasterio even though we don't use it until trollimage # saves the image import rasterio # noqa LOG = logging.getLogger(__name__) class GeoTIFFWriter(ImageWriter): """Writer to save GeoTIFF images. 
Basic example from Scene: >>> scn.save_datasets(writer='geotiff') Un-enhanced float geotiff with NaN for fill values: >>> scn.save_datasets(writer='geotiff', dtype=np.float32, enhance=False) To add custom metadata use `tags`: >>> scn.save_dataset(dataset_name, writer='geotiff', ... tags={'offset': 291.8, 'scale': -0.35}) For performance tips on creating geotiffs quickly and making them smaller see the :doc:`faq`. """ GDAL_OPTIONS = ("tfw", "rpb", "rpctxt", "interleave", "tiled", "blockxsize", "blockysize", "nbits", "compress", "num_threads", "predictor", "discard_lsb", "sparse_ok", "jpeg_quality", "jpegtablesmode", "zlevel", "photometric", "alpha", "profile", "bigtiff", "pixeltype", "copy_src_overviews",) def __init__(self, dtype=None, tags=None, **kwargs): """Init the writer.""" super(GeoTIFFWriter, self).__init__(default_config_filename="writers/geotiff.yaml", **kwargs) self.dtype = self.info.get("dtype") if dtype is None else dtype self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) # GDAL specific settings self.gdal_options = {} for k in self.GDAL_OPTIONS: if k in kwargs or k in self.info: self.gdal_options[k] = kwargs.get(k, self.info[k]) @classmethod def separate_init_kwargs(cls, kwargs): """Separate the init keyword args.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(GeoTIFFWriter, cls).separate_init_kwargs( kwargs) for kw in ['dtype', 'tags']: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def save_image(self, img, filename=None, dtype=None, fill_value=None, compute=True, keep_palette=False, cmap=None, tags=None, include_scale_offset=False, **kwargs): """Save the image to the given ``filename`` in geotiff_ format. Note for faster output and reduced memory usage the ``rasterio`` library must be installed. This writer currently falls back to using ``gdal`` directly, but that will be deprecated in the future. Args: img (xarray.DataArray): Data to save to geotiff. filename (str): Filename to save the image to. Defaults to ``filename`` passed during writer creation. Unlike the creation ``filename`` keyword argument, this filename does not get formatted with data attributes. dtype (numpy.dtype): Numpy data type to save the image as. Defaults to 8-bit unsigned integer (``np.uint8``). If the ``dtype`` argument is provided during writer creation then that will be used as the default. fill_value (int or float): Value to use where data values are NaN/null. If this is specified in the writer configuration file that value will be used as the default. compute (bool): Compute dask arrays and save the image immediately. If ``False`` then the return value can be passed to :func:`~satpy.writers.compute_writer_results` to do the computation. This is useful when multiple images may share input calculations where dask can benefit from not repeating them multiple times. Defaults to ``True`` in the writer by itself, but is typically passed as ``False`` by callers where calculations can be combined. keep_palette (bool): Save palette/color table to geotiff. To be used with images that were palettized with the "palettize" enhancement. Setting this to ``True`` will cause the colormap of the image to be written as a "color table" in the output geotiff and the image data values will represent the index values in to that color table. 
By default, this will use the colormap used in the "palettize" operation. See the ``cmap`` option for other options. This option defaults to ``False`` and palettized images will be converted to RGB/A. cmap (trollimage.colormap.Colormap or None): Colormap to save as a color table in the output geotiff. See ``keep_palette`` for more information. Defaults to the palette of the provided ``img`` object. The colormap's range should be set to match the index range of the palette (ex. `cmap.set_range(0, len(colors))`). tags (dict): Extra metadata to store in geotiff. include_scale_offset (bool): Activate inclusion of scale and offset factors in the geotiff to allow retrieving original values from the pixel values. ``False`` by default. .. _geotiff: http://trac.osgeo.org/geotiff/ """ filename = filename or self.get_filename(**img.data.attrs) # Update global GDAL options with these specific ones gdal_options = self.gdal_options.copy() for k in kwargs.keys(): if k in self.GDAL_OPTIONS: gdal_options[k] = kwargs[k] if fill_value is None: # fall back to fill_value from configuration file fill_value = self.info.get('fill_value') dtype = dtype if dtype is not None else self.dtype if dtype is None: dtype = np.uint8 if "alpha" in kwargs: raise ValueError( "Keyword 'alpha' is automatically set based on 'fill_value' " "and should not be specified") if np.issubdtype(dtype, np.floating): if img.mode != "L": raise ValueError("Image must be in 'L' mode for floating " "point geotiff saving") if fill_value is None: LOG.debug("Alpha band not supported for float geotiffs, " "setting fill value to 'NaN'") fill_value = np.nan if keep_palette and cmap is None and img.palette is not None: from satpy.enhancements import create_colormap cmap = create_colormap({'colors': img.palette}) cmap.set_range(0, len(img.palette) - 1) if tags is None: tags = {} tags.update(self.tags) return img.save(filename, fformat='tif', fill_value=fill_value, dtype=dtype, compute=compute, keep_palette=keep_palette, cmap=cmap, tags=tags, include_scale_offset_tags=include_scale_offset, **gdal_options) satpy-0.20.0/satpy/writers/mitiff.py000066400000000000000000000772761362525524100174500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018, 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""MITIFF writer objects for creating MITIFF files from `Dataset` objects.""" import logging import numpy as np from satpy.writers import ImageWriter from satpy.writers import get_enhanced_image from satpy.dataset import DatasetID import dask IMAGEDESCRIPTION = 270 LOG = logging.getLogger(__name__) KELVIN_TO_CELSIUS = -273.15 class MITIFFWriter(ImageWriter): """Writer to produce MITIFF image files.""" def __init__(self, name=None, tags=None, **kwargs): """Initialize reader with tag and other configuration information.""" ImageWriter.__init__(self, name=name, default_config_filename="writers/mitiff.yaml", **kwargs) self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) self.mitiff_config = {} self.translate_channel_name = {} self.channel_order = {} self.palette = False def save_image(self): """Save dataset as an image array.""" raise NotImplementedError("save_image mitiff is not implemented.") def save_dataset(self, dataset, filename=None, fill_value=None, compute=True, **kwargs): """Save single dataset as mitiff file.""" LOG.debug("Starting in mitiff save_dataset ... ") def _delayed_create(create_opts, dataset): try: if 'palette' in kwargs: self.palette = kwargs['palette'] if 'platform_name' not in kwargs: kwargs['platform_name'] = dataset.attrs['platform_name'] if 'name' not in kwargs: kwargs['name'] = dataset.attrs['name'] if 'start_time' not in kwargs: kwargs['start_time'] = dataset.attrs['start_time'] if 'sensor' not in kwargs: kwargs['sensor'] = dataset.attrs['sensor'] try: self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config'] self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order'] self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern'] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass try: self.translate_channel_name[kwargs['sensor']] = \ dataset.attrs['metadata_requirements']['translate'] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass image_description = self._make_image_description(dataset, **kwargs) gen_filename = filename or self.get_filename(**dataset.attrs) LOG.info("Saving mitiff to: %s ...", gen_filename) self._save_datasets_as_mitiff(dataset, image_description, gen_filename, **kwargs) except (KeyError, ValueError, RuntimeError): raise create_opts = () delayed = dask.delayed(_delayed_create)(create_opts, dataset) if compute: return delayed.compute() return delayed def save_datasets(self, datasets, filename=None, fill_value=None, compute=True, **kwargs): """Save all datasets to one or more files.""" LOG.debug("Starting in mitiff save_datasets ... 
") def _delayed_create(create_opts, datasets): LOG.debug("create_opts: %s", create_opts) try: if 'platform_name' not in kwargs: kwargs['platform_name'] = datasets[0].attrs['platform_name'] if 'name' not in kwargs: kwargs['name'] = datasets[0].attrs['name'] if 'start_time' not in kwargs: kwargs['start_time'] = datasets[0].attrs['start_time'] if 'sensor' not in kwargs: kwargs['sensor'] = datasets[0].attrs['sensor'] try: self.mitiff_config[kwargs['sensor']] = datasets[0].attrs['metadata_requirements']['config'] translate = datasets[0].attrs['metadata_requirements']['translate'] self.translate_channel_name[kwargs['sensor']] = translate self.channel_order[kwargs['sensor']] = datasets[0].attrs['metadata_requirements']['order'] self.file_pattern = datasets[0].attrs['metadata_requirements']['file_pattern'] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass image_description = self._make_image_description(datasets, **kwargs) LOG.debug("File pattern %s", self.file_pattern) if isinstance(datasets, list): kwargs['start_time'] = datasets[0].attrs['start_time'] else: kwargs['start_time'] = datasets.attrs['start_time'] gen_filename = filename or self.get_filename(**kwargs) LOG.info("Saving mitiff to: %s ...", gen_filename) self._save_datasets_as_mitiff(datasets, image_description, gen_filename, **kwargs) except (KeyError, ValueError, RuntimeError): raise create_opts = () delayed = dask.delayed(_delayed_create)(create_opts, datasets) LOG.debug("About to call delayed compute ...") if compute: return delayed.compute() return delayed def _make_channel_list(self, datasets, **kwargs): channels = [] try: if self.channel_order: for cn in self.channel_order[kwargs['sensor']]: for ch, ds in enumerate(datasets): if ds.attrs['prerequisites'][ch][0] == cn: channels.append( ds.attrs['prerequisites'][ch][0]) break elif self.palette: if 'palette_channel_name' in kwargs: channels.append(kwargs['palette_channel_name'].upper()) else: LOG.error("Is palette but can not find palette_channel_name to name the dataset") else: for ch in range(len(datasets)): channels.append(ch + 1) except KeyError: for ch in range(len(datasets)): channels.append(ch + 1) return channels def _channel_names(self, channels, cns, **kwargs): _image_description = "" for ch in channels: try: _image_description += str( self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias']) except KeyError: _image_description += str(ch) _image_description += ' ' # Replace last char(space) with \n _image_description = _image_description[:-1] _image_description += '\n' return _image_description def _add_sizes(self, datasets, first_dataset): _image_description = ' Xsize: ' if isinstance(datasets, list): _image_description += str(first_dataset.sizes['x']) + '\n' else: _image_description += str(datasets.sizes['x']) + '\n' _image_description += ' Ysize: ' if isinstance(datasets, list): _image_description += str(first_dataset.sizes['y']) + '\n' else: _image_description += str(datasets.sizes['y']) + '\n' return _image_description def _add_proj4_string(self, datasets, first_dataset): proj4_string = " Proj string: " if isinstance(datasets, list): area = first_dataset.attrs['area'] else: area = datasets.attrs['area'] # Use pyproj's CRS object to get a valid EPSG code if possible # only in newer pyresample versions with pyproj 2.0+ installed if hasattr(area, 'crs') and area.crs.to_epsg() is not None: proj4_string += "+init=EPSG:{}".format(area.crs.to_epsg()) else: proj4_string += area.proj_str x_0 
= 0 y_0 = 0 # FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible if 'EPSG:32631' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32631", "+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG:32632' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32632", "+proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG:32633' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32633", "+proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG:32634' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32634", "+proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG:32635' in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32635", "+proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif 'EPSG' in proj4_string: LOG.warning("EPSG used in proj string but not converted. Please add this in code") if 'geos' in proj4_string: proj4_string = proj4_string.replace("+sweep=x ", "") if '+a=6378137.0 +b=6356752.31414' in proj4_string: proj4_string = proj4_string.replace("+a=6378137.0 +b=6356752.31414", "+ellps=WGS84") if '+units=m' in proj4_string: proj4_string = proj4_string.replace("+units=m", "+units=km") if not any(datum in proj4_string for datum in ['datum', 'towgs84']): proj4_string += ' +towgs84=0,0,0' if 'units' not in proj4_string: proj4_string += ' +units=km' if 'x_0' not in proj4_string and isinstance(datasets, list): proj4_string += ' +x_0=%.6f' % ( (-first_dataset.attrs['area'].area_extent[0] + first_dataset.attrs['area'].pixel_size_x) + x_0) proj4_string += ' +y_0=%.6f' % ( (-first_dataset.attrs['area'].area_extent[1] + first_dataset.attrs['area'].pixel_size_y) + y_0) elif 'x_0' not in proj4_string: proj4_string += ' +x_0=%.6f' % ( (-datasets.attrs['area'].area_extent[0] + datasets.attrs['area'].pixel_size_x) + x_0) proj4_string += ' +y_0=%.6f' % ( (-datasets.attrs['area'].area_extent[1] + datasets.attrs['area'].pixel_size_y) + y_0) elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string and isinstance(datasets, list): proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % ( (-first_dataset.attrs['area'].area_extent[0] + first_dataset.attrs['area'].pixel_size_x) + x_0)) proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % ( (-first_dataset.attrs['area'].area_extent[1] + first_dataset.attrs['area'].pixel_size_y) + y_0)) elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string: proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % ( (-datasets.attrs['area'].area_extent[0] + datasets.attrs['area'].pixel_size_x) + x_0)) proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % ( (-datasets.attrs['area'].area_extent[1] + datasets.attrs['area'].pixel_size_y) + y_0)) LOG.debug("proj4_string: %s", proj4_string) proj4_string += '\n' return proj4_string def _add_pixel_sizes(self, datasets, first_dataset): _image_description = "" if isinstance(datasets, list): _image_description += ' Ax: %.6f' % ( first_dataset.attrs['area'].pixel_size_x / 1000.) _image_description += ' Ay: %.6f' % ( first_dataset.attrs['area'].pixel_size_y / 1000.) else: _image_description += ' Ax: %.6f' % ( datasets.attrs['area'].pixel_size_x / 1000.) _image_description += ' Ay: %.6f' % ( datasets.attrs['area'].pixel_size_y / 1000.) 
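        # MITIFF expects Ax/Ay in kilometres, hence the divisions by 1000 above.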
return _image_description def _add_corners(self, datasets, first_dataset): # But this ads up to upper left corner of upper left pixel. # But need to use the center of the pixel. # Therefor use the center of the upper left pixel. _image_description = "" if isinstance(datasets, list): _image_description += ' Bx: %.6f' % ( first_dataset.attrs['area'].area_extent[0] / 1000. + first_dataset.attrs['area'].pixel_size_x / 1000. / 2.) # LL_x _image_description += ' By: %.6f' % ( first_dataset.attrs['area'].area_extent[3] / 1000. - first_dataset.attrs['area'].pixel_size_y / 1000. / 2.) # UR_y else: _image_description += ' Bx: %.6f' % ( datasets.attrs['area'].area_extent[0] / 1000. + datasets.attrs['area'].pixel_size_x / 1000. / 2.) # LL_x _image_description += ' By: %.6f' % ( datasets.attrs['area'].area_extent[3] / 1000. - datasets.attrs['area'].pixel_size_y / 1000. / 2.) # UR_y _image_description += '\n' return _image_description def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, decimals): _reverse_offset = reverse_offset _reverse_scale = reverse_scale _decimals = decimals _table_calibration = "" found_calibration = False skip_calibration = False ds_list = datasets if not isinstance(datasets, list) and 'bands' not in datasets.sizes: ds_list = [datasets] for i, ds in enumerate(ds_list): if ('prerequisites' in ds.attrs and isinstance(ds.attrs['prerequisites'], list) and len(ds.attrs['prerequisites']) >= i + 1 and isinstance(ds.attrs['prerequisites'][i], DatasetID)): if ds.attrs['prerequisites'][i][0] == ch: if ds.attrs['prerequisites'][i][4] == 'RADIANCE': raise NotImplementedError( "Mitiff radiance calibration not implemented.") # _table_calibration += ', Radiance, ' # _table_calibration += '[W/m²/µm/sr]' # _decimals = 8 elif ds.attrs['prerequisites'][i][4] == 'brightness_temperature': found_calibration = True _table_calibration += ', BT, ' _table_calibration += u'\u00B0' # '\u2103' _table_calibration += u'[C]' _reverse_offset = 255. _reverse_scale = -1. _decimals = 2 elif ds.attrs['prerequisites'][i][4] == 'reflectance': found_calibration = True _table_calibration += ', Reflectance(Albedo), ' _table_calibration += '[%]' _decimals = 2 else: LOG.warning("Unknown calib type. Must be Radiance, Reflectance or BT.") break else: continue else: _table_calibration = "" skip_calibration = True break if not found_calibration: _table_calibration = "" skip_calibration = True # How to format string by passing the format # http://stackoverflow.com/questions/1598579/rounding-decimals-with-new-python-format-function return skip_calibration, _table_calibration, _reverse_offset, _reverse_scale, _decimals def _add_palette_info(self, datasets, palette_unit, palette_description, **kwargs): # mitiff key word for palette interpretion _palette = '\n COLOR INFO:\n' # mitiff info for the unit of the interpretion _palette += ' {}\n'.format(palette_unit) # The length of the palette description as needed by mitiff in DIANA _palette += ' {}\n'.format(len(palette_description)) for desc in palette_description: _palette += ' {}\n'.format(desc) return _palette def _add_calibration(self, channels, cns, datasets, **kwargs): _table_calibration = "" skip_calibration = False for ch in channels: palette = False # Make calibration. 
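            # NOTE: 'palette' is hard-coded to False here; palette products are
            # handled separately through _add_palette_info().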
if palette: raise NotImplementedError("Mitiff palette saving is not implemented.") else: _table_calibration += 'Table_calibration: ' try: _table_calibration += str( self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias']) except KeyError: _table_calibration += str(ch) _reverse_offset = 0. _reverse_scale = 1. _decimals = 2 skip_calibration, __table_calibration, _reverse_offset, _reverse_scale, _decimals = \ self._add_calibration_datasets(ch, datasets, _reverse_offset, _reverse_scale, _decimals) _table_calibration += __table_calibration if not skip_calibration: _table_calibration += ', 8, [ ' for val in range(0, 256): # Comma separated list of values _table_calibration += '{0:.{1}f} '.format((float(self.mitiff_config[ kwargs['sensor']][cns.get(ch, ch)]['min-val']) + ((_reverse_offset + _reverse_scale * val) * (float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['max-val']) - float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['min-val']))) / 255.), _decimals) # _table_calibration += '0.00000000 ' _table_calibration += ']\n\n' else: _table_calibration = "" return _table_calibration def _make_image_description(self, datasets, **kwargs): r"""Generate image description for mitiff. Satellite: NOAA 18 Date and Time: 06:58 31/05-2016 SatDir: 0 Channels: 6 In this file: 1-VIS0.63 2-VIS0.86 3(3B)-IR3.7 4-IR10.8 5-IR11.5 6(3A)-VIS1.6 Xsize: 4720 Ysize: 5544 Map projection: Stereographic Proj string: +proj=stere +lon_0=0 +lat_0=90 +lat_ts=60 +ellps=WGS84 +towgs84=0,0,0 +units=km +x_0=2526000.000000 +y_0=5806000.000000 TrueLat: 60 N GridRot: 0 Xunit:1000 m Yunit: 1000 m NPX: 0.000000 NPY: 0.000000 Ax: 1.000000 Ay: 1.000000 Bx: -2526.000000 By: -262.000000 Satellite: Date and Time: SatDir: 0 Channels: In this file: Xsize: Ysize: Map projection: Stereographic Proj string: TrueLat: 60 N GridRot: 0 Xunit:1000 m Yunit: 1000 m NPX: 0.000000 NPY: 0.000000 Ax: Ay: Bx: By: if palette image write special palette if normal channel write table calibration: Table_calibration: , , [], , []\n\n """ translate_platform_name = {'metop01': 'Metop-B', 'metop02': 'Metop-A', 'metop03': 'Metop-C', 'noaa15': 'NOAA-15', 'noaa16': 'NOAA-16', 'noaa17': 'NOAA-17', 'noaa18': 'NOAA-18', 'noaa19': 'NOAA-19'} first_dataset = datasets if isinstance(datasets, list): LOG.debug("Datasets is a list of dataset") first_dataset = datasets[0] if 'platform_name' in first_dataset.attrs: _platform_name = translate_platform_name.get( first_dataset.attrs['platform_name'], first_dataset.attrs['platform_name']) elif 'platform_name' in kwargs: _platform_name = translate_platform_name.get( kwargs['platform_name'], kwargs['platform_name']) else: _platform_name = None _image_description = '' _image_description.encode('utf-8') _image_description += ' Satellite: ' if _platform_name is not None: _image_description += _platform_name _image_description += '\n' _image_description += ' Date and Time: ' # Select earliest start_time first = True earliest = 0 for dataset in datasets: if first: earliest = dataset.attrs['start_time'] else: if dataset.attrs['start_time'] < earliest: earliest = dataset.attrs['start_time'] first = False LOG.debug("earliest start_time: %s", earliest) _image_description += earliest.strftime("%H:%M %d/%m-%Y\n") _image_description += ' SatDir: 0\n' _image_description += ' Channels: ' if isinstance(datasets, list): LOG.debug("len datasets: %s", len(datasets)) _image_description += str(len(datasets)) elif 'bands' in datasets.sizes: LOG.debug("len datasets: %s", datasets.sizes['bands']) _image_description += 
str(datasets.sizes['bands']) elif len(datasets.sizes) == 2: LOG.debug("len datasets: 1") _image_description += '1' _image_description += ' In this file: ' channels = self._make_channel_list(datasets, **kwargs) try: cns = self.translate_channel_name.get(kwargs['sensor'], {}) except KeyError: pass _image_description += self._channel_names(channels, cns, **kwargs) _image_description += self._add_sizes(datasets, first_dataset) _image_description += ' Map projection: Stereographic\n' _image_description += self._add_proj4_string(datasets, first_dataset) _image_description += ' TrueLat: 60N\n' _image_description += ' GridRot: 0\n' _image_description += ' Xunit:1000 m Yunit: 1000 m\n' _image_description += ' NPX: %.6f' % (0) _image_description += ' NPY: %.6f' % (0) + '\n' _image_description += self._add_pixel_sizes(datasets, first_dataset) _image_description += self._add_corners(datasets, first_dataset) if isinstance(datasets, list): LOG.debug("Area extent: %s", first_dataset.attrs['area'].area_extent) else: LOG.debug("Area extent: %s", datasets.attrs['area'].area_extent) if self.palette: LOG.debug("Doing palette image") _image_description += self._add_palette_info(datasets, **kwargs) else: _image_description += self._add_calibration(channels, cns, datasets, **kwargs) return _image_description def _calibrate_data(self, dataset, calibration, min_val, max_val): reverse_offset = 0. reverse_scale = 1. if calibration == 'brightness_temperature': # If data is brightness temperature, the data must be inverted. reverse_offset = 255. reverse_scale = -1. dataset.data += KELVIN_TO_CELSIUS # Need to possible translate channels names from satpy to mitiff _data = reverse_offset + reverse_scale * ((dataset.data - float(min_val)) / (float(max_val) - float(min_val))) * 255. return _data.clip(0, 255) def _save_as_palette(self, tif, datasets, **kwargs): # MITIFF palette has only one data channel if len(datasets.dims) == 2: LOG.debug("Palette ok with only 2 dimensions. ie only x and y") # 3 = Palette color. In this model, a color is described with a single component. # The value of the component is used as an index into the red, green and blue curves # in the ColorMap field to retrieve an RGB triplet that defines the color. When # PhotometricInterpretation=3 is used, ColorMap must be present and SamplesPerPixel must be 1. tif.SetField('PHOTOMETRIC', 3) # As write_image can not save tiff image as palette, this has to be done basicly # ie. all needed tags needs to be set. 
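            # Tags written below: image dimensions, bit depth, compression and,
            # if provided, the color map.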
tif.SetField('IMAGEWIDTH', datasets.sizes['x'])
            tif.SetField('IMAGELENGTH', datasets.sizes['y'])
            tif.SetField('BITSPERSAMPLE', 8)
            tif.SetField('COMPRESSION', tif.get_tag_define('deflate'))
            if 'palette_color_map' in kwargs:
                tif.SetField('COLORMAP', kwargs['palette_color_map'])
            else:
                LOG.error("In a mitiff palette image a color map must be provided: palette_color_map is missing.")

            data_type = np.uint8
            # Looks like we need to pass the data to WriteEncodedStrip as ctypes
            cont_data = np.ascontiguousarray(datasets.data, data_type)
            tif.WriteEncodedStrip(0, cont_data.ctypes.data,
                                  datasets.sizes['x'] * datasets.sizes['y'])
            tif.WriteDirectory()

    def _save_as_enhanced(self, tif, datasets, **kwargs):
        """Save datasets as an enhanced RGB image."""
        img = get_enhanced_image(datasets.squeeze(), enhance=self.enhancer)
        if 'bands' in img.data.sizes and 'bands' not in datasets.sizes:
            LOG.debug("Datasets without 'bands' become image with 'bands' due to enhancement.")
            LOG.debug("Needs to regenerate mitiff image description")
            image_description = self._make_image_description(img.data, **kwargs)
            tif.SetField(IMAGEDESCRIPTION, (image_description).encode('utf-8'))
        for band in img.data['bands']:
            chn = img.data.sel(bands=band)
            data = chn.values.clip(0, 1) * 254. + 1
            data = data.clip(0, 255)
            tif.write_image(data.astype(np.uint8), compression='deflate')

    def _save_datasets_as_mitiff(self, datasets, image_description,
                                 gen_filename, **kwargs):
        """Put all together and save as a tiff file.

        Include the special tags making it a mitiff file.

        """
        from libtiff import TIFF

        tif = TIFF.open(gen_filename, mode='wb')

        tif.SetField(IMAGEDESCRIPTION, (image_description).encode('utf-8'))

        cns = self.translate_channel_name.get(kwargs['sensor'], {})
        if isinstance(datasets, list):
            LOG.debug("Saving datasets as list")
            for _cn in self.channel_order[kwargs['sensor']]:
                for dataset in datasets:
                    if dataset.attrs['name'] == _cn:
                        # Need to possibly translate channel names from satpy to mitiff
                        cn = cns.get(dataset.attrs['name'],
                                     dataset.attrs['name'])
                        data = self._calibrate_data(dataset, dataset.attrs['calibration'],
                                                    self.mitiff_config[kwargs['sensor']][cn]['min-val'],
                                                    self.mitiff_config[kwargs['sensor']][cn]['max-val'])

                        tif.write_image(data.astype(np.uint8), compression='deflate')
                        break
        elif 'dataset' in datasets.attrs['name']:
            LOG.debug("Saving %s as a dataset.", datasets.attrs['name'])
            if len(datasets.dims) == 2 and (all('bands' not in i for i in datasets.dims)):
                # Special case with only one channel, i.e. no bands
                # Need to possibly translate channel names from satpy to mitiff
                # Note the last index is a tuple index.
                cn = cns.get(datasets.attrs['prerequisites'][0][0],
                             datasets.attrs['prerequisites'][0][0])
                data = self._calibrate_data(datasets, datasets.attrs['prerequisites'][0][4],
                                            self.mitiff_config[kwargs['sensor']][cn]['min-val'],
                                            self.mitiff_config[kwargs['sensor']][cn]['max-val'])

                tif.write_image(data.astype(np.uint8), compression='deflate')
            else:
                for _cn_i, _cn in enumerate(self.channel_order[kwargs['sensor']]):
                    for band in datasets['bands']:
                        if band == _cn:
                            chn = datasets.sel(bands=band)
                            # Need to possibly translate channel names from satpy to mitiff
                            # Note the last index is a tuple index.
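                            # (In a DatasetID tuple, index 0 is the dataset name
                            # and index 4 the calibration, cf. satpy.dataset.DatasetID.)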
cn = cns.get(chn.attrs['prerequisites'][_cn_i][0], chn.attrs['prerequisites'][_cn_i][0]) data = self._calibrate_data(chn, chn.attrs['prerequisites'][_cn_i][4], self.mitiff_config[kwargs['sensor']][cn]['min-val'], self.mitiff_config[kwargs['sensor']][cn]['max-val']) tif.write_image(data.astype(np.uint8), compression='deflate') break elif self.palette: LOG.debug("Saving dataset as palette.") self._save_as_palette(tif, datasets, **kwargs) else: LOG.debug("Saving datasets as enhanced image") self._save_as_enhanced(tif, datasets, **kwargs) del tif satpy-0.20.0/satpy/writers/ninjotiff.py000066400000000000000000000163151362525524100201430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer for TIFF images compatible with the NinJo visualization tool (NinjoTIFFs). NinjoTIFFs can be color images or monochromatic. For monochromatic images, the physical units and scale and offsets to retrieve the physical values are provided. Metadata is also recorded in the file. In order to write ninjotiff files, some metadata needs to be provided to the writer. Here is an example on how to write a color image:: chn = "airmass" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="airmass.tif", writer='ninjotiff', sat_id=6300014, chan_id=6500015, data_cat='GPRN', data_source='EUMCAST', nbits=8) Here is an example on how to write a color image:: chn = "IR_108" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="msg.tif", writer='ninjotiff', sat_id=6300014, chan_id=900015, data_cat='GORN', data_source='EUMCAST', physic_unit='K', nbits=8) The metadata to provide to the writer can also be stored in a configuration file (see pyninjotiff), so that the previous example can be rewritten as:: chn = "IR_108" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="msg.tif", writer='ninjotiff', # ninjo product name to look for in .cfg file ninjo_product_name="IR_108", # custom configuration file for ninjo tiff products # if not specified PPP_CONFIG_DIR is used as config file directory ninjo_product_file="/config_dir/ninjotiff_products.cfg") .. 
_ninjotiff: http://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html """ import logging import numpy as np import pyninjotiff.ninjotiff as nt from satpy.writers import ImageWriter from trollimage.xrimage import invert_scale_offset logger = logging.getLogger(__name__) def convert_units(dataset, in_unit, out_unit): """Convert units of *dataset*.""" from pint import UnitRegistry ureg = UnitRegistry() # Commented because buggy: race condition ? # ureg.define("degree_Celsius = degC = Celsius = C = CELSIUS") in_unit = ureg.parse_expression(in_unit, False) if out_unit in ['CELSIUS', 'C', 'Celsius', 'celsius']: dest_unit = ureg.degC else: dest_unit = ureg.parse_expression(out_unit, False) data = ureg.Quantity(dataset, in_unit) attrs = dataset.attrs dataset = data.to(dest_unit).magnitude dataset.attrs = attrs dataset.attrs["units"] = out_unit return dataset class NinjoTIFFWriter(ImageWriter): """Writer for NinjoTiff files.""" def __init__(self, tags=None, **kwargs): """Inititalize the writer.""" ImageWriter.__init__( self, default_config_filename="writers/ninjotiff.yaml", **kwargs ) self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) def save_image(self, img, filename=None, compute=True, **kwargs): # floating_point=False, """Save the image to the given *filename* in ninjotiff_ format. .. _ninjotiff: http://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html """ filename = filename or self.get_filename(**img.data.attrs) if img.mode.startswith("L") and ( "ch_min_measurement_unit" not in kwargs or "ch_max_measurement_unit" not in kwargs ): try: scale, offset = img.get_scaling_from_history() scale, offset = invert_scale_offset(scale, offset) except ValueError as err: logger.warning(str(err)) else: try: # Here we know that the data if the image is scaled between 0 and 1 dmin = offset dmax = scale + offset ch_min_measurement_unit, ch_max_measurement_unit = np.minimum(dmin, dmax), np.maximum(dmin, dmax) kwargs["ch_min_measurement_unit"] = ch_min_measurement_unit kwargs["ch_max_measurement_unit"] = ch_max_measurement_unit except KeyError: raise NotImplementedError( "Don't know how to handle non-scale/offset-based enhancements yet." ) return nt.save(img, filename, data_is_scaled_01=True, compute=compute, **kwargs) def save_dataset( self, dataset, filename=None, fill_value=None, compute=True, **kwargs ): """Save a dataset to ninjotiff format. This calls `save_image` in turn, but first preforms some unit conversion if necessary. """ nunits = kwargs.get("physic_unit", None) if nunits is None: try: options = nt.get_product_config( kwargs["ninjo_product_name"], True, kwargs["ninjo_product_file"] ) nunits = options["physic_unit"] except KeyError: pass if nunits is not None: try: units = dataset.attrs["units"] except KeyError: logger.warning( "Saving to physical ninjo file without units defined in dataset!" ) else: dataset = convert_units(dataset, units, nunits) return super(NinjoTIFFWriter, self).save_dataset( dataset, filename=filename, compute=compute, fill_value=fill_value, **kwargs ) satpy-0.20.0/satpy/writers/scmi.py000066400000000000000000001760761362525524100171230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The SCMI AWIPS writer is used to create AWIPS-compatible tiled NetCDF4 files. The Advanced Weather Interactive Processing System (AWIPS) is a program used by the United States National Weather Service (NWS) and others to view different forms of weather imagery. Sectorized Cloud and Moisture Imagery (SCMI) is a netcdf format accepted by AWIPS to store one image broken up in to one or more "tiles". Once AWIPS is configured for specific products the SCMI NetCDF backend can be used to provide compatible products to the system. The files created by this backend are compatible with AWIPS II (AWIPS I is no longer supported). The SCMI writer takes remapped binary image data and creates an AWIPS-compatible NetCDF4 file. The SCMI writer and the AWIPS client may need to be configured to make things appear the way the user wants in the AWIPS client. The SCMI writer can only produce files for datasets mapped to areas with specific projections: - lcc - geos - merc - stere This is a limitation of the AWIPS client and not of the SCMI writer. Numbered versus Lettered Grids ------------------------------ By default the SCMI writer will save tiles by number starting with '1' representing the upper-left image tile. Tile numbers then increase along the column and then on to the next row. By specifying `lettered_grid` as `True` tiles can be designated with a letter. Lettered grids or sectors are preconfigured in the `scmi.yaml` configuration file. The lettered tile locations are static and will not change with the data being written to them. Each lettered tile is split in to a certain number of subtiles (`num_subtiles`), default 2 rows by 2 columns. Lettered tiles are meant to make it easier for receiving AWIPS clients/stations to filter what tiles they receive; saving time, bandwidth, and space. Any tiles (numbered or lettered) not containing any valid data are not created. Updating tiles -------------- There are some input data cases where we want to put new data in a tile file written by a previous execution. An example is a pre-tiled input dataset that is processed one tile at a time. One input tile may map to one or more output SCMI tiles, but may not perfectly align with the SCMI tile, leaving empty/unused space in the SCMI tile. The next input tile may be able to fill in that empty space and should be allowed to write the "new" data to the file. This is the default behavior of the SCMI writer. In cases where data overlaps the existing data in the tile, the newer data has priority. Shifting Lettered Grids ----------------------- Due to the static nature of the lettered grids, there is sometimes a need to shift the locations of where these tiles are by up to 0.5 pixels in each dimension to align with the data being processed. This means that the tiles for a 1000m resolution grid may be shifted up to 500m in each direction from the original definition of the lettered "sector". 
This can cause differences in the location of the tiles between executions depending on the locations of the input data. In the worst case tile A01 from one execution could be shifted up to 1 grid cell from tile A01 in another execution (one is shifted 0.5 pixels to the left, the other is shifted 0.5 to the right). This shifting makes the calculations for generating tiles easier and more accurate. By default, the lettered tile locations are changed to match the location of the data. This works well when output tiles will not be updated (see above) in future processing. In cases where output tiles will be filled in or updated with more data the ``use_sector_reference`` keyword argument can be set to ``True`` to tell the SCMI writer to shift the data's geolocation by up to 0.5 pixels in each dimension instead of shifting the lettered tile locations. """ import os import logging import string import sys from datetime import datetime, timedelta from netCDF4 import Dataset import numpy as np from pyproj import Proj import dask.array as da from satpy.writers import Writer, DecisionTree, Enhancer, get_enhanced_image from pyresample.geometry import AreaDefinition from collections import namedtuple try: from pyresample.utils import proj4_radius_parameters except ImportError: raise ImportError("SCMI Writer requires pyresample>=1.7.0") LOG = logging.getLogger(__name__) # AWIPS 2 seems to not like data values under 0 AWIPS_USES_NEGATIVES = False AWIPS_DATA_DTYPE = np.int16 DEFAULT_OUTPUT_PATTERN = '{source_name}_AII_{platform_name}_{sensor}_' \ '{name}_{sector_id}_{tile_id}_' \ '{start_time:%Y%m%d_%H%M}.nc' # misc. global attributes SCMI_GLOBAL_ATT = dict( satellite_id=None, # GOES-H8 pixel_y_size=None, # km start_date_time=None, # 2015181030000, # %Y%j%H%M%S pixel_x_size=None, # km product_name=None, # "HFD-010-B11-M1C01", production_location=None, # "MSC", ) UNIT_CONV = { 'micron': 'microm', 'mm h-1': 'mm/h', '1': '*1', 'none': '*1', 'percent': '%', 'Kelvin': 'kelvin', 'K': 'kelvin', } TileInfo = namedtuple('TileInfo', ['tile_count', 'image_shape', 'tile_shape', 'tile_row_offset', 'tile_column_offset', 'tile_id', 'x', 'y', 'tile_slices', 'data_slices']) XYFactors = namedtuple('XYFactors', ['mx', 'bx', 'my', 'by']) def fix_awips_file(fn): """Hack the NetCDF4 files to workaround NetCDF-Java bugs used by AWIPS. This should not be needed for new versions of AWIPS. """ # hack to get files created by new NetCDF library # versions to be read by AWIPS buggy java version # of NetCDF LOG.info("Modifying SCMI NetCDF file to work with AWIPS") import h5py h = h5py.File(fn, 'a') if '_NCProperties' in h.attrs: del h.attrs['_NCProperties'] h.close() class NumberedTileGenerator(object): """Helper class to generate per-tile metadata for numbered tiles.""" def __init__(self, area_definition, tile_shape=None, tile_count=None): """Initialize and generate tile information for this sector/grid for later use.""" self.area_definition = area_definition self._rows = self.area_definition.y_size self._cols = self.area_definition.x_size # get tile shape, number of tiles, etc. self._get_tile_properties(tile_shape, tile_count) # scaling parameters for the overall images X and Y coordinates # they must be the same for all X and Y variables for all tiles # and must be stored in the file as 0, 1, 2, 3, ... 
# (X factor, X offset, Y factor, Y offset) self.mx, self.bx, self.my, self.by = self._get_xy_scaling_parameters() self.xy_factors = XYFactors(self.mx, self.bx, self.my, self.by) self._tile_cache = [] def _get_tile_properties(self, tile_shape, tile_count): """Generate tile information for numbered tiles.""" if tile_shape is not None: tile_shape = (int(min(tile_shape[0], self._rows)), int(min(tile_shape[1], self._cols))) tile_count = (int(np.ceil(self._rows / float(tile_shape[0]))), int(np.ceil(self._cols / float(tile_shape[1])))) elif tile_count: tile_shape = (int(np.ceil(self._rows / float(tile_count[0]))), int(np.ceil(self._cols / float(tile_count[1])))) else: raise ValueError("Either 'tile_count' or 'tile_shape' must be provided") # number of pixels in each tile self.tile_shape = tile_shape # number of tiles in each direction (rows, columns) self.tile_count = tile_count # number of tiles in the entire image self.total_tiles = tile_count[0] * tile_count[1] # number of pixels in the whole image (rows, columns) self.image_shape = (self.tile_shape[0] * self.tile_count[0], self.tile_shape[1] * self.tile_count[1]) # X and Y coordinates of the whole image self.x, self.y = self._get_xy_arrays() def _get_xy_arrays(self): """Get the overall X/Y coordinate variable arrays.""" gd = self.area_definition ts = self.tile_shape tc = self.tile_count # Since our tiles may go over the edge of the original "grid" we # need to make sure we calculate X/Y to the edge of all of the tiles imaginary_data_size = (ts[0] * tc[0], ts[1] * tc[1]) ps_x = gd.pixel_size_x ps_y = gd.pixel_size_y # tiles start from upper-left; rows pad the extent downward (y), # columns pad it to the right (x) new_extents = ( gd.area_extent[0], gd.area_extent[1] - ps_y * (imaginary_data_size[0] - gd.y_size), gd.area_extent[2] + ps_x * (imaginary_data_size[1] - gd.x_size), gd.area_extent[3]) imaginary_grid_def = AreaDefinition( gd.area_id, gd.name, gd.proj_id, gd.proj_dict, imaginary_data_size[1], imaginary_data_size[0], new_extents, ) x, y = imaginary_grid_def.get_proj_coords() x = x[0].squeeze() # all rows should have the same coordinates y = y[:, 0].squeeze() # all columns should have the same coordinates # scale the X and Y arrays to fit in the file for 16-bit integers # AWIPS is dumb and requires the integer values to be 0, 1, 2, 3, 4 # Max value of a signed 16-bit integer is 32767 meaning # 32768 values. if x.shape[0] > 2**15: # awips uses 0, 1, 2, 3 so we can't use the negative end of the variable space raise ValueError("X variable too large for AWIPS-version of 16-bit integer space") if y.shape[0] > 2**15: # awips uses 0, 1, 2, 3 so we can't use the negative end of the variable space raise ValueError("Y variable too large for AWIPS-version of 16-bit integer space") # NetCDF library doesn't handle numpy arrays nicely anymore for some # reason and has been masking values that shouldn't be return np.ma.masked_array(x), np.ma.masked_array(y) def _get_xy_scaling_parameters(self): """Get the X/Y coordinate limits for the full resulting image.""" gd = self.area_definition bx = self.x.min() mx = gd.pixel_size_x by = self.y.max() # index 0 is the top row, so the offset is the maximum Y my = -abs(gd.pixel_size_y) return mx, bx, my, by def _tile_number(self, ty, tx): """Get tile number from tile row/column.""" # e.g. # 001 002 003 004 # 005 006 ...
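# Worked example of the formula below (hypothetical layout with
# tile_count == (2, 4)): tile (ty=0, tx=2) -> 0 * 4 + 2 + 1 = 3, i.e. 'T003',
# and tile (ty=1, tx=0) -> 1 * 4 + 0 + 1 = 5, i.e. 'T005'.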
return ty * self.tile_count[1] + tx + 1 def _tile_identifier(self, ty, tx): """Get tile identifier for numbered tiles.""" return "T{:03d}".format(self._tile_number(ty, tx)) def _generate_tile_info(self): """Get numbered tile metadata.""" x = self.x y = self.y ts = self.tile_shape tc = self.tile_count if self._tile_cache: for tile_info in self._tile_cache: yield tile_info for ty in range(tc[0]): for tx in range(tc[1]): tile_id = self._tile_identifier(ty, tx) tile_row_offset = ty * ts[0] tile_column_offset = tx * ts[1] # store tile data to an intermediate array # the tile may be larger than the remaining data, handle that: max_row_idx = min((ty + 1) * ts[0], self._rows) - (ty * ts[0]) max_col_idx = min((tx + 1) * ts[1], self._cols) - (tx * ts[1]) tile_slices = (slice(0, max_row_idx), slice(0, max_col_idx)) data_slices = (slice(ty * ts[0], (ty + 1) * ts[0]), slice(tx * ts[1], (tx + 1) * ts[1])) tmp_x = x[data_slices[1]] tmp_y = y[data_slices[0]] tile_info = TileInfo( tc, self.image_shape, ts, tile_row_offset, tile_column_offset, tile_id, tmp_x, tmp_y, tile_slices, data_slices) self._tile_cache.append(tile_info) yield tile_info def __call__(self, data): """Provide simple call interface for getting tile metadata.""" if self._tile_cache: tile_infos = self._tile_cache else: tile_infos = self._generate_tile_info() for tile_info in tile_infos: tile_data = data[tile_info.data_slices] if not tile_data.size: LOG.info("Tile {} is empty, skipping...".format(tile_info[2])) continue yield tile_info, tile_data class LetteredTileGenerator(NumberedTileGenerator): """Helper class to generate per-tile metadata for lettered tiles.""" def __init__(self, area_definition, extents, cell_size=(2000000, 2000000), num_subtiles=None, use_sector_reference=False): """Initialize tile information for later generation.""" # (row subtiles, col subtiles) self.num_subtiles = num_subtiles or (2, 2) self.cell_size = cell_size # (row tile height, col tile width) # x/y self.ll_extents = extents[:2] # (x min, y min) self.ur_extents = extents[2:] # (x max, y max) self.use_sector_reference = use_sector_reference super(LetteredTileGenerator, self).__init__(area_definition) def _get_tile_properties(self, tile_shape, tile_count): """Calculate tile information for this particular sector/grid.""" # ignore tile_shape and tile_count # they come from the base class, but aren't used here del tile_shape, tile_count # get original image's X/Y ad = self.area_definition x, y = ad.get_proj_vectors() ll_xy = self.ll_extents ur_xy = self.ur_extents cw = abs(ad.pixel_size_x) ch = abs(ad.pixel_size_y) st = self.num_subtiles cs = self.cell_size # row height, column width # make sure the number of total tiles is a factor of the subtiles # meaning each letter has the full number of subtiles # Tile numbering/naming starts from the upper left corner ul_xy = (ll_xy[0], ur_xy[1]) # Adjust the upper-left corner to 'perfectly' match the data # X/Y are center of pixels, adjust by half a pixels to get upper-left pixel corner shift_x = float(ul_xy[0] - (x.min() - cw / 2.)) % cw # could be negative shift_y = float(ul_xy[1] - (y.max() + ch / 2.)) % ch # could be negative # if we're really close to 0 then don't worry about it if abs(shift_x) < 1e-10 or abs(shift_x - cw) < 1e-10: shift_x = 0 if abs(shift_y) < 1e-10 or abs(shift_y - ch) < 1e-10: shift_y = 0 if self.use_sector_reference: LOG.debug("Adjusting X/Y by ({}, {}) so it better matches lettered grid".format(shift_x, shift_y)) x = x + shift_x y = y + shift_y else: LOG.debug("Adjusting lettered grid by ({}, {}) 
so it better matches data X/Y".format(shift_x, shift_y)) ul_xy = (ul_xy[0] - shift_x, ul_xy[1] - shift_y) # outer edge of grid # always keep the same distance between the extents ll_xy = (ul_xy[0], ll_xy[1] - shift_y) ur_xy = (ur_xy[0] - shift_x, ul_xy[1]) fcs_y, fcs_x = (np.ceil(float(cs[0]) / st[0]), np.ceil(float(cs[1]) / st[1])) # need X/Y for *whole* tiles max_cols = np.ceil((ur_xy[0] - ul_xy[0]) / fcs_x) max_rows = np.ceil((ul_xy[1] - ll_xy[1]) / fcs_y) # don't create partial alpha-tiles max_cols = int(np.ceil(max_cols / st[1]) * st[1]) max_rows = int(np.ceil(max_rows / st[0]) * st[0]) # make tile cell size a factor of pixel size num_pixels_x = int(np.floor(fcs_x / cw)) num_pixels_y = int(np.floor(fcs_y / ch)) # NOTE: this does not change the *total* number of columns/rows that # will be produced. This is important because otherwise the number # of lettered tiles could depend on the input data which is not what we # want fcs_x = num_pixels_x * cw fcs_y = num_pixels_y * ch # NOTE: this takes the center of the pixel relative to the upper-left outer edge: min_col = max(int(np.floor((x.min() - ul_xy[0]) / fcs_x)), 0) max_col = min(int(np.floor((x.max() - ul_xy[0]) / fcs_x)), max_cols - 1) min_row = max(int(np.floor((ul_xy[1] - y.max()) / fcs_y)), 0) max_row = min(int(np.floor((ul_xy[1] - y.min()) / fcs_y)), max_rows - 1) num_cols = max_col - min_col + 1 num_rows = max_row - min_row + 1 total_alphas = (max_cols * max_rows) / (st[0] * st[1]) if total_alphas > 26: raise ValueError("Too many lettered grid cells '{}' (sector cell size too small). " "Maximum of 26".format(total_alphas)) self.tile_shape = (num_pixels_y, num_pixels_x) self.total_tile_count = (max_rows, max_cols) self.tile_count = (num_rows, num_cols) self.total_tiles = num_rows * num_cols self.image_shape = (num_pixels_y * num_rows, num_pixels_x * num_cols) self.min_col = min_col self.max_col = max_col self.min_row = min_row self.max_row = max_row self.ul_xy = ul_xy self.mx = cw self.bx = ul_xy[0] + cw / 2.0 # X represents the center of the pixel self.my = -ch self.by = ul_xy[1] - ch / 2.0 # Y represents the center of the pixel self.x = x self.y = y def _get_xy_scaling_parameters(self): """Get the X/Y coordinate limits for the full resulting image.""" return self.mx, self.bx, self.my, self.by def _tile_identifier(self, ty, tx): """Get tile identifier (name) for a particular tile row/column.""" st = self.num_subtiles ttc = self.total_tile_count alpha_num = int((ty // st[0]) * (ttc[1] // st[1]) + (tx // st[1])) alpha = string.ascii_uppercase[alpha_num] tile_num = int((ty % st[0]) * st[1] + (tx % st[1])) + 1 return "T{}{:02d}".format(alpha, tile_num) def _generate_tile_info(self): """Create generator of individual tile metadata.""" if self._tile_cache: for tile_info in self._tile_cache: yield tile_info ts = self.tile_shape ul_xy = self.ul_xy x, y = self.x, self.y cw = abs(float(self.area_definition.pixel_size_x)) ch = abs(float(self.area_definition.pixel_size_y)) # where does the data fall in our lettered grid for gy in range(self.min_row, self.max_row + 1): for gx in range(self.min_col, self.max_col + 1): tile_id = self._tile_identifier(gy, gx) # ul_xy is outer-edge of upper-left corner # x/y are center of each data pixel x_left = ul_xy[0] + gx * ts[1] * cw x_right = x_left + ts[1] * cw y_top = ul_xy[1] - gy * ts[0] * ch y_bot = y_top - ts[0] * ch x_mask = np.nonzero((x >= x_left) & (x < x_right))[0] y_mask = np.nonzero((y > y_bot) & (y <= y_top))[0] if not x_mask.any() or not y_mask.any(): # no data in this tile LOG.debug("Tile 
'{}' doesn't have any data in it".format(tile_id)) continue x_slice = slice(x_mask[0], x_mask[-1] + 1) # assume it's continuous y_slice = slice(y_mask[0], y_mask[-1] + 1) # theoretically we can precompute the X/Y now # instead of taking the x/y data and mapping it # to the tile tmp_x = np.ma.arange(x_left + cw / 2., x_right, cw) tmp_y = np.ma.arange(y_top - ch / 2., y_bot, -ch) data_x_idx_min = np.nonzero(np.isclose(tmp_x, x[x_slice.start]))[0][0] data_x_idx_max = np.nonzero(np.isclose(tmp_x, x[x_slice.stop - 1]))[0][0] # I have a half pixel error somewhere data_y_idx_min = np.nonzero(np.isclose(tmp_y, y[y_slice.start]))[0][0] data_y_idx_max = np.nonzero(np.isclose(tmp_y, y[y_slice.stop - 1]))[0][0] # now put the data in the grid tile tile_slices = (slice(data_y_idx_min, data_y_idx_max + 1), slice(data_x_idx_min, data_x_idx_max + 1)) data_slices = (y_slice, x_slice) tile_info = TileInfo( self.tile_count, self.image_shape, ts, gy * ts[0], gx * ts[1], tile_id, tmp_x, tmp_y, tile_slices, data_slices) self._tile_cache.append(tile_info) yield tile_info class SCMIDatasetDecisionTree(DecisionTree): """Load AWIPS-specific metadata from YAML configuration.""" def __init__(self, decision_dicts, **kwargs): """Initialize decision tree with specific keys to look for.""" # Fields used to match a product object to its correct configuration attrs = kwargs.pop('attrs', ["name", "standard_name", "satellite", "instrument", "area_id", "units", "reader"] ) super(SCMIDatasetDecisionTree, self).__init__(decision_dicts, attrs, **kwargs) class AttributeHelper(object): """Helper object which wraps around metadata to provide SCMI attributes.""" def __init__(self, ds_info): """Initialize metadata for future attribute collection.""" self.ds_info = ds_info def apply_attributes(self, nc, table, prefix=''): """Apply fixed attributes or look up attributes needed and apply them.""" for name, value in sorted(table.items()): if name in nc.ncattrs(): LOG.debug('already have a value for %s' % name) continue if value is not None: setattr(nc, name, value) else: funcname = prefix+name # _global_ + product_tile_height func = getattr(self, funcname, None) if func is not None: value = func() if value is not None: setattr(nc, name, value) else: LOG.info('no routine matching %s' % funcname) def _scene_time(self): """Get default start time of this observation.""" return self.ds_info["start_time"] + timedelta(minutes=int(os.environ.get("DEBUG_TIME_SHIFT", 0))) def _global_product_name(self): """Get default global product name attribute.""" return self.ds_info["name"] def _global_pixel_x_size(self): """Get default global x size attribute.""" return self.ds_info["area"].pixel_size_x / 1000. def _global_pixel_y_size(self): """Get default global y size attribute.""" return self.ds_info["area"].pixel_size_y / 1000.
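# Note on the lookup convention used by apply_attributes() above: an entry
# in SCMI_GLOBAL_ATT whose value is None (e.g. 'product_name') is resolved
# by calling the method named prefix + name on this helper, here
# '_global_' + 'product_name' -> _global_product_name(), and the returned
# value is stored as the NetCDF attribute of the same name.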
def _global_start_date_time(self): """Get default global start time attribute.""" when = self._scene_time() return when.strftime('%Y-%m-%dT%H:%M:%S') def _global_production_location(self): """Get default global production_location attribute.""" org = os.environ.get('ORGANIZATION', None) if org is not None: return org else: LOG.warning('environment ORGANIZATION not set for .production_location attribute, using hostname') import socket return socket.gethostname() # FUTURE: something more correct but this will do for now class NetCDFWriter(object): """Write a basic AWIPS compatible NetCDF4 SCMI file representing one "tile" of data.""" _kind = None # 'albedo', 'brightness_temp' _band = None _include_fgf = True _fill_value = 0 image_var_name = 'data' fgf_y = None fgf_x = None projection = None def __init__(self, filename, include_fgf=True, ds_info=None, compress=False, is_geographic=False): """Initialize variable and dimension names and metadata helper objects.""" self._nc = None self.filename = filename self._include_fgf = include_fgf self._compress = compress self.helper = AttributeHelper(ds_info) self.image_data = None self.is_geographic = is_geographic self.exists = os.path.isfile(self.filename) if self.is_geographic: self.row_dim_name = 'lat' self.col_dim_name = 'lon' self.y_var_name = 'lat' self.x_var_name = 'lon' else: self.row_dim_name = 'y' self.col_dim_name = 'x' self.y_var_name = 'y' self.x_var_name = 'x' @property def nc(self): """Access the NetCDF file object, creating it on first access.""" if self._nc is None: self._nc = Dataset(self.filename, 'r+' if self.exists else 'w') return self._nc def create_dimensions(self, lines, columns): """Create NetCDF dimensions.""" # Create Dimensions if self.exists: LOG.debug("Skipping creating dimensions because file already exists.") return _nc = self.nc _nc.createDimension(self.row_dim_name, lines) _nc.createDimension(self.col_dim_name, columns) def create_variables(self, bitdepth, fill_value, scale_factor=None, add_offset=None, valid_min=None, valid_max=None): """Create data and geolocation NetCDF variables.""" if self.exists: LOG.debug("Skipping creating variables because file already exists.") self.image_data = self.nc[self.image_var_name] self.fgf_y = self.nc[self.y_var_name] self.fgf_x = self.nc[self.x_var_name] return fgf_coords = "%s %s" % (self.y_var_name, self.x_var_name) self.image_data = self.nc.createVariable(self.image_var_name, AWIPS_DATA_DTYPE, dimensions=(self.row_dim_name, self.col_dim_name), fill_value=fill_value, zlib=self._compress) self.image_data.coordinates = fgf_coords self.apply_data_attributes(bitdepth, scale_factor, add_offset, valid_min=valid_min, valid_max=valid_max) if self._include_fgf: self.fgf_y = self.nc.createVariable( self.y_var_name, 'i2', dimensions=(self.row_dim_name,), zlib=self._compress) self.fgf_x = self.nc.createVariable( self.x_var_name, 'i2', dimensions=(self.col_dim_name,), zlib=self._compress) def apply_data_attributes(self, bitdepth, scale_factor, add_offset, valid_min=None, valid_max=None): """Assign various data variable metadata.""" # NOTE: grid_mapping is set by `set_projection_attrs` self.image_data.scale_factor = np.float32(scale_factor) self.image_data.add_offset = np.float32(add_offset) u = self.helper.ds_info.get('units', '1') self.image_data.units = UNIT_CONV.get(u, u) file_bitdepth = self.image_data.dtype.itemsize * 8 is_unsigned = self.image_data.dtype.kind == 'u' if not AWIPS_USES_NEGATIVES and not is_unsigned: file_bitdepth -= 1 is_unsigned = True if bitdepth >= file_bitdepth: bitdepth =
file_bitdepth num_fills = 1 else: bitdepth = bitdepth num_fills = 0 if valid_min is not None and valid_max is not None: self.image_data.valid_min = valid_min self.image_data.valid_max = valid_max elif not is_unsigned: # signed data type self.image_data.valid_min = -2**(bitdepth - 1) # 1 less for data type (65535), another 1 less for fill value (fill value = max file value) self.image_data.valid_max = 2**(bitdepth - 1) - 1 - num_fills else: # unsigned data type self.image_data.valid_min = 0 self.image_data.valid_max = 2**bitdepth - 1 - num_fills if "standard_name" in self.helper.ds_info: self.image_data.standard_name = self.helper.ds_info["standard_name"] elif self.helper.ds_info.get("standard_name") in ["reflectance", "albedo"]: self.image_data.standard_name = "toa_bidirectional_reflectance" else: self.image_data.standard_name = self.helper.ds_info.get("standard_name") or '' def set_fgf(self, x, mx, bx, y, my, by, units=None, downsample_factor=1): """Assign geolocation x/y variables metadata.""" if self.exists: LOG.debug("Skipping setting FGF variable attributes because file already exists.") return # assign values before scale factors to avoid implicit scale reversal LOG.debug('y variable shape is {}'.format(self.fgf_y.shape)) self.fgf_y.scale_factor = np.float64(my * float(downsample_factor)) self.fgf_y.add_offset = np.float64(by) if self.is_geographic: self.fgf_y.units = units if units is not None else 'degrees_north' self.fgf_y.standard_name = "latitude" else: self.fgf_y.units = units if units is not None else 'meters' self.fgf_y.standard_name = "projection_y_coordinate" self.fgf_y[:] = y self.fgf_x.scale_factor = np.float64(mx * float(downsample_factor)) self.fgf_x.add_offset = np.float64(bx) if self.is_geographic: self.fgf_x.units = units if units is not None else 'degrees_east' self.fgf_x.standard_name = "longitude" else: self.fgf_x.units = units if units is not None else 'meters' self.fgf_x.standard_name = "projection_x_coordinate" self.fgf_x[:] = x def set_image_data(self, data): """Write image variable data.""" LOG.debug('writing image data') if not hasattr(data, 'mask'): data = np.ma.masked_array(data, np.isnan(data)) # note: autoscaling will be applied to make int16 self.image_data[:, :] = np.require(data, dtype=np.float32) def set_projection_attrs(self, area_id, proj4_info): """Assign projection attributes per GRB standard.""" if self.exists: LOG.debug("Skipping setting projection attributes because file already exists.") return proj4_info['a'], proj4_info['b'] = proj4_radius_parameters(proj4_info) if proj4_info["proj"] == "geos": p = self.projection = self.nc.createVariable("fixedgrid_projection", 'i4') self.image_data.grid_mapping = "fixedgrid_projection" p.short_name = area_id p.grid_mapping_name = "geostationary" p.sweep_angle_axis = proj4_info.get("sweep", "y") p.perspective_point_height = proj4_info['h'] p.latitude_of_projection_origin = np.float32(0.0) p.longitude_of_projection_origin = np.float32(proj4_info.get('lon_0', 0.0)) # is the float32 needed? elif proj4_info["proj"] == "lcc": p = self.projection = self.nc.createVariable("lambert_projection", 'i4') self.image_data.grid_mapping = "lambert_projection" p.short_name = area_id p.grid_mapping_name = "lambert_conformal_conic" p.standard_parallel = proj4_info["lat_0"] # How do we specify two standard parallels? p.longitude_of_central_meridian = proj4_info["lon_0"] p.latitude_of_projection_origin = proj4_info.get('lat_1', proj4_info['lat_0']) # Correct? 
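# For reference, a hypothetical LCC proj4_info of
# {'proj': 'lcc', 'lat_0': 25.0, 'lat_1': 25.0, 'lon_0': -95.0} would map to
# standard_parallel = 25.0, longitude_of_central_meridian = -95.0 and
# latitude_of_projection_origin = 25.0 in the attributes set above.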
elif proj4_info['proj'] == 'stere': p = self.projection = self.nc.createVariable("polar_projection", 'i4') self.image_data.grid_mapping = "polar_projection" p.short_name = area_id p.grid_mapping_name = "polar_stereographic" p.standard_parallel = proj4_info["lat_ts"] p.straight_vertical_longitude_from_pole = proj4_info.get("lon_0", 0.0) p.latitude_of_projection_origin = proj4_info["lat_0"] # ? elif proj4_info['proj'] == 'merc': p = self.projection = self.nc.createVariable("mercator_projection", 'i4') self.image_data.grid_mapping = "mercator_projection" p.short_name = area_id p.grid_mapping_name = "mercator" p.standard_parallel = proj4_info.get('lat_ts', proj4_info.get('lat_0', 0.0)) p.longitude_of_projection_origin = proj4_info.get("lon_0", 0.0) # AWIPS 2 Doesn't actually support this yet # elif proj4_info['proj'] in ['latlong', 'longlat', 'lonlat', 'latlon']: # p = self.projection = self._nc.createVariable("latitude_longitude_projection", 'i4') # self.image_data.grid_mapping = "latitude_longitude_projection" # p.short_name = area_id # p.grid_mapping_name = 'latitude_longitude' else: raise ValueError("SCMI can not handle projection '{}'".format(proj4_info['proj'])) p.semi_major_axis = np.float64(proj4_info["a"]) p.semi_minor_axis = np.float64(proj4_info["b"]) p.false_easting = np.float32(proj4_info.get("x", 0.0)) p.false_northing = np.float32(proj4_info.get("y", 0.0)) def set_global_attrs(self, physical_element, awips_id, sector_id, creating_entity, total_tiles, total_pixels, tile_row, tile_column, tile_height, tile_width, creator=None): """Assign NetCDF global attributes.""" if self.exists: LOG.debug("Skipping setting global attributes because file already exists.") return self.nc.Conventions = "CF-1.7" if creator is None: from satpy import __version__ self.nc.creator = "Satpy Version {} - SCMI Writer".format(__version__) else: self.nc.creator = creator self.nc.creation_time = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S') # name as it shows in the product browser (physicalElement) self.nc.physical_element = physical_element self.nc.satellite_id = creating_entity # identifying name to match against AWIPS common descriptions (ex. "AWIPS_product_name") self.nc.awips_id = awips_id self.nc.sector_id = sector_id self.nc.tile_row_offset = tile_row self.nc.tile_column_offset = tile_column self.nc.product_tile_height = tile_height self.nc.product_tile_width = tile_width self.nc.number_product_tiles = total_tiles[0] * total_tiles[1] self.nc.product_rows = total_pixels[0] self.nc.product_columns = total_pixels[1] self.helper.apply_attributes(self.nc, SCMI_GLOBAL_ATT, '_global_') def close(self): """Close the NetCDF file if created.""" if self._nc is not None: self._nc.sync() self._nc.close() self._nc = None class NetCDFWrapper(object): """Object to wrap all NetCDF data-based operations in to a single call. This makes it possible to do SCMI writing with dask's delayed `da.store` function. 
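A minimal sketch of that pattern (here ``sources_targets`` is assumed to be the list of ``(dask array, NetCDFWrapper)`` pairs built by :meth:`SCMIWriter.save_datasets`)::

    import dask.array as da
    sources, targets = zip(*sources_targets)
    # dask assigns each computed tile into its wrapper, which triggers
    # NetCDFWrapper.__setitem__ and writes one NetCDF file per tile
    da.store(sources, targets, lock=False)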
""" def __init__(self, filename, sector_id, ds_info, awips_info, xy_factors, tile_info, compress=False, fix_awips=False, update_existing=True): """Assign instance attributes for later use.""" self.filename = filename self.sector_id = sector_id self.ds_info = ds_info self.awips_info = awips_info self.tile_info = tile_info self.xy_factors = xy_factors self.compress = compress self.fix_awips = fix_awips self.update_existing = update_existing self.exists = os.path.isfile(self.filename) def __setitem__(self, key, data): """Write an entire tile to a file.""" if np.isnan(data).all(): LOG.info("Tile {} contains all invalid data, skipping...".format(self.filename)) return ds_info = self.ds_info awips_info = self.awips_info tile_info = self.tile_info area_def = ds_info['area'] if hasattr(area_def, 'crs'): is_geographic = area_def.crs.is_geographic else: is_geographic = Proj(area_def.proj_dict).is_latlong() nc = NetCDFWriter(self.filename, ds_info=self.ds_info, compress=self.compress, is_geographic=is_geographic) LOG.debug("Scaling %s data to fit in netcdf file...", ds_info["name"]) bit_depth = ds_info.get("bit_depth", 16) valid_min = ds_info.get('valid_min') if valid_min is None and self.update_existing and self.exists: # reuse the valid_min that was previously computed valid_min = nc.nc['data'].valid_min elif valid_min is None: valid_min = np.nanmin(data) valid_max = ds_info.get('valid_max') if valid_max is None and self.update_existing and self.exists: # reuse the valid_max that was previously computed valid_max = nc.nc['data'].valid_max elif valid_max is None: valid_max = np.nanmax(data) LOG.debug("Using product valid min {} and valid max {}".format(valid_min, valid_max)) is_cat = 'flag_meanings' in ds_info fills, factor, offset = self._calc_factor_offset( data=data, bitdepth=bit_depth, min=valid_min, max=valid_max, dtype=AWIPS_DATA_DTYPE, flag_meanings=is_cat) if is_cat: data = data.astype(AWIPS_DATA_DTYPE) tmp_tile = np.empty(tile_info.tile_shape, dtype=data.dtype) tmp_tile[:] = np.nan LOG.info("Writing tile '%s' to '%s'", self.tile_info[2], self.filename) LOG.debug("Creating dimensions...") nc.create_dimensions(tmp_tile.shape[0], tmp_tile.shape[1]) LOG.debug("Creating variables...") nc.create_variables(bit_depth, fills[0], factor, offset) LOG.debug("Creating global attributes...") nc.set_global_attrs(awips_info['physical_element'], awips_info['awips_id'], self.sector_id, awips_info['creating_entity'], tile_info.tile_count, tile_info.image_shape, tile_info.tile_row_offset, tile_info.tile_column_offset, tmp_tile.shape[0], tmp_tile.shape[1]) LOG.debug("Creating projection attributes...") nc.set_projection_attrs(area_def.area_id, area_def.proj_dict) LOG.debug("Writing X/Y navigation data...") mx, bx, my, by = self.xy_factors nc.set_fgf(tile_info.x, mx, bx, tile_info.y, my, by) tmp_tile[tile_info.tile_slices] = data if self.exists and self.update_existing: # use existing data where possible existing_data = nc.nc['data'][:] # where we don't have new data but we also have good existing data old_mask = np.isnan(tmp_tile) & ~existing_data.mask tmp_tile[old_mask] = existing_data[old_mask] LOG.debug("Writing image data...") np.clip(tmp_tile, valid_min, valid_max, out=tmp_tile) nc.set_image_data(tmp_tile) nc.close() if self.fix_awips and not self.exists: fix_awips_file(self.filename) def _calc_factor_offset(self, data=None, dtype=np.int16, bitdepth=None, min=None, max=None, num_fills=1, flag_meanings=False): """Compute netcdf variable factor and offset.""" if num_fills > 1: raise NotImplementedError("More 
than one fill value is not implemented yet") dtype = np.dtype(dtype) file_bitdepth = dtype.itemsize * 8 is_unsigned = dtype.kind == 'u' if not AWIPS_USES_NEGATIVES and not is_unsigned: file_bitdepth -= 1 is_unsigned = True if bitdepth is None: bitdepth = file_bitdepth if bitdepth >= file_bitdepth: bitdepth = file_bitdepth else: # don't take away from the data bitdepth if there is room in # file data type to allow for extra fill values num_fills = 0 if min is None: min = data.min() if max is None: max = data.max() if not is_unsigned: # max value fills = [2**(file_bitdepth - 1) - 1] else: # max value fills = [2**file_bitdepth - 1] if flag_meanings: # AWIPS doesn't like Identity conversion so we can't have # a factor of 1 and an offset of 0 mx = 0.5 bx = 0 else: mx = float(max - min) / (2**bitdepth - 1 - num_fills) bx = min if not is_unsigned: bx += 2**(bitdepth - 1) * mx return fills, mx, bx class SCMIWriter(Writer): """Writer for AWIPS NetCDF4 SCMI files. These files are **not** the official GOES-R style files, but rather a custom "Polar SCMI" file scheme originally developed at the University of Wisconsin - Madison, Space Science and Engineering Center (SSEC) for use by the CSPP Polar2Grid project. Despite the name these files should support data from polar-orbitting satellites (after resampling) and geostationary satellites in single band (luminance) or RGB image format. """ def __init__(self, compress=False, fix_awips=False, **kwargs): """Initialize writer and decision trees.""" super(SCMIWriter, self).__init__(default_config_filename="writers/scmi.yaml", **kwargs) self.keep_intermediate = False self.overwrite_existing = True self.scmi_sectors = self.config['sectors'] self.scmi_datasets = SCMIDatasetDecisionTree([self.config['datasets']]) self.compress = compress self.fix_awips = fix_awips self._fill_sector_info() self._enhancer = None @property def enhancer(self): """Get lazy loaded enhancer object only if needed.""" if self._enhancer is None: self._enhancer = Enhancer(ppp_config_dir=self.ppp_config_dir) return self._enhancer @classmethod def separate_init_kwargs(cls, kwargs): """Separate keyword arguments by initialization and saving keyword arguments.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(SCMIWriter, cls).separate_init_kwargs( kwargs) for kw in ['compress', 'fix_awips']: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def _fill_sector_info(self): """Convert sector extents if needed.""" for sector_info in self.scmi_sectors.values(): p = Proj(sector_info['projection']) if 'lower_left_xy' in sector_info: sector_info['lower_left_lonlat'] = p(*sector_info['lower_left_xy'], inverse=True) else: sector_info['lower_left_xy'] = p(*sector_info['lower_left_lonlat']) if 'upper_right_xy' in sector_info: sector_info['upper_right_lonlat'] = p(*sector_info['upper_right_xy'], inverse=True) else: sector_info['upper_right_xy'] = p(*sector_info['upper_right_lonlat']) def _get_sector_info(self, sector_id, lettered_grid): """Get metadata for the current sector if configured. This is not necessary for numbered grids. If found, the sector info will provide the overall tile layout for this grid/sector. This allows for consistent tile numbering/naming regardless of where the data being converted actually is. 
""" try: sector_info = self.scmi_sectors[sector_id] except KeyError: if lettered_grid: raise ValueError("Unknown sector '{}'".format(sector_id)) else: sector_info = None return sector_info def _get_tile_generator(self, area_def, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference=False): """Get the appropriate tile generator class for lettered or numbered tiles.""" sector_info = self._get_sector_info(sector_id, lettered_grid) # Create a tile generator for this grid definition if lettered_grid: tile_gen = LetteredTileGenerator( area_def, sector_info['lower_left_xy'] + sector_info['upper_right_xy'], cell_size=sector_info['resolution'], num_subtiles=num_subtiles, use_sector_reference=use_sector_reference, ) else: tile_gen = NumberedTileGenerator( area_def, tile_shape=tile_size, tile_count=tile_count, ) return tile_gen def _get_awips_info(self, ds_info, source_name=None, physical_element=None): """Get metadata for this product when shown in AWIPS if configured in the YAML file.""" try: awips_info = self.scmi_datasets.find_match(**ds_info).copy() awips_info['awips_id'] = "AWIPS_" + ds_info['name'] if not physical_element: physical_element = awips_info.get('physical_info') if not physical_element: physical_element = ds_info['name'] if "{" in physical_element: physical_element = physical_element.format(**ds_info) awips_info['physical_element'] = physical_element if source_name: awips_info['source_name'] = source_name if awips_info['source_name'] is None: raise TypeError("'source_name' keyword must be specified") def_ce = "{}-{}".format(ds_info["platform_name"].upper(), ds_info["sensor"].upper()) awips_info.setdefault('creating_entity', def_ce) return awips_info except KeyError: LOG.error("Could not get information on dataset from backend configuration file") raise def _group_by_area(self, datasets): """Group datasets by their area.""" def _area_id(area_def): return area_def.name + str(area_def.area_extent) + str(area_def.shape) # get all of the datasets stored by area area_datasets = {} for x in datasets: area_id = _area_id(x.attrs['area']) area, ds_list = area_datasets.setdefault(area_id, (x.attrs['area'], [])) ds_list.append(x) return area_datasets def _split_rgbs(self, ds): """Split a single RGB dataset in to multiple.""" for component in 'RGB': band_data = ds.sel(bands=component) band_data.attrs['name'] += '_{}'.format(component) band_data.attrs['valid_min'] = 0.0 band_data.attrs['valid_max'] = 1.0 yield band_data def _enhance_and_split_rgbs(self, datasets): """Handle multi-band images by splitting in to separate products.""" new_datasets = [] for ds in datasets: if ds.ndim == 2: new_datasets.append(ds) continue elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and 'bands' not in ds.coords): LOG.error("Can't save datasets with more or less than 2 dimensions " "that aren't RGBs to SCMI format: {}".format(ds.name)) else: # this is an RGB img = get_enhanced_image(ds.squeeze(), enhance=self.enhancer) res_data = img.finalize(fill_value=0, dtype=np.float32)[0] new_datasets.extend(self._split_rgbs(res_data)) return new_datasets def save_dataset(self, dataset, **kwargs): """Save a single DataArray to one or more NetCDF4 SCMI files.""" LOG.warning("For best performance use `save_datasets`") return self.save_datasets([dataset], **kwargs) def get_filename(self, area_def, tile_info, sector_id, **kwargs): """Generate output NetCDF file from metadata.""" # format the filename kwargs["start_time"] += timedelta(minutes=int(os.environ.get("DEBUG_TIME_SHIFT", 0))) return 
super(SCMIWriter, self).get_filename( area_id=area_def.area_id, rows=area_def.y_size, columns=area_def.x_size, sector_id=sector_id, tile_id=tile_info.tile_id, **kwargs) def check_tile_exists(self, output_filename): """Check if tile exists and report error accordingly.""" if os.path.isfile(output_filename): if not self.overwrite_existing: LOG.error("AWIPS file already exists: %s", output_filename) raise RuntimeError("AWIPS file already exists: %s" % (output_filename,)) else: LOG.info("AWIPS file already exists, will update with new data: %s", output_filename) def save_datasets(self, datasets, sector_id=None, source_name=None, filename=None, tile_count=(1, 1), tile_size=None, lettered_grid=False, num_subtiles=None, use_end_time=False, use_sector_reference=False, compute=True, **kwargs): """Write a series of DataArray objects to multiple NetCDF4 SCMI files. Args: datasets (iterable): Series of gridded :class:`~xarray.DataArray` objects with the necessary metadata to be converted to a valid SCMI product file. sector_id (str): Name of the region or sector that the provided data is on. This name will be written to the NetCDF file and will be used as the sector in the AWIPS client. For lettered grids this name should match the name configured in the writer YAML. This is required but is defined as a keyword argument for better error handling in Satpy. source_name (str): Name of producer of these files (ex. "SSEC"). This name is used to create the output filename. filename (str): Filename format pattern to be filled in with dataset metadata for each tile. See YAML configuration file for default. tile_count (tuple): For numbered tiles only, how many tile rows and tile columns to produce. Defaults to ``(1, 1)``, a single giant tile. Either ``tile_count``, ``tile_size``, or ``lettered_grid`` should be specified. tile_size (tuple): For numbered tiles only, how many pixels each tile should be. This takes precedence over ``tile_count`` if specified. Either ``tile_count``, ``tile_size``, or ``lettered_grid`` should be specified. lettered_grid (bool): Whether to use a preconfigured grid and label tiles with letters and numbers instead of only numbers. For example, tiles will be named "A01", "A02", "B01", and so on in the first row of data and continue on to "A03", "A04", and "B03" in the default case where ``num_subtiles`` is (2, 2). Letters start in the upper-left corner and will go from A up to Z, if necessary. num_subtiles (tuple): For lettered tiles only, how many rows and columns to split each lettered tile in to. By default 2 rows and 2 columns will be created. For example, the tile for letter "A" will have "A01" and "A02" in the top row and "A03" and "A04" in the second row. use_end_time (bool): Instead of using the ``start_time`` for the product filename and time written to the file, use the ``end_time``. This is useful for multi-day composites where the ``end_time`` is a better representation of what data is in the file. use_sector_reference (bool): For lettered tiles only, whether to shift the data locations to align with the preconfigured grid's pixels. By default this is False, meaning that the grid's tiles will be shifted to align with the data locations. If True, the data is shifted. At most the data will be shifted by 0.5 pixels. See :mod:`satpy.writers.scmi` for more information. compute (bool): Compute and write the output immediately using dask. Defaults to ``True``.
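Example (a minimal sketch; the reader, file list, channel name, and sector ID are placeholders)::

    from satpy import Scene
    scn = Scene(reader='abi_l1b', filenames=my_files)
    scn.load(['C13'])
    # resample first if the data is not on a supported projection
    scn.save_datasets(writer='scmi', sector_id='LCC',
                      source_name='SSEC', lettered_grid=True)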
""" if sector_id is None: raise TypeError("Keyword 'sector_id' is required") area_datasets = self._group_by_area(datasets) sources_targets = [] for area_def, ds_list in area_datasets.values(): tile_gen = self._get_tile_generator( area_def, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference=use_sector_reference) for dataset in self._enhance_and_split_rgbs(ds_list): LOG.info("Preparing product %s to be written to AWIPS SCMI NetCDF file", dataset.attrs["name"]) awips_info = self._get_awips_info(dataset.attrs, source_name=source_name) for tile_info, tmp_tile in tile_gen(dataset): # make sure this entire tile is loaded as one single array tmp_tile.data = tmp_tile.data.rechunk(tmp_tile.shape) ds_info = dataset.attrs.copy() if use_end_time: # replace start_time with end_time for multi-day composites ds_info['start_time'] = ds_info['end_time'] output_filename = filename or self.get_filename(area_def, tile_info, sector_id, source_name=awips_info['source_name'], **ds_info) self.check_tile_exists(output_filename) nc_wrapper = NetCDFWrapper(output_filename, sector_id, ds_info, awips_info, tile_gen.xy_factors, tile_info, compress=self.compress, fix_awips=self.fix_awips) sources_targets.append((tmp_tile.data, nc_wrapper)) if compute and sources_targets: # the NetCDF creation is per-file so we don't need to lock return da.store(*zip(*sources_targets), lock=False) return sources_targets def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'): from PIL import Image, ImageDraw, ImageFont from pkg_resources import resource_filename as get_resource_filename size = (1000, 1000) img = Image.new("L", size, 0) draw = ImageDraw.Draw(img) if ':' in font_path: # load from a python package font_path = get_resource_filename(*font_path.split(':')) font = ImageFont.truetype(font_path, 25) ll_extent = sector_info['lower_left_xy'] ur_extent = sector_info['upper_right_xy'] total_meters_x = ur_extent[0] - ll_extent[0] total_meters_y = ur_extent[1] - ll_extent[1] fcs_x = np.ceil(float(sector_info['resolution'][1]) / num_subtiles[1]) fcs_y = np.ceil(float(sector_info['resolution'][0]) / num_subtiles[0]) total_cells_x = np.ceil(total_meters_x / fcs_x) total_cells_y = np.ceil(total_meters_y / fcs_y) total_cells_x = np.ceil(total_cells_x / num_subtiles[1]) * num_subtiles[1] total_cells_y = np.ceil(total_cells_y / num_subtiles[0]) * num_subtiles[0] # total_alpha_cells_x = int(total_cells_x / num_subtiles[1]) # total_alpha_cells_y = int(total_cells_y / num_subtiles[0]) # "round" the total meters up to the number of alpha cells # total_meters_x = total_cells_x * fcs_x # total_meters_y = total_cells_y * fcs_y # Pixels per tile ppt_x = np.floor(float(size[0]) / total_cells_x) ppt_y = np.floor(float(size[1]) / total_cells_y) half_ppt_x = np.floor(ppt_x / 2.) half_ppt_y = np.floor(ppt_y / 2.) # Meters per pixel meters_ppx = fcs_x / ppt_x meters_ppy = fcs_y / ppt_y for idx, alpha in enumerate(string.ascii_uppercase): for i in range(4): st_x = i % num_subtiles[1] st_y = int(i / num_subtiles[1]) t = "{}{:02d}".format(alpha, i + 1) t_size = font.getsize(t) cell_x = (idx * num_subtiles[1] + st_x) % total_cells_x cell_y = int(idx / (total_cells_x / num_subtiles[1])) * num_subtiles[0] + st_y if cell_x > total_cells_x: continue elif cell_y > total_cells_y: continue x = ppt_x * cell_x + half_ppt_x y = ppt_y * cell_y + half_ppt_y # draw box around the tile edge # PIL Documentation: "The second point is just outside the drawn rectangle." 
# we want to be just inside 0 and just inside the outer edge of the tile draw_rectangle(draw, (x - half_ppt_x, y - half_ppt_y, x + half_ppt_x, y + half_ppt_y), outline=255, fill=75, width=3) draw.text((x - t_size[0] / 2., y - t_size[1] / 2.), t, fill=255, font=font) img.save("test.png") from pyresample.utils import proj4_str_to_dict new_extents = ( ll_extent[0], ur_extent[1] - 1001. * meters_ppy, ll_extent[0] + 1001. * meters_ppx, ur_extent[1], ) grid_def = AreaDefinition( 'debug_grid', 'debug_grid', 'debug_grid', proj4_str_to_dict(sector_info['projection']), 1000, 1000, new_extents ) return grid_def, np.array(img) def draw_rectangle(draw, coordinates, outline=None, fill=None, width=1): """Draw simple rectangle in to a numpy array image.""" for i in range(width): rect_start = (coordinates[0] + i, coordinates[1] + i) rect_end = (coordinates[2] - i, coordinates[3] - i) draw.rectangle((rect_start, rect_end), outline=outline, fill=fill) def create_debug_lettered_tiles(init_args, create_args): """Create SCMI files with tile identifiers "burned" in to the image data for debugging.""" import xarray as xr create_args['lettered_grid'] = True create_args['num_subtiles'] = (2, 2) # default, don't use command line argument writer = SCMIWriter(**init_args) sector_id = create_args['sector_id'] sector_info = writer.scmi_sectors[sector_id] area_def, arr = _create_debug_array(sector_info, create_args['num_subtiles']) now = datetime.utcnow() product = xr.DataArray(da.from_array(arr, chunks='auto'), attrs=dict( name='debug_{}'.format(sector_id), platform_name='DEBUG', sensor='TILES', start_time=now, end_time=now, area=area_def, standard_name="toa_bidirectional_reflectance", units='1', valid_min=0, valid_max=255, )) created_files = writer.save_dataset( product, **create_args ) return created_files def add_backend_argument_groups(parser): """Add command line arguments for this writer used for debugging.""" group_1 = parser.add_argument_group(title="Backend Initialization") group_1.add_argument("--backend-configs", nargs="*", dest="backend_configs", help="alternative backend configuration files") group_1.add_argument("--compress", action="store_true", help="zlib compress each netcdf file") group_1.add_argument("--fix-awips", action="store_true", help="modify NetCDF output to work with the old/broken AWIPS NetCDF library") group_2 = parser.add_argument_group(title="Backend Output Creation") group_2.add_argument("--tiles", dest="tile_count", nargs=2, type=int, default=[1, 1], help="Number of tiles to produce in Y (rows) and X (cols) direction respectively") group_2.add_argument("--tile-size", dest="tile_size", nargs=2, type=int, default=None, help="Specify how many pixels are in each tile (overrides '--tiles')") # group.add_argument('--tile-offset', nargs=2, default=(0, 0), # help="Start counting tiles from this offset ('row_offset col_offset')") group_2.add_argument("--letters", dest="lettered_grid", action='store_true', help="Create tiles from a static letter-based grid based on the product projection") group_2.add_argument("--letter-subtiles", nargs=2, type=int, default=(2, 2), help="Specify number of subtiles in each lettered tile: \'row col\'") group_2.add_argument("--output-pattern", default=DEFAULT_OUTPUT_PATTERN, help="output filenaming pattern") group_2.add_argument("--source-name", default='SSEC', help="specify processing source name used in attributes and filename (default 'SSEC')") group_2.add_argument("--sector-id", required=True, help="specify name for sector/region used in attributes and filename 
(example 'LCC')") return group_1, group_2 def main(): """Command line interface mimicing CSPP Polar2Grid.""" import argparse parser = argparse.ArgumentParser(description="Create SCMI AWIPS compatible NetCDF files") subgroups = add_backend_argument_groups(parser) parser.add_argument("--create-debug", action='store_true', help='Create debug NetCDF files to show tile locations in AWIPS') parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0, help='each occurrence increases verbosity 1 level through ' 'ERROR-WARNING-INFO-DEBUG (default INFO)') parser.add_argument('-l', '--log', dest="log_fn", default=None, help="specify the log filename") args = parser.parse_args() init_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[0]._group_actions} create_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[1]._group_actions} # Logs are renamed once data the provided start date is known levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] logging.basicConfig(level=levels[min(3, args.verbosity)], filename=args.log_fn) if args.create_debug: create_debug_lettered_tiles(init_args, create_args) return else: raise NotImplementedError("Command line interface not implemented yet for SCMI writer") if __name__ == '__main__': sys.exit(main()) satpy-0.20.0/satpy/writers/simple_image.py000066400000000000000000000052121362525524100206020ustar00rootroot00000000000000#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """ """ import logging from satpy.writers import ImageWriter LOG = logging.getLogger(__name__) class PillowWriter(ImageWriter): def __init__(self, **kwargs): ImageWriter.__init__( self, default_config_filename="writers/simple_image.yaml", **kwargs) def save_image(self, img, filename=None, compute=True, **kwargs): """Save Image object to a given ``filename``. Args: img (trollimage.xrimage.XRImage): Image object to save to disk. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. compute (bool): If `True` (default), compute and save the dataset. If `False` return either a `dask.delayed.Delayed` object or tuple of (source, target). See the return values below for more information. **kwargs: Keyword arguments to pass to the images `save` method. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a `dask.delayed.Delayed` object or running `dask.array.store`. If `compute` is `False` then the returned value is either a `dask.delayed.Delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to `dask.array.store`. If target is provided the the caller is responsible for calling `target.close()` if the target has this method. 
""" filename = filename or self.get_filename(**img.data.attrs) LOG.debug("Saving to image: %s", filename) return img.save(filename, compute=compute, **kwargs) satpy-0.20.0/satpy/writers/utils.py000066400000000000000000000022471362525524100173140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer utilities.""" def flatten_dict(d, parent_key='', sep='_'): """Flatten a nested dictionary. Based on https://stackoverflow.com/a/6027615/5703449 """ items = [] for k, v in d.items(): new_key = parent_key + sep + k if parent_key else k if isinstance(v, dict): items.extend(flatten_dict(v, parent_key=new_key, sep=sep).items()) else: items.append((new_key, v)) return dict(items) satpy-0.20.0/setup.cfg000066400000000000000000000005041362525524100145560ustar00rootroot00000000000000[options] setup_requires = setuptools_scm setuptools_scm_git_archive [bdist_rpm] requires=h5py pyresample python2-numexpr pyhdf xarray dask h5netcdf release=1 doc_files = doc/Makefile doc/source/*.rst doc/examples/*.py [bdist_wheel] universal=1 [flake8] max-line-length = 120 exclude = satpy/readers/li_l2.py satpy-0.20.0/setup.py000066400000000000000000000124711362525524100144550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Setup file for satpy.""" import os.path from glob import glob from setuptools import find_packages, setup try: # HACK: https://github.com/pypa/setuptools_scm/issues/190#issuecomment-351181286 # Stop setuptools_scm from including all repository files import setuptools_scm.integration setuptools_scm.integration.find_files = lambda _: [] except ImportError: pass requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.11.0', 'trollsift', 'trollimage >1.10.1', 'pykdtree', 'six', 'pyyaml', 'xarray >=0.10.1, !=0.13.0', 'dask[array] >=0.17.1', 'pyproj', 'zarr'] test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', 'libtiff', 'rasterio', 'geoviews', 'trollimage'] extras_require = { # Readers: 'avhrr_l1b_gaclac': ['pygac >= 1.3.0'], 'modis_l1b': ['pyhdf', 'python-geotiepoints >= 1.1.7'], 'geocat': ['pyhdf'], 'acspo': ['netCDF4 >= 1.1.8'], 'clavrx': ['netCDF4 >= 1.1.8'], 'viirs_l1b': ['netCDF4 >= 1.1.8'], 'viirs_sdr': ['h5py >= 2.7.0'], 'viirs_compact': ['h5py >= 2.7.0'], 'omps_edr': ['h5py >= 2.7.0'], 'amsr2_l1b': ['h5py >= 2.7.0'], 'hrpt': ['pyorbital >= 1.3.1', 'pygac', 'python-geotiepoints >= 1.1.7'], 'proj': ['pyresample'], 'pyspectral': ['pyspectral >= 0.8.7'], 'pyorbital': ['pyorbital >= 1.3.1'], 'hrit_msg': ['pytroll-schedule'], 'nc_nwcsaf_msg': ['netCDF4 >= 1.1.8'], 'sar_c': ['python-geotiepoints >= 1.1.7', 'gdal'], 'abi_l1b': ['h5netcdf'], 'seviri_l2_bufr': ['eccodes-python'], 'hsaf_grib': ['pygrib'], # Writers: 'cf': ['h5netcdf >= 0.7.3'], 'scmi': ['netCDF4 >= 1.1.8'], 'geotiff': ['rasterio', 'trollimage[geotiff]'], 'mitiff': ['libtiff'], 'ninjo': ['pyninjotiff', 'pint'], # MultiScene: 'animations': ['imageio'], # Documentation: 'doc': ['sphinx'], # Other 'geoviews': ['geoviews'], } all_extras = [] for extra_deps in extras_require.values(): all_extras.extend(extra_deps) extras_require['all'] = list(set(all_extras)) def _config_data_files(base_dirs, extensions=(".cfg", )): """Find all subdirectory configuration files. Searches each base directory relative to this setup.py file and finds all files ending in the extensions provided. :param base_dirs: iterable of relative base directories to search :param extensions: iterable of file extensions to include (with '.' 
prefix) :returns: list of 2-element tuples compatible with `setuptools.setup` """ data_files = [] pkg_root = os.path.realpath(os.path.dirname(__file__)) + "/" for base_dir in base_dirs: new_data_files = [] for ext in extensions: configs = glob(os.path.join(pkg_root, base_dir, "*" + ext)) configs = [c.replace(pkg_root, "") for c in configs] new_data_files.extend(configs) data_files.append((base_dir, new_data_files)) return data_files NAME = 'satpy' README = open('README.rst', 'r').read() setup(name=NAME, description='Python package for earth-observing satellite data processing', long_description=README, author='The Pytroll Team', author_email='pytroll@googlegroups.com', classifiers=["Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "License :: OSI Approved :: GNU General Public License v3 " + "or later (GPLv3+)", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering"], url="https://github.com/pytroll/satpy", test_suite='satpy.tests.suite', packages=find_packages(), package_data={'satpy': [os.path.join('etc', 'geo_image.cfg'), os.path.join('etc', 'areas.yaml'), os.path.join('etc', 'satpy.cfg'), os.path.join('etc', 'himawari-8.cfg'), os.path.join('etc', 'eps_avhrrl1b_6.5.xml'), os.path.join('etc', 'readers', '*.yaml'), os.path.join('etc', 'writers', '*.yaml'), os.path.join('etc', 'composites', '*.yaml'), os.path.join('etc', 'enhancements', '*.cfg'), os.path.join('etc', 'enhancements', '*.yaml'), ]}, zip_safe=False, use_scm_version=True, install_requires=requires, tests_require=test_requires, python_requires='>=3.6', extras_require=extras_require, ) satpy-0.20.0/utils/000077500000000000000000000000001362525524100140765ustar00rootroot00000000000000satpy-0.20.0/utils/convert_to_ninjotiff.py000066400000000000000000000065651362525524100207130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """ First version of a simple command line tool that converts an image into a NinJo Tiff file. NinJo Tiff metadata can be passed as command line input or through a config file (an example is given in the ninjo-cmd.yaml file in the etc directory). The area matching the input image shall be defined in the areas configuration file (located in $PPP_CONFIG_DIR).
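An illustrative invocation (all values are placeholders)::

    python convert_to_ninjotiff.py --input_dir /path/to/images --cfg ninjo-cmd.yaml \
        --area nrEURO1km_NPOL_COALeqc --ph_unit CELSIUS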
""" import os from satpy.utils import debug_on from satpy import Scene from mpop.projector import get_area_def import argparse import yaml try: from yaml import UnsafeLoader except ImportError: from yaml import Loader as UnsafeLoader debug_on() parser = argparse.ArgumentParser(description='Turn an image into a NinjoTiff.') parser.add_argument('--cfg', dest='cfg', action="store", help="YAML configuration as an alternative to the command line input for NinJo metadata.") parser.add_argument('--input_dir', dest='input_dir', action="store", help="Directory with input data, that must contain a timestamp in the filename.") parser.add_argument('--chan_id', dest='chan_id', action="store", help="Channel ID", default="9999") parser.add_argument('--sat_id', dest='sat_id', action="store", help="Satellite ID", default="8888") parser.add_argument('--data_cat', dest='data_cat', action="store", help="Category of data (one of GORN, GPRN, PORN)", default="GORN") parser.add_argument('--area', dest='areadef', action="store", help="Area name, the definition must exist in your areas configuration file", default="nrEURO1km_NPOL_COALeqc") parser.add_argument('--ph_unit', dest='ph_unit', action="store", help="Physical unit", default="CELSIUS") parser.add_argument('--data_src', dest='data_src', action="store", help="Data source", default="EUMETCAST") args = parser.parse_args() if (args.input_dir is not None): os.chdir(args.input_dir) cfg = vars(args) if (args.cfg is not None): with open(args.cfg, 'r') as ymlfile: cfg = yaml.load(ymlfile, Loader=UnsafeLoader) narea = get_area_def(args.areadef) global_data = Scene(sensor="images", reader="generic_image", area=narea) global_data.load(['image']) global_data['image'].info['area'] = narea fname = global_data['image'].info['filename'] ofname = fname[:-3] + "tif" # global_data.save_dataset('image', filename="out.png", writer="simple_image") global_data.save_dataset('image', filename=ofname, writer="ninjotiff", sat_id=cfg['sat_id'], chan_id=cfg['chan_id'], data_cat=cfg['data_cat'], data_source=cfg['data_src'], physic_unit=cfg['ph_unit']) satpy-0.20.0/utils/coord2area_def.py000066400000000000000000000130661362525524100173150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Convert human coordinates (lon and lat) to an area definition. Here is a usage example. python coord2area_def.py france stere 42.0 51.5 -5.5 8.0 1.5 The arguments are "name proj min_lat max_lat min_lon max_lon resolution(km)". The command above yelds the following result. ### +proj=stere +lat_0=46.75 +lon_0=1.25 +ellps=WGS84 france: description: france projection: proj: stere ellps: WGS84 lat_0: 46.75 lon_0: 1.25 shape: height: 703 width: 746 area_extent: lower_left_xy: [-559750.381098, -505020.675776] upper_right_xy: [559750.381098, 549517.351948] The first commented line is just a sum-up. 
The value of "description" can be changed to any descriptive text. Such a custom yaml configuration can be profitably saved in a local areas.yaml configuration file that won't be overridden by future updates of SatPy package. For that purpose the local processing script may have suitable lines as reported below. # set PPP_CONFIG_DIR for custom composites import os os.environ['PPP_CONFIG_DIR'] = '/my_local_path/for_satpy_configuration' As a further functionality this script may give a quick display of the defined area, provided the path for the GSHHG library is supplied via the "-s" option and the modules PyCoast, Pillow and AggDraw have been installed. python coord2area_def.py france stere 42.0 51.5 -5.5 8.0 1.5 -s /path/for/gshhs/library The command above would first print the seen area definition and then launch a casual representation of the area relying on the information about borders involved. """ import argparse import sys from pyproj import Proj if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("name", help="The name of the area.") parser.add_argument("proj", help="The projection to use. Use proj.4 names, like 'stere', 'merc'...") parser.add_argument("min_lat", help="The the latitude of the bottom of the area", type=float) parser.add_argument("max_lat", help="The the latitude of the top of the area", type=float) parser.add_argument("min_lon", help="The the longitude of the left of the area", type=float) parser.add_argument("max_lon", help="The the longitude of the right of the area", type=float) parser.add_argument("resolution", help="The resolution of the area (in km)", type=float) parser.add_argument("-s", "--shapes", help="Show a preview of the area using the coastlines in this directory") args = parser.parse_args() name = args.name proj = args.proj left = args.min_lon right = args.max_lon up = args.min_lat down = args.max_lat res = args.resolution * 1000 lat_0 = (up + down) / 2 lon_0 = (right + left) / 2 p = Proj(proj=proj, lat_0=lat_0, lon_0=lon_0, ellps="WGS84") left_ex1, up_ex1 = p(left, up) right_ex1, up_ex2 = p(right, up) left_ex2, down_ex1 = p(left, down) right_ex2, down_ex2 = p(right, down) left_ex3, dummy = p(left, lat_0) right_ex3, dummy = p(right, lat_0) area_extent = (min(left_ex1, left_ex2, left_ex3), min(up_ex1, up_ex2), max(right_ex1, right_ex2, right_ex3), max(down_ex1, down_ex2)) xsize = int(round((area_extent[2] - area_extent[0]) / res)) ysize = int(round((area_extent[3] - area_extent[1]) / res)) proj4_string = "+" + \ " +".join(("proj=" + proj + ",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) print('### ' + proj4_string) print() print(name + ":") print(" description: " + name) print(" projection:") print(" proj: " + proj) print(" ellps: WGS84") print(" lat_0: " + str(lat_0)) print(" lon_0: " + str(lon_0)) print(" shape:") print(" height: " + str(ysize)) print(" width: " + str(xsize)) print(" area_extent:") print(" lower_left_xy: [%f, %f]" % (area_extent[0], area_extent[1])) print(" upper_right_xy: [%f, %f]" % (area_extent[2], area_extent[3])) if args.shapes is None: sys.exit(0) from PIL import Image from pycoast import ContourWriterAGG img = Image.new('RGB', (xsize, ysize)) area_def = (proj4_string, area_extent) cw = ContourWriterAGG(args.shapes) cw.add_coastlines(img, (proj4_string, area_extent), resolution='l', width=0.5) cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline='white', outline_opacity=175, width=1.0, minor_outline='white', minor_outline_opacity=175, 
    if args.shapes is None:
        sys.exit(0)

    from PIL import Image
    from pycoast import ContourWriterAGG

    img = Image.new('RGB', (xsize, ysize))
    area_def = (proj4_string, area_extent)
    cw = ContourWriterAGG(args.shapes)
    cw.add_coastlines(img, (proj4_string, area_extent),
                      resolution='l', width=0.5)

    cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0),
                write_text=False, outline='white', outline_opacity=175,
                width=1.0, minor_outline='white', minor_outline_opacity=175,
                minor_width=0.2, minor_is_tick=False)
    img.show()
satpy-0.20.0/utils/fetch_avhrr_calcoeffs.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch AVHRR calibration coefficients."""

import datetime as dt
import os.path
import sys
from urllib.request import urlopen

import h5py

BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
           "AVHRR/Op_Cal_AVHRR/"

URLS = {
    "Metop-B": {"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt",
                "ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt",
                "ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"},
    "Metop-A": {"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt",
                "ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt",
                "ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"},
    "NOAA-16": {"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt",
                "ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"},
    "NOAA-17": {"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt",
                "ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt",
                "ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"},
    "NOAA-18": {"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt",
                "ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"},
    "NOAA-19": {"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt",
                "ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"}
}


def get_page(url):
    '''Retrieve the given page.'''
    return urlopen(url).read().decode('utf-8')


def get_coeffs(page):
    '''Parse coefficients from the page.'''
    coeffs = {}
    coeffs['datetime'] = []
    coeffs['slope1'] = []
    coeffs['intercept1'] = []
    coeffs['slope2'] = []
    coeffs['intercept2'] = []

    slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \
        None, None, None, None
    date_idx = 0
    for row in page.lower().split('\n'):
        row = row.split()
        if len(row) == 0:
            continue
        if row[0] == 'update':
            # Get the column indices from the header line
            slope1_idx = row.index('slope_lo')
            intercept1_idx = row.index('int_lo')
            slope2_idx = row.index('slope_hi')
            intercept2_idx = row.index('int_hi')
            continue
        if slope1_idx is None:
            continue
        # In some cases the fields are connected, skip those rows
        if max([slope1_idx, intercept1_idx,
                slope2_idx, intercept2_idx]) >= len(row):
            continue
        try:
            dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y")
        except ValueError:
            continue
        coeffs['datetime'].append([dat.year, dat.month, dat.day])
        coeffs['slope1'].append(float(row[slope1_idx]))
        coeffs['intercept1'].append(float(row[intercept1_idx]))
        coeffs['slope2'].append(float(row[slope2_idx]))
        coeffs['intercept2'].append(float(row[intercept2_idx]))

    return coeffs


def get_all_coeffs():
    '''Get all available calibration coefficients for the satellites.'''
    coeffs = {}

    for platform in URLS.keys():
        if platform not in coeffs:
            coeffs[platform] = {}
        for chan in URLS[platform].keys():
            url = URLS[platform][chan]
            print(url)
            page = get_page(url)
            coeffs[platform][chan] = get_coeffs(page)

    return coeffs
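# Shape of the nested result, sketched with invented values for illustration:
#
#     coeffs["NOAA-19"]["ch1"]["datetime"]  # -> [[2009, 2, 6], [2009, 2, 13], ...]
#     coeffs["NOAA-19"]["ch1"]["slope1"]    # -> [0.0551, 0.0553, ...]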
def save_coeffs(coeffs, out_dir=''):
    '''Save calibration coefficients to HDF5 files.'''
    for platform in coeffs.keys():
        fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform)
        fid = h5py.File(fname, 'w')

        for chan in coeffs[platform].keys():
            fid.create_group(chan)
            fid[chan]['datetime'] = coeffs[platform][chan]['datetime']
            fid[chan]['slope1'] = coeffs[platform][chan]['slope1']
            fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1']
            fid[chan]['slope2'] = coeffs[platform][chan]['slope2']
            fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2']

        fid.close()
        print("Calibration coefficients saved for %s" % platform)


def main():
    '''Create calibration coefficient files for AVHRR.'''
    out_dir = sys.argv[1]
    coeffs = get_all_coeffs()
    save_coeffs(coeffs, out_dir=out_dir)


if __name__ == "__main__":
    main()
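# Typical invocation, writing one HDF5 file per platform into the given
# directory (the path is illustrative):
#
#     python fetch_avhrr_calcoeffs.py /path/to/output_dir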